From 3ae70777a434bcb5ab383edc68859774d2b5e5a6 Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Tue, 28 Apr 2026 19:44:39 -0700 Subject: [PATCH 01/14] Move some files to PointNMap package and import accordingly --- IOSAccessAssessment.xcodeproj/project.pbxproj | 62 +++++++++++-------- .../Annotation/AnnotationImageManager.swift | 1 + .../Contour/ContourFeatureRasterizer.swift | 1 + .../Mesh/Utils/MeshRasterizer.swift | 1 + .../Projection/Plane/PlaneRasterizer.swift | 1 + .../DamageDetectionRasterizer.swift | 1 + .../Shared/Definitions/RasterizeConfig.swift | 21 ------- .../Shared/SharedAppData.swift | 1 + .../Shared/Utils/Extensions.swift | 14 ----- .../Shared/Utils/MetalBufferUtils.swift | 18 +++--- .../Shared/Utils/SafeDeque.swift | 22 +++---- 11 files changed, 63 insertions(+), 80 deletions(-) delete mode 100644 IOSAccessAssessment/Shared/Definitions/RasterizeConfig.swift delete mode 100644 IOSAccessAssessment/Shared/Utils/Extensions.swift diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 7766b6d8..fc68742b 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 56; + objectVersion = 60; objects = { /* Begin PBXBuildFile section */ @@ -58,10 +58,13 @@ A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */; }; A3431E022F26FA2C00B96610 /* LocationExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E012F26FA2700B96610 /* LocationExtension.swift */; }; A3431E042F26FA7200B96610 /* OtherAttributeExtensionLegacy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */; }; + A34509D22FA1A49F003157B0 /* PointNMap in Frameworks */ = {isa = PBXBuildFile; productRef = A34509D12FA1A49F003157B0 /* PointNMap */; }; + 
A34509D42FA1A49F003157B0 /* ShaderTypes in Frameworks */ = {isa = PBXBuildFile; productRef = A34509D32FA1A49F003157B0 /* ShaderTypes */; }; + A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D72FA1A6FA003157B0 /* SafeDeque.swift */; }; + A34509DA2FA1A782003157B0 /* MetalBufferUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */; }; A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */; }; A35547152EC198A600F43AFD /* ContourRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */; }; A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A355471D2EC1A47200F43AFD /* SharedAppData.swift */; }; - A35547C22EC1AE4E00F43AFD /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C12EC1AE4C00F43AFD /* SafeDeque.swift */; }; A35547C42EC1AF5700F43AFD /* CaptureData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C32EC1AF5500F43AFD /* CaptureData.swift */; }; A35547C82EC1B0DB00F43AFD /* CurrentMappedFeaturesData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */; }; A35547CA2EC2045F00F43AFD /* CapturedMeshSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C92EC2045F00F43AFD /* CapturedMeshSnapshot.swift */; }; @@ -109,7 +112,6 @@ A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */; }; A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */; }; A3A413A22EC9C3FA0039298C /* MeshRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A12EC9C3F60039298C /* 
MeshRasterizer.swift */; }; - A3A413A42ECD3C7E0039298C /* RasterizeConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A32ECD3C7B0039298C /* RasterizeConfig.swift */; }; A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A52ECD86260039298C /* AccessibilityFeature.swift */; }; A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413AC2ECF94950039298C /* DBSCAN.swift */; }; A3A45F0A2EE7A4E10029F5AE /* UnionOfMasksPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A45F092EE7A4DE0029F5AE /* UnionOfMasksPolicy.swift */; }; @@ -123,7 +125,6 @@ A3B5BDA32F8329740036C6EC /* ProjectedWorldPointsExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BDA22F83296C0036C6EC /* ProjectedWorldPointsExtension.swift */; }; A3B5BDA52F8329A80036C6EC /* WorldPointsGridExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BDA42F8329A20036C6EC /* WorldPointsGridExtension.swift */; }; A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */; }; - A3B61FC92F78F93B0052AE2C /* Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FC82F78F9390052AE2C /* Extensions.swift */; }; A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */; }; A3BB5AFB2DB210AE008673ED /* BinaryMaskFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BB5AFA2DB210A8008673ED /* BinaryMaskFilter.swift */; }; A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */; }; @@ -141,7 +142,6 @@ A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D752E654F14003BFE78 /* ProfileView.swift */; }; 
A3DA4DA82EB94D84005BB812 /* MeshGPUSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DA72EB94D81005BB812 /* MeshGPUSnapshot.swift */; }; A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */; }; - A3DA4DB12EB99A5C005BB812 /* MetalBufferUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DB02EB99A5A005BB812 /* MetalBufferUtils.swift */; }; A3DA4DB62EBAE101005BB812 /* Stub.m in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DB52EBAE101005BB812 /* Stub.m */; }; A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */; }; A3DA4DBE2EBCB9F9005BB812 /* MetalContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */; }; @@ -283,10 +283,11 @@ A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEnvironment.swift; sourceTree = ""; }; A3431E012F26FA2700B96610 /* LocationExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationExtension.swift; sourceTree = ""; }; A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OtherAttributeExtensionLegacy.swift; sourceTree = ""; }; + A34509D72FA1A6FA003157B0 /* SafeDeque.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SafeDeque.swift; sourceTree = ""; }; + A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalBufferUtils.swift; sourceTree = ""; }; A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2.mlpackage; sourceTree = ""; }; A35547142EC198A600F43AFD /* 
ContourRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourRequestProcessor.swift; sourceTree = ""; }; A355471D2EC1A47200F43AFD /* SharedAppData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppData.swift; sourceTree = ""; }; - A35547C12EC1AE4C00F43AFD /* SafeDeque.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SafeDeque.swift; sourceTree = ""; }; A35547C32EC1AF5500F43AFD /* CaptureData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureData.swift; sourceTree = ""; }; A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentMappedFeaturesData.swift; sourceTree = ""; }; A35547C92EC2045F00F43AFD /* CapturedMeshSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CapturedMeshSnapshot.swift; sourceTree = ""; }; @@ -336,7 +337,6 @@ A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIConstants.swift; sourceTree = ""; }; A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageManager.swift; sourceTree = ""; }; A3A413A12EC9C3F60039298C /* MeshRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshRasterizer.swift; sourceTree = ""; }; - A3A413A32ECD3C7B0039298C /* RasterizeConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RasterizeConfig.swift; sourceTree = ""; }; A3A413A52ECD86260039298C /* AccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeature.swift; sourceTree = ""; }; A3A413AC2ECF94950039298C /* DBSCAN.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
DBSCAN.swift; sourceTree = ""; }; A3A45F092EE7A4DE0029F5AE /* UnionOfMasksPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UnionOfMasksPolicy.swift; sourceTree = ""; }; @@ -350,7 +350,6 @@ A3B5BDA22F83296C0036C6EC /* ProjectedWorldPointsExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProjectedWorldPointsExtension.swift; sourceTree = ""; }; A3B5BDA42F8329A20036C6EC /* WorldPointsGridExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorldPointsGridExtension.swift; sourceTree = ""; }; A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnvironmentService.swift; sourceTree = ""; }; - A3B61FC82F78F9390052AE2C /* Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Extensions.swift; sourceTree = ""; }; A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMMapDataResponse.swift; sourceTree = ""; }; A3BB5AFA2DB210A8008673ED /* BinaryMaskFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BinaryMaskFilter.swift; sourceTree = ""; }; A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureEncoder.swift; sourceTree = ""; }; @@ -367,7 +366,6 @@ A3D78D752E654F14003BFE78 /* ProfileView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileView.swift; sourceTree = ""; }; A3DA4DA72EB94D81005BB812 /* MeshGPUSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshGPUSnapshot.swift; sourceTree = ""; }; A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = MeshPipeline.metal; sourceTree = ""; }; - 
A3DA4DB02EB99A5A005BB812 /* MetalBufferUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalBufferUtils.swift; sourceTree = ""; }; A3DA4DB32EBAE05C005BB812 /* ShaderTypes.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ShaderTypes.h; sourceTree = ""; }; A3DA4DB42EBAE101005BB812 /* IOSAccessAssessment-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "IOSAccessAssessment-Bridging-Header.h"; sourceTree = ""; }; A3DA4DB52EBAE101005BB812 /* Stub.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Stub.m; sourceTree = ""; }; @@ -430,8 +428,10 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + A34509D22FA1A49F003157B0 /* PointNMap in Frameworks */, A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */, A3FCC2FB2DA4E1880037AB43 /* OrderedCollections in Frameworks */, + A34509D42FA1A49F003157B0 /* ShaderTypes in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -537,7 +537,7 @@ isa = PBXGroup; children = ( A37E720A2ED571A800CFE4EF /* Definitions */, - A35547C02EC1AE4600F43AFD /* Utils */, + A34509DB2FA1A7A7003157B0 /* Utils */, A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */, A355471D2EC1A47200F43AFD /* SharedAppData.swift */, DAA7F8B42CA38C11003666D8 /* Constants.swift */, @@ -820,6 +820,15 @@ path = Extensions; sourceTree = ""; }; + A34509DB2FA1A7A7003157B0 /* Utils */ = { + isa = PBXGroup; + children = ( + A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */, + A34509D72FA1A6FA003157B0 /* SafeDeque.swift */, + ); + path = Utils; + sourceTree = ""; + }; A34B70CC2DDFE638007B191F /* ARCamera */ = { isa = PBXGroup; children = ( @@ -833,16 +842,6 @@ path = ARCamera; sourceTree = ""; }; - A35547C02EC1AE4600F43AFD /* Utils */ = { - isa = PBXGroup; - children = ( - A3B61FC82F78F9390052AE2C /* Extensions.swift */, - A35547C12EC1AE4C00F43AFD /* SafeDeque.swift */, - A3DA4DB02EB99A5A005BB812 /* 
MetalBufferUtils.swift */, - ); - path = Utils; - sourceTree = ""; - }; A35E050B2EDE359C003C26CF /* AttributeEstimation */ = { isa = PBXGroup; children = ( @@ -987,7 +986,6 @@ A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */, A374B4AB2F8EF654003E030D /* CurrentMappingData.swift */, A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */, - A3A413A32ECD3C7B0039298C /* RasterizeConfig.swift */, ); path = Definitions; sourceTree = ""; @@ -1385,6 +1383,7 @@ mainGroup = 3222F90D2B622DFD0019A079; packageReferences = ( CAF812C12CFA0FD400D44B84 /* XCRemoteSwiftPackageReference "swift-collections" */, + A34509D02FA1A49F003157B0 /* XCLocalSwiftPackageReference "../PointNMap" */, ); productRefGroup = 3222F9172B622DFD0019A079 /* Products */; projectDirPath = ""; @@ -1467,7 +1466,6 @@ A30BED382ED162E7004A5B51 /* MeshDefinitions.swift in Sources */, A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */, A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */, - A35547C22EC1AE4E00F43AFD /* SafeDeque.swift in Sources */, A3C55A492EAFFABF00F6CFDC /* CenterCropTransformUtilsExtension.swift in Sources */, A3EE6E4A2F580D6200F515E6 /* TestCameraView.swift in Sources */, A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */, @@ -1521,6 +1519,7 @@ A3DC22F92DD036AF0020CE84 /* UnionOfMasksProcessor.swift in Sources */, A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */, A32D66FD2F7EF10F00DC4173 /* DamageDetectionPipeline.swift in Sources */, + A34509DA2FA1A782003157B0 /* MetalBufferUtils.swift in Sources */, A3EE6E482F580D0D00F515E6 /* TestListView.swift in Sources */, A30D05842E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage in Sources */, A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */, @@ -1547,12 +1546,10 @@ A30801502EC0926800B1BA3A /* ContourUtils.swift in Sources */, A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */, A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in 
Sources */, - A3DA4DB12EB99A5C005BB812 /* MetalBufferUtils.swift in Sources */, A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */, A3EE6E4E2F5A258B00F515E6 /* TestCameraManager.swift in Sources */, A37E72102ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift in Sources */, A329434C2EE7CFE800C4C1BC /* OSWField.swift in Sources */, - A3A413A42ECD3C7E0039298C /* RasterizeConfig.swift in Sources */, A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */, A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */, A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */, @@ -1565,6 +1562,7 @@ A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */, A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */, A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */, + A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */, A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */, A3E161D22F3A8AF6002D4D08 /* CenterCropTransformUtils.swift in Sources */, A35E050D2EDE35E1003C26CF /* LocalizationProcessor.swift in Sources */, @@ -1606,7 +1604,6 @@ A30BED3A2ED162F1004A5B51 /* ConnectedComponents.swift in Sources */, A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */, A35547CA2EC2045F00F43AFD /* CapturedMeshSnapshot.swift in Sources */, - A3B61FC92F78F93B0052AE2C /* Extensions.swift in Sources */, DAA7F8B52CA38C11003666D8 /* Constants.swift in Sources */, A30F59D22F7EFACD00EE7804 /* CrossSlopeExtension.swift in Sources */, ); @@ -1973,6 +1970,13 @@ }; /* End XCConfigurationList section */ +/* Begin XCLocalSwiftPackageReference section */ + A34509D02FA1A49F003157B0 /* XCLocalSwiftPackageReference "../PointNMap" */ = { + isa = XCLocalSwiftPackageReference; + relativePath = ../PointNMap; + }; +/* End XCLocalSwiftPackageReference section */ + /* Begin XCRemoteSwiftPackageReference section */ A3C22FD62CF2F0C300533BF7 /* XCRemoteSwiftPackageReference "swift-collections" */ = { isa = 
XCRemoteSwiftPackageReference; @@ -1993,6 +1997,14 @@ /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ + A34509D12FA1A49F003157B0 /* PointNMap */ = { + isa = XCSwiftPackageProductDependency; + productName = PointNMap; + }; + A34509D32FA1A49F003157B0 /* ShaderTypes */ = { + isa = XCSwiftPackageProductDependency; + productName = ShaderTypes; + }; A3C22FD72CF2F0C300533BF7 /* DequeModule */ = { isa = XCSwiftPackageProductDependency; package = A3C22FD62CF2F0C300533BF7 /* XCRemoteSwiftPackageReference "swift-collections" */; diff --git a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift b/IOSAccessAssessment/Annotation/AnnotationImageManager.swift index df964bf2..7e67d554 100644 --- a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift +++ b/IOSAccessAssessment/Annotation/AnnotationImageManager.swift @@ -6,6 +6,7 @@ // import SwiftUI import DequeModule +import PointNMap enum AnnotationImageManagerError: Error, LocalizedError { case notConfigured diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift index dccb6acd..2af0f6ce 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift +++ b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift @@ -7,6 +7,7 @@ import CoreImage import UIKit +import PointNMap /** A temporary struct to perform rasterization of detected objects. diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift b/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift index 94f67a5e..ca02d67d 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift +++ b/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift @@ -7,6 +7,7 @@ import CoreImage import UIKit +import PointNMap /** Functions to rasterize mesh triangles into an image. 
diff --git a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift b/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift index 4583f90d..895b2dbc 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift +++ b/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift @@ -7,6 +7,7 @@ import CoreImage import UIKit +import PointNMap struct PlaneRasterizer { /** diff --git a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift b/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift index 3616a858..a74aca19 100644 --- a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift +++ b/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift @@ -7,6 +7,7 @@ import CoreImage import UIKit +import PointNMap struct DamageDetectionRasterizer { static func rasterizeDamageDetection( diff --git a/IOSAccessAssessment/Shared/Definitions/RasterizeConfig.swift b/IOSAccessAssessment/Shared/Definitions/RasterizeConfig.swift deleted file mode 100644 index ccfeda33..00000000 --- a/IOSAccessAssessment/Shared/Definitions/RasterizeConfig.swift +++ /dev/null @@ -1,21 +0,0 @@ -// -// RasterizeConfig.swift -// IOSAccessAssessment -// -// Created by Himanshu on 11/18/25. -// -import UIKit - -struct RasterizeConfig { - let draw: Bool - let color: UIColor? 
- let width: CGFloat - let alpha: CGFloat - - init(draw: Bool = true, color: UIColor?, width: CGFloat = 2.0, alpha: CGFloat = 1.0) { - self.draw = draw - self.color = color - self.width = width - self.alpha = alpha - } -} diff --git a/IOSAccessAssessment/Shared/SharedAppData.swift b/IOSAccessAssessment/Shared/SharedAppData.swift index 50d5307b..15eedd45 100644 --- a/IOSAccessAssessment/Shared/SharedAppData.swift +++ b/IOSAccessAssessment/Shared/SharedAppData.swift @@ -8,6 +8,7 @@ import SwiftUI import DequeModule import simd +import PointNMap @MainActor final class SharedAppData: ObservableObject { diff --git a/IOSAccessAssessment/Shared/Utils/Extensions.swift b/IOSAccessAssessment/Shared/Utils/Extensions.swift deleted file mode 100644 index 0c8646fb..00000000 --- a/IOSAccessAssessment/Shared/Utils/Extensions.swift +++ /dev/null @@ -1,14 +0,0 @@ -// -// Extensions.swift -// IOSAccessAssessment -// -// Created by Himanshu on 3/28/26. -// - -import Foundation - -extension Double { - func roundedTo7Digits() -> Double { - (self * 1_000_0000).rounded() / 1_000_0000 - } -} diff --git a/IOSAccessAssessment/Shared/Utils/MetalBufferUtils.swift b/IOSAccessAssessment/Shared/Utils/MetalBufferUtils.swift index 7a53e178..3f170a64 100644 --- a/IOSAccessAssessment/Shared/Utils/MetalBufferUtils.swift +++ b/IOSAccessAssessment/Shared/Utils/MetalBufferUtils.swift @@ -9,11 +9,11 @@ import RealityKit import Metal import simd -enum MetalBufferUtilsError: Error, LocalizedError { +public enum MetalBufferUtilsError: Error, LocalizedError { case bufferTooSmall(expected: Int, actual: Int) case bufferCreationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .bufferTooSmall(let expected, let actual): return "The provided buffer is too small. Expected at least \(expected) bytes, but got \(actual) bytes." 
@@ -24,11 +24,11 @@ enum MetalBufferUtilsError: Error, LocalizedError { } -struct MetalBufferUtils { - static let defaultBufferSize: Int = 1024 +public struct MetalBufferUtils { + public static let defaultBufferSize: Int = 1024 @inline(__always) - static func copyContiguous(srcPtr: UnsafeRawPointer, dst: MTLBuffer, byteCount: Int) throws { + public static func copyContiguous(srcPtr: UnsafeRawPointer, dst: MTLBuffer, byteCount: Int) throws { guard byteCount <= dst.length else { throw MetalBufferUtilsError.bufferTooSmall(expected: byteCount, actual: dst.length) } @@ -37,7 +37,7 @@ struct MetalBufferUtils { } @inline(__always) - static func copyStrided(count: Int, srcPtr: UnsafeRawPointer, srcStride: Int, + public static func copyStrided(count: Int, srcPtr: UnsafeRawPointer, srcStride: Int, dst: MTLBuffer, elemSize: Int) throws { guard count * elemSize <= dst.length else { throw MetalBufferUtilsError.bufferTooSmall(expected: count * elemSize, actual: dst.length) @@ -51,7 +51,7 @@ struct MetalBufferUtils { } @inline(__always) - static func ensureCapacity(device: MTLDevice, buf: inout MTLBuffer, requiredBytes: Int) throws { + public static func ensureCapacity(device: MTLDevice, buf: inout MTLBuffer, requiredBytes: Int) throws { if buf.length < requiredBytes { let newCapacity = nextCap(requiredBytes) buf = try makeBuffer(device: device, length: newCapacity, options: .storageModeShared) @@ -59,7 +59,7 @@ struct MetalBufferUtils { } @inline(__always) - static func makeBuffer(device: MTLDevice, length: Int, options: MTLResourceOptions = .storageModeShared) throws -> MTLBuffer { + public static func makeBuffer(device: MTLDevice, length: Int, options: MTLResourceOptions = .storageModeShared) throws -> MTLBuffer { guard let buffer = device.makeBuffer(length: length, options: options) else { throw MetalBufferUtilsError.bufferCreationFailed } @@ -70,7 +70,7 @@ struct MetalBufferUtils { Calculate the next power-of-two capacity greater than or equal to needed */ @inline(__always) 
- static func nextCap(_ needed: Int, minimum: Int = 1024) -> Int { + public static func nextCap(_ needed: Int, minimum: Int = 1024) -> Int { let maximum: Int = Int.max >> 2 if needed > maximum { return Int.max diff --git a/IOSAccessAssessment/Shared/Utils/SafeDeque.swift b/IOSAccessAssessment/Shared/Utils/SafeDeque.swift index 9fa07c57..03b4c5f9 100644 --- a/IOSAccessAssessment/Shared/Utils/SafeDeque.swift +++ b/IOSAccessAssessment/Shared/Utils/SafeDeque.swift @@ -7,29 +7,29 @@ import DequeModule -actor SafeDeque: Sendable { +public actor SafeDeque: Sendable { private var storage = Deque() private let capacity: Int - var isEmpty: Bool { storage.isEmpty } - var count: Int { storage.count } + public var isEmpty: Bool { storage.isEmpty } + public var count: Int { storage.count } - init(capacity: Int = 1) { + public init(capacity: Int = 1) { self.capacity = capacity } /// Cheap value snapshot (copy-on-write) - func snapshot() -> Deque { storage } + public func snapshot() -> Deque { storage } - subscript(index: Deque.Index) -> Element { storage[index] } + public subscript(index: Deque.Index) -> Element { storage[index] } - func appendBack(_ element: Element) { + public func appendBack(_ element: Element) { if storage.count >= capacity { _ = storage.popFirst() } storage.append(element) } - func appendFront(_ element: Element) { + public func appendFront(_ element: Element) { if storage.count >= capacity { _ = storage.popLast() } @@ -37,16 +37,16 @@ actor SafeDeque: Sendable { } @discardableResult - func popBack() -> Element? { + public func popBack() -> Element? { storage.popLast() } @discardableResult - func popFront() -> Element? { + public func popFront() -> Element? 
{ storage.popFirst() } - func removeAll(keepingCapacity: Bool = false) { + public func removeAll(keepingCapacity: Bool = false) { storage.removeAll(keepingCapacity: keepingCapacity) } } From 2e6b6003abafe15c9eeb7f776b5d003da07a101f Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Wed, 29 Apr 2026 22:30:52 -0700 Subject: [PATCH 02/14] Attempt first set of source code movement into package --- IOSAccessAssessment.xcodeproj/project.pbxproj | 40 +- .../AttributeEstimationPipeline.swift | 1 + .../OtherAttributeExtensionLegacy.swift | 1 + .../Location/LocationExtension.swift | 1 + .../Location/LocationFromImageExtension.swift | 1 + .../Location/LocationFromMeshExtension.swift | 1 + .../Geospatial/LocationHelpersExtension.swift | 371 ++++++++++++++++++ .../{Geospatial => }/LocationHelpers.swift | 0 .../Definitions/CurrentMappingData.swift | 1 + .../TDEI/OSM/OSMLocation.swift | 6 +- .../TDEI/Services/WorkspaceService.swift | 1 + IOSAccessAssessment/View/ARCameraView.swift | 1 + .../View/TestMode/TestCameraView.swift | 1 + 13 files changed, 403 insertions(+), 23 deletions(-) create mode 100644 IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift rename IOSAccessAssessment/{Geospatial => }/LocationHelpers.swift (100%) diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index fc68742b..d824c8ab 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -49,7 +49,6 @@ A32943552EE8186E00C4C1BC /* OSWPoint.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943542EE8186C00C4C1BC /* OSWPoint.swift */; }; A32943572EE81BF700C4C1BC /* OSWLineString.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943562EE81BF700C4C1BC /* OSWLineString.swift */; }; A32943592EE8204400C4C1BC /* OSWPolygon.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943582EE8204400C4C1BC /* OSWPolygon.swift */; }; - A32D66512F7B343800DC4173 /* LocationHelpers.swift in Sources */ 
= {isa = PBXBuildFile; fileRef = A32D66502F7B343500DC4173 /* LocationHelpers.swift */; }; A32D66532F7C3F2F00DC4173 /* OSWMultiPolygon.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */; }; A32D66F72F7EE88300DC4173 /* v8n_175_16_960.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A32D66F62F7EE88300DC4173 /* v8n_175_16_960.mlpackage */; }; A32D66FB2F7EE9DA00DC4173 /* DamageDetectionModelRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32D66FA2F7EE9D500DC4173 /* DamageDetectionModelRequestProcessor.swift */; }; @@ -62,6 +61,7 @@ A34509D42FA1A49F003157B0 /* ShaderTypes in Frameworks */ = {isa = PBXBuildFile; productRef = A34509D32FA1A49F003157B0 /* ShaderTypes */; }; A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D72FA1A6FA003157B0 /* SafeDeque.swift */; }; A34509DA2FA1A782003157B0 /* MetalBufferUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */; }; + A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */; }; A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */; }; A35547152EC198A600F43AFD /* ContourRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */; }; A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A355471D2EC1A47200F43AFD /* SharedAppData.swift */; }; @@ -73,8 +73,6 @@ A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */; }; A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */; }; A35BB2862DC30386009A3FE0 /* CameraOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35BB2852DC30383009A3FE0 /* CameraOrientation.swift */; }; - A35E050A2EDE299F003C26CF /* LocationManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05092EDE2999003C26CF /* LocationManager.swift */; }; - A35E050D2EDE35E1003C26CF /* LocalizationProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E050C2EDE35DE003C26CF /* LocalizationProcessor.swift */; }; A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */; }; A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */; }; A35E05182EDEA476003C26CF /* AttributeEstimationPipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05172EDEA470003C26CF /* AttributeEstimationPipeline.swift */; }; @@ -274,7 +272,6 @@ A32943542EE8186C00C4C1BC /* OSWPoint.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPoint.swift; sourceTree = ""; }; A32943562EE81BF700C4C1BC /* OSWLineString.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWLineString.swift; sourceTree = ""; }; A32943582EE8204400C4C1BC /* OSWPolygon.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolygon.swift; sourceTree = ""; }; - A32D66502F7B343500DC4173 /* LocationHelpers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationHelpers.swift; sourceTree = ""; }; A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWMultiPolygon.swift; sourceTree = ""; }; A32D66F62F7EE88300DC4173 /* v8n_175_16_960.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = 
folder.mlpackage; path = v8n_175_16_960.mlpackage; sourceTree = ""; }; A32D66FA2F7EE9D500DC4173 /* DamageDetectionModelRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DamageDetectionModelRequestProcessor.swift; sourceTree = ""; }; @@ -285,6 +282,7 @@ A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OtherAttributeExtensionLegacy.swift; sourceTree = ""; }; A34509D72FA1A6FA003157B0 /* SafeDeque.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SafeDeque.swift; sourceTree = ""; }; A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalBufferUtils.swift; sourceTree = ""; }; + A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationHelpersExtension.swift; sourceTree = ""; }; A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2.mlpackage; sourceTree = ""; }; A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourRequestProcessor.swift; sourceTree = ""; }; A355471D2EC1A47200F43AFD /* SharedAppData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppData.swift; sourceTree = ""; }; @@ -296,8 +294,6 @@ A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceSelectionView.swift; sourceTree = ""; }; A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceService.swift; sourceTree = ""; }; A35BB2852DC30383009A3FE0 /* CameraOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = CameraOrientation.swift; sourceTree = ""; }; - A35E05092EDE2999003C26CF /* LocationManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationManager.swift; sourceTree = ""; }; - A35E050C2EDE35DE003C26CF /* LocalizationProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocalizationProcessor.swift; sourceTree = ""; }; A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InvalidContentView.swift; sourceTree = ""; }; A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIChangesetUploadController.swift; sourceTree = ""; }; A35E05172EDEA470003C26CF /* AttributeEstimationPipeline.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttributeEstimationPipeline.swift; sourceTree = ""; }; @@ -475,12 +471,13 @@ 3222F9182B622DFD0019A079 /* IOSAccessAssessment */ = { isa = PBXGroup; children = ( + A34509DC2FA31D9C003157B0 /* New Group */, A3FE166A2E18DD2A00DAE5BE /* Info.plist */, A39C9F392DD9B01200455E45 /* TDEI */, A30801512EC0984F00B1BA3A /* AccessibilityFeature */, A305B05A2E1887AE00ECCF9B /* LocalDataset */, A34B70CC2DDFE638007B191F /* ARCamera */, - A30F59C92F7EF9EE00EE7804 /* Geospatial */, + A34509E22FA31DCC003157B0 /* Geospatial */, A3E84ECE2DDAC7980096A645 /* Annotation */, A33EB5AE2F761C83008ABFB7 /* ComputerVision */, 55659C092BB785EA0094DF01 /* MachineLearning */, @@ -661,16 +658,6 @@ path = Definitions; sourceTree = ""; }; - A30F59C92F7EF9EE00EE7804 /* Geospatial */ = { - isa = PBXGroup; - children = ( - A32D66502F7B343500DC4173 /* LocationHelpers.swift */, - A35E05092EDE2999003C26CF /* LocationManager.swift */, - A35E050C2EDE35DE003C26CF /* LocalizationProcessor.swift */, - ); - path = Geospatial; - sourceTree = ""; - }; A30F59CA2F7EFA8A00EE7804 /* OtherAttributes */ = { isa 
= PBXGroup; children = ( @@ -829,6 +816,21 @@ path = Utils; sourceTree = ""; }; + A34509DC2FA31D9C003157B0 /* New Group */ = { + isa = PBXGroup; + children = ( + ); + path = "New Group"; + sourceTree = ""; + }; + A34509E22FA31DCC003157B0 /* Geospatial */ = { + isa = PBXGroup; + children = ( + A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */, + ); + path = Geospatial; + sourceTree = ""; + }; A34B70CC2DDFE638007B191F /* ARCamera */ = { isa = PBXGroup; children = ( @@ -1471,7 +1473,6 @@ A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */, A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */, A32943592EE8204400C4C1BC /* OSWPolygon.swift in Sources */, - A32D66512F7B343800DC4173 /* LocationHelpers.swift in Sources */, A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */, CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */, @@ -1508,6 +1509,7 @@ A364B5DD2F259AFE00325E5C /* WorldPoints.metal in Sources */, A3B5BDA52F8329A80036C6EC /* WorldPointsGridExtension.swift in Sources */, A3DC22E92DCF0F9A0020CE84 /* ImageProcessing.metal in Sources */, + A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */, A37E72142ED95C0C00CFE4EF /* MeshHelpers.swift in Sources */, A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */, A3C1D7352F84A78E00833411 /* SurfaceNormalsProcessor.swift in Sources */, @@ -1534,7 +1536,6 @@ A37E721D2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift in Sources */, A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */, CAA947762CDE6FBD000C6918 /* LoginView.swift in Sources */, - A35E050A2EDE299F003C26CF /* LocationManager.swift in Sources */, 3222F91A2B622DFD0019A079 /* IOSAccessAssessmentApp.swift in Sources */, A3B5BDA32F8329740036C6EC /* ProjectedWorldPointsExtension.swift in Sources */, A3A45F0C2EE7A4F40029F5AE /* ContourDetectionPolicy.swift in Sources */, @@ -1565,7 +1566,6 @@ 
A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */, A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */, A3E161D22F3A8AF6002D4D08 /* CenterCropTransformUtils.swift in Sources */, - A35E050D2EDE35E1003C26CF /* LocalizationProcessor.swift in Sources */, A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */, A37E3E9B2EFB8F7500B07B77 /* HeadingCoder.swift in Sources */, A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */, diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift index db1a6e1f..3ffb770f 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift @@ -8,6 +8,7 @@ import SwiftUI import CoreLocation import MapKit +import PointNMap enum AttributeEstimationPipelineError: Error, LocalizedError { case configurationError(String) diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift index 2c104007..c0df658a 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift @@ -6,6 +6,7 @@ // import SwiftUI import CoreLocation +import PointNMap /** Extension for attribute calculation with rudimentary methods. 
diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift index 329e3665..321b7f01 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift @@ -6,6 +6,7 @@ // import SwiftUI import CoreLocation +import PointNMap extension AttributeEstimationPipeline { func calculateLocation( diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift index 44bad53e..dcc291c0 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift @@ -6,6 +6,7 @@ // import SwiftUI import CoreLocation +import PointNMap extension AttributeEstimationPipeline { func getLocationFromImageByCentroid( diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift index 4569937c..687078af 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMap extension AttributeEstimationPipeline { func getLocationFromMeshForLineStringByPlane( diff --git 
a/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift b/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift new file mode 100644 index 00000000..5ca8e1ad --- /dev/null +++ b/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift @@ -0,0 +1,371 @@ +// +// LocationHelpers.swift +// IOSAccessAssessment +// +// Created by Himanshu on 3/30/26. +// + +import CoreLocation +import UIKit +import MapKit +import PointNMap + +public extension LocationHelpers { + /** + Calculates the distance between two locations represented by their location details if they have similar geometry types. + Not commutative, checks distance from source to destination, so the order of the parameters matters. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + First, checks the geometry types of the source and destination location details (e.g., point, linestring, polygon) based on the properties of their last location element. Then, based on the geometry types, it calls the appropriate distance calculation method (e.g., distanceBetweenPoints, distanceFromPointToLineString, distanceFromPointToPolygon, distanceBetweenLineStrings, distanceFromLineStringToPolygon, distanceBetweenPolygons) to compute the distance between the two locations. + */ + static func distanceBetweenSimilarOSMLocationDetails( + srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + ) -> Double? 
{ + guard let srcLastLocationElement = srcLocationDetails.locations.last else { + return nil + } +// let isSrcMultipolygon = srcLocationDetails.locations.count > 1 + let isSrcPolygon = srcLastLocationElement.isWay && srcLastLocationElement.isClosed // && (!isSrcMultipolygon) + let isSrcLineString = srcLastLocationElement.isWay && !srcLastLocationElement.isClosed // && (!isSrcMultipolygon) + let isSrcPoint = !srcLastLocationElement.isWay && !srcLastLocationElement.isClosed // && (!isSrcMultipolygon) + + guard let dstLastLocationElement = dstLocationDetails.locations.last else { + return nil + } +// let isDstMultipolygon = dstLocationDetails.locations.count > 1 + let isDstPolygon = dstLastLocationElement.isWay && dstLastLocationElement.isClosed // && (!isDstMultipolygon) + let isDstLineString = dstLastLocationElement.isWay && !dstLastLocationElement.isClosed // && (!isDstMultipolygon) + let isDstPoint = !dstLastLocationElement.isWay && !dstLastLocationElement.isClosed // && (!isDstMultipolygon) + + if isSrcPoint && isDstPoint { + return distanceBetweenPoints(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) + } else if isSrcLineString && isDstLineString { + return distanceBetweenLineStrings(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) + } else if isSrcPolygon && isDstPolygon { + return distanceBetweenPolygons(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) + } else { + return nil + } + } + + /** + Calculates the distance between two points represented by their location details. The distance is returned in meters. + Unit of distance is determined by MapKit's MKMapPoint. + */ + static func distanceBetweenPoints( + srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == false, srcLocationElement.isClosed == false, + let srcLocationCoordinate = srcLocationElement.coordinates.last, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == false, dstLocationElement.isClosed == false, + let dstLocationCoordinate = dstLocationElement.coordinates.last else { + return nil + } + let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) + let dstLocation = CLLocation(latitude: dstLocationCoordinate.latitude, longitude: dstLocationCoordinate.longitude) + return MKDistanceHelpers.distanceBetweenPoints(srcPoint: MKMapPoint(srcLocationCoordinate), dstPoint: MKMapPoint(dstLocationCoordinate)) + } + + /** + Calculates the shortest distance from a point to a linestring represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the linestring into map points, then iterates through each line segment of the linestring and calculates the distance from the point to that line segment using the distanceFromPointToLineSegment method. The minimum distance found across all segments is returned as the distance from the point to the linestring. + */ + static func distanceFromPointToLineString( + srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == false, srcLocationElement.isClosed == false, + let srcLocationCoordinate = srcLocationElement.coordinates.last, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == false else { + return nil + } + let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) + let srcMapPoint = MKMapPoint(srcLocationCoordinate) + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + var minDistance: Double = Double.infinity + for i in 0..<(dstMapPoints.count - 1) { + let lineStart = dstMapPoints[i] + let lineEnd = dstMapPoints[i + 1] + if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( + srcPoint: srcMapPoint, lineStart: lineStart, lineEnd: lineEnd + ) { + minDistance = min(minDistance, distance) + } + } + return minDistance + } + + /** + Calculates the shortest distance from a point to a polygon (single polygon) represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the polygon into map points, then iterates through each edge of the polygon and calculates the distance from the point to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges is returned as the distance from the point to the polygon. If the point is inside the polygon, the distance returned is 0. + */ + static func distanceFromPointToPolygon( + srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == false, srcLocationElement.isClosed == false, + let srcLocationCoordinate = srcLocationElement.coordinates.last, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { + return nil + } + let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) + let srcMapPoint = MKMapPoint(srcLocationCoordinate) + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + return MKDistanceHelpers.distanceFromPointToPolygon(srcPoint: srcMapPoint, polygonPoints: dstMapPoints) + } + +// static func distanceFromPointToMultiPolygon( +// srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails +// ) -> Double? { +// var minDistance: Double = Double.infinity +// dstLocationDetails.locations.forEach { locationElement in +// guard locationElement.isWay == true, locationElement.isClosed == true else { +// return +// } +// let singlePolygonLocationDetails = OSMLocationDetails(locations: [locationElement]) +// if let distance = distanceFromPointToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { +// minDistance = min(minDistance, distance) +// } +// } +// return minDistance +// } + + /** + Calculates the shortest distance between two linestrings represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the linestrings into map points, then iterates through each line segment of the dst linestring and calculates the distance from each point in the source linestring to that line segment. 
The minimum distance found across all segments and points is returned as the distance between the two linestrings. + + - Warning: + The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, two linestrings may partially overlap with each other, and the distance should reflect how much of the linestrings are outside of each other rather than just indicating that there is some overlap. + */ + static func distanceBetweenLineStrings( + srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + ) -> Double? { + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == true, srcLocationElement.isClosed == false, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == false else { + return nil + } + let srcLocationCoordinates = srcLocationElement.coordinates + let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + + var minDistance: Double = Double.infinity + for i in 0..<(dstMapPoints.count - 1) { + let lineStart = dstMapPoints[i] + let lineEnd = dstMapPoints[i + 1] + for srcPoint in srcMapPoints { + if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( + srcPoint: srcPoint, lineStart: lineStart, lineEnd: lineEnd + ) { + minDistance = min(minDistance, distance) + } + } + } + return minDistance + } + + /** + Calculates the shortest distance from a linestring to a polygon (single polygon) represented by their location details. 
+ Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the linestring and polygon into map points, then iterates through each edge of the polygon and calculates the distance from each point in the linestring to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance from the linestring to the polygon. If any point of the linestring is inside the polygon, the distance returned is 0. + + - Warning: + The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, a linestring may partially overlap with a polygon, and the distance should reflect how much of the linestring is outside the polygon rather than just indicating that there is some overlap. + */ + static func distanceFromLineStringToPolygon( + srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == true, srcLocationElement.isClosed == false, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { + return nil + } + let srcLocationCoordinates = srcLocationElement.coordinates + let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + + var minDistance: Double = Double.infinity + for i in 0..<(dstMapPoints.count - 1) { + let lineStart = dstMapPoints[i] + let lineEnd = dstMapPoints[i + 1] + for srcPoint in srcMapPoints { + if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( + srcPoint: srcPoint, lineStart: lineStart, lineEnd: lineEnd + ) { + minDistance = min(minDistance, distance) + } + } + } + return minDistance + } + +// static func distanceFromLineStringToMultiPolygon( +// srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails +// ) -> Double? 
{ +// var minDistance: Double = Double.infinity +// dstLocationDetails.locations.forEach { locationElement in +// guard locationElement.isWay == true, locationElement.isClosed == true else { +// return +// } +// let singlePolygonLocationDetails = OSMLocationDetails(locations: [locationElement]) +// if let distance = distanceFromLineStringToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { +// minDistance = min(minDistance, distance) +// } +// } +// return minDistance +// } + + /** + Calculates the shortest distance between two polygons (single polygons) represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the polygons into map points, then iterates through each edge of the first polygon and calculates the distance from each point in the second polygon to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance between the two polygons. If any point of one polygon is inside the other polygon, the distance returned is 0. + + - Warning: + The logic for overlapping polygons needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, two polygons may partially overlap with each other, and the distance should reflect how much of the polygons are outside of each other rather than just indicating that there is some overlap. + */ + static func distanceBetweenPolygons( + srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == true, srcLocationElement.isClosed == true, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { + return nil + } + let srcLocationCoordinates = srcLocationElement.coordinates + let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + + var minDistance: Double = Double.infinity + for srcPoint in srcMapPoints { + if let distance = MKDistanceHelpers.distanceFromPointToPolygon(srcPoint: srcPoint, polygonPoints: dstMapPoints) { + minDistance = min(minDistance, distance) + } + } + return minDistance + } + + /** + Calculates the shortest distance between two polygons represented by their location details. + Can have negative distance if there is polygon overlap, where the absolute value of negative distances represents the degree of overlap. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the polygons into map points, then iterates through each edge of the source polygon and calculates the distance from each point in the destination polygon to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance between the two polygons. + + - Warning: + The logic for overlapping polygons needs to be updated, so that it captures the degree of overlap instead of just returning 0. 
This is because in some cases, two polygons may partially overlap with each other, and the distance should reflect how much of the polygons are outside of each other rather than just indicating that there is some overlap. + + - Warning: + Currently, this algorithm doesn't actually consider the relation role of each multi-polygon member (e.g. outer vs inner), which can lead to inaccurate distance calculations in some cases. For example, if one of the multi-polygons has an inner member that overlaps with the other multi-polygon, the distance should be negative to reflect the degree of overlap. However, without considering the relation type, the algorithm may simply return a distance of 0 for this case, which does not accurately capture the spatial relationship between the two multi-polygons. + */ +// static func distanceBetweenMultiPolygons( +// srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails +// ) -> Double? { +// let srcLocationCoordinateArrays = srcLocationDetails.locations +// let dstLocationCoordinateArrays = dstLocationDetails.locations +// guard srcLocationCoordinateArrays.count > 0, dstLocationCoordinateArrays.count > 0 else { +// return nil +// } +// +// var minDistance: Double = Double.infinity +// for srcLocationCoordinateArray in srcLocationCoordinateArrays { +// for dstLocationCoordinateArray in dstLocationCoordinateArrays { +// let srcOSMLocationDetails = OSMLocationDetails(locations: [srcLocationCoordinateArray]) +// let dstOSMLocationDetails = OSMLocationDetails(locations: [dstLocationCoordinateArray]) +// /// While deciding the geometry, we are not using the .polygon enumeration, since that actually represents a multipolygon in OSW. +// let srcGeometry: OSWGeometry = srcLocationCoordinateArray.isWay ? .linestring : .point +// let isSrcPolygon = srcLocationCoordinateArray.isWay && srcLocationCoordinateArray.isClosed +// let dstGeometry: OSWGeometry = dstLocationCoordinateArray.isWay ? 
.linestring : .point +// let isDstPolygon = dstLocationCoordinateArray.isWay && dstLocationCoordinateArray.isClosed +// +// /// Must ensure the same units (in this case, decided by MKMapPoint) +// if (srcGeometry == .point && dstGeometry == .point) { +// guard let distance = distanceBetweenPoints( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if (srcGeometry == .point && (dstGeometry == .linestring && !isDstPolygon)) { +// guard let distance = distanceFromPointToLineString( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if (srcGeometry == .point && (dstGeometry == .linestring && isDstPolygon)) { +// guard let distance = distanceFromPointToPolygon( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if ((srcGeometry == .linestring && !isSrcPolygon) && (dstGeometry == .linestring && !isDstPolygon)) { +// guard let distance = distanceBetweenLineStrings( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if ((srcGeometry == .linestring && !isSrcPolygon) && (dstGeometry == .linestring && isDstPolygon)) { +// guard let distance = distanceFromLineStringToPolygon( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if ((srcGeometry == .linestring && isSrcPolygon) && (dstGeometry == .linestring && isDstPolygon)) { +// guard let distance = distanceBetweenPolygons( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: 
dstOSMLocationDetails +// ) else { +// continue +// } +// } +// else { +// continue +// } +// } +// } +// return minDistance +// } +} + diff --git a/IOSAccessAssessment/Geospatial/LocationHelpers.swift b/IOSAccessAssessment/LocationHelpers.swift similarity index 100% rename from IOSAccessAssessment/Geospatial/LocationHelpers.swift rename to IOSAccessAssessment/LocationHelpers.swift diff --git a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift index e28ee94d..d45f2138 100644 --- a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift +++ b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMap enum CurrentMappingDataError: Error, LocalizedError { } diff --git a/IOSAccessAssessment/TDEI/OSM/OSMLocation.swift b/IOSAccessAssessment/TDEI/OSM/OSMLocation.swift index df2d4f35..1714241b 100644 --- a/IOSAccessAssessment/TDEI/OSM/OSMLocation.swift +++ b/IOSAccessAssessment/TDEI/OSM/OSMLocation.swift @@ -64,7 +64,7 @@ struct OSMLocationElement: Codable, Sendable { For relation support: we can treat OSMLocationDetails as a tree of OSMLocationElement structs, where each OSMLocationElement can either have a set of coordinates (for nodes and ways) or a set of child OSMLocationElements (for relations). This way, we can represent the hierarchical nature of OSM data while still maintaining a clear structure for each element type. This will be an easier modification because we can simply add an optional `members` property to OSMLocationElement that can hold child elements, and update the encoding/decoding logic to handle this new property appropriately. However, this will need modification to caller code that constructs/uses/modifies OSMLocationDetails, because they will need to account for the possibility of nested members when working with OSM data. 
*/ -struct OSMLocationDetails: Codable, Sendable { +public struct OSMLocationDetails: Codable, Sendable { var locations: [OSMLocationElement] init(locations: [OSMLocationElement]) { @@ -75,12 +75,12 @@ struct OSMLocationDetails: Codable, Sendable { case locations } - func encode(to encoder: Encoder) throws { + public func encode(to encoder: Encoder) throws { var container = encoder.container(keyedBy: CodingKeys.self) try container.encode(locations, forKey: .locations) } - init(from decoder: Decoder) throws { + public init(from decoder: Decoder) throws { let container = try decoder.container(keyedBy: CodingKeys.self) self.locations = try container.decode([OSMLocationElement].self, forKey: .locations) } diff --git a/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift b/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift index 900075c4..ed0c339b 100644 --- a/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift +++ b/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMap struct Workspace: Codable, Hashable { let id: Int diff --git a/IOSAccessAssessment/View/ARCameraView.swift b/IOSAccessAssessment/View/ARCameraView.swift index 5e356fe0..d4496b08 100644 --- a/IOSAccessAssessment/View/ARCameraView.swift +++ b/IOSAccessAssessment/View/ARCameraView.swift @@ -12,6 +12,7 @@ import Metal import CoreImage import MetalKit import CoreLocation +import PointNMap enum ARCameraViewConstants { enum Texts { diff --git a/IOSAccessAssessment/View/TestMode/TestCameraView.swift b/IOSAccessAssessment/View/TestMode/TestCameraView.swift index f79484f0..9cec20a4 100644 --- a/IOSAccessAssessment/View/TestMode/TestCameraView.swift +++ b/IOSAccessAssessment/View/TestMode/TestCameraView.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMap /** Additional constants unique to TestCameraView (not used in ARCameraView) From 926bf2825534387a08a4b80094a7155f0231d201 Mon Sep 17 00:00:00 2001 
From: himanshunaidu Date: Thu, 30 Apr 2026 00:02:48 -0700 Subject: [PATCH 03/14] Move back main files to main app --- IOSAccessAssessment.xcodeproj/project.pbxproj | 122 ++++++++++----- .../Image/Depth/DepthFilter.swift | 10 +- .../Image/Depth/DepthFiltering.metal | 2 +- .../Segmentation/SegmentationARPipeline.swift | 1 + .../Geospatial/LocalizationProcessor.swift | 37 +++-- .../Geospatial/LocationHelpers.swift | 148 ++++++++++++++++++ .../Geospatial/LocationManager.swift | 20 +-- .../Shared/Definitions/RasterizeConfig.swift | 21 +++ .../PointNMap/Shared/Utils/Extensions.swift | 14 ++ 9 files changed, 307 insertions(+), 68 deletions(-) rename IOSAccessAssessment/{ => PointNMapShared/Sources/PointNMap}/Geospatial/LocalizationProcessor.swift (95%) create mode 100644 IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift rename IOSAccessAssessment/{ => PointNMapShared/Sources/PointNMap}/Geospatial/LocationManager.swift (82%) create mode 100644 IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift create mode 100644 IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Utils/Extensions.swift diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index d824c8ab..623adc79 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 60; + objectVersion = 56; objects = { /* Begin PBXBuildFile section */ @@ -38,6 +38,13 @@ A30F59D02F7EFAC700EE7804 /* RunninSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */; }; A30F59D22F7EFACD00EE7804 /* CrossSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */; }; A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */ = {isa = 
PBXBuildFile; fileRef = A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */; }; + A312FD682FA3308F0044808E /* LocationManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD662FA3308F0044808E /* LocationManager.swift */; }; + A312FD692FA3308F0044808E /* LocationHelpers.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD652FA3308F0044808E /* LocationHelpers.swift */; }; + A312FD6A2FA3308F0044808E /* LocalizationProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD642FA3308F0044808E /* LocalizationProcessor.swift */; }; + A312FD702FA330B80044808E /* RasterizeConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD6B2FA330B80044808E /* RasterizeConfig.swift */; }; + A312FD712FA330B80044808E /* Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD6D2FA330B80044808E /* Extensions.swift */; }; + A312FD742FA336020044808E /* DepthFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD722FA336020044808E /* DepthFilter.swift */; }; + A312FD752FA336020044808E /* DepthFiltering.metal in Sources */ = {isa = PBXBuildFile; fileRef = A312FD732FA336020044808E /* DepthFiltering.metal */; }; A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */; }; A3281AEE2F3950210003E396 /* MTLTextureUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3281AED2F39501E0003E396 /* MTLTextureUtils.swift */; }; A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */; }; @@ -57,8 +64,6 @@ A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */; }; A3431E022F26FA2C00B96610 /* LocationExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E012F26FA2700B96610 /* LocationExtension.swift */; }; A3431E042F26FA7200B96610 /* 
OtherAttributeExtensionLegacy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */; }; - A34509D22FA1A49F003157B0 /* PointNMap in Frameworks */ = {isa = PBXBuildFile; productRef = A34509D12FA1A49F003157B0 /* PointNMap */; }; - A34509D42FA1A49F003157B0 /* ShaderTypes in Frameworks */ = {isa = PBXBuildFile; productRef = A34509D32FA1A49F003157B0 /* ShaderTypes */; }; A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D72FA1A6FA003157B0 /* SafeDeque.swift */; }; A34509DA2FA1A782003157B0 /* MetalBufferUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */; }; A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */; }; @@ -79,8 +84,6 @@ A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05192EDFB015003C26CF /* OSMPayload.swift */; }; A35E051C2EDFB094003C26CF /* OSMNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E051B2EDFB093003C26CF /* OSMNode.swift */; }; A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E051D2EDFB099003C26CF /* OSMWay.swift */; }; - A364B5332F25576000325E5C /* DepthFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A364B5322F25575D00325E5C /* DepthFilter.swift */; }; - A364B5352F25589B00325E5C /* DepthFiltering.metal in Sources */ = {isa = PBXBuildFile; fileRef = A364B5342F25589600325E5C /* DepthFiltering.metal */; }; A364B5D92F259AD700325E5C /* PlaneProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A364B5D82F259AD600325E5C /* PlaneProcessor.swift */; }; A364B5DD2F259AFE00325E5C /* WorldPoints.metal in Sources */ = {isa = PBXBuildFile; fileRef = A364B5DC2F259AF900325E5C /* WorldPoints.metal */; }; A364B5DF2F26DB5700325E5C /* WorldPointsProcessor.swift in 
Sources */ = {isa = PBXBuildFile; fileRef = A364B5DE2F26DB5300325E5C /* WorldPointsProcessor.swift */; }; @@ -261,6 +264,13 @@ A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunninSlopeExtension.swift; sourceTree = ""; }; A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CrossSlopeExtension.swift; sourceTree = ""; }; A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceIntegrityExtension.swift; sourceTree = ""; }; + A312FD642FA3308F0044808E /* LocalizationProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocalizationProcessor.swift; sourceTree = ""; }; + A312FD652FA3308F0044808E /* LocationHelpers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationHelpers.swift; sourceTree = ""; }; + A312FD662FA3308F0044808E /* LocationManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationManager.swift; sourceTree = ""; }; + A312FD6B2FA330B80044808E /* RasterizeConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RasterizeConfig.swift; sourceTree = ""; }; + A312FD6D2FA330B80044808E /* Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Extensions.swift; sourceTree = ""; }; + A312FD722FA336020044808E /* DepthFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthFilter.swift; sourceTree = ""; }; + A312FD732FA336020044808E /* DepthFiltering.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = DepthFiltering.metal; sourceTree = ""; }; A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
ARCameraViewController.swift; sourceTree = ""; }; A3281AED2F39501E0003E396 /* MTLTextureUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MTLTextureUtils.swift; sourceTree = ""; }; A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolicy.swift; sourceTree = ""; }; @@ -300,8 +310,6 @@ A35E05192EDFB015003C26CF /* OSMPayload.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMPayload.swift; sourceTree = ""; }; A35E051B2EDFB093003C26CF /* OSMNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMNode.swift; sourceTree = ""; }; A35E051D2EDFB099003C26CF /* OSMWay.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMWay.swift; sourceTree = ""; }; - A364B5322F25575D00325E5C /* DepthFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthFilter.swift; sourceTree = ""; }; - A364B5342F25589600325E5C /* DepthFiltering.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = DepthFiltering.metal; sourceTree = ""; }; A364B5D82F259AD600325E5C /* PlaneProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaneProcessor.swift; sourceTree = ""; }; A364B5DC2F259AF900325E5C /* WorldPoints.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = WorldPoints.metal; sourceTree = ""; }; A364B5DE2F26DB5300325E5C /* WorldPointsProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorldPointsProcessor.swift; sourceTree = ""; }; @@ -424,10 +432,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - A34509D22FA1A49F003157B0 /* PointNMap in Frameworks */, A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */, A3FCC2FB2DA4E1880037AB43 /* OrderedCollections in Frameworks */, - A34509D42FA1A49F003157B0 /* 
ShaderTypes in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -471,7 +477,7 @@ 3222F9182B622DFD0019A079 /* IOSAccessAssessment */ = { isa = PBXGroup; children = ( - A34509DC2FA31D9C003157B0 /* New Group */, + A312FD612FA330620044808E /* PointNMapShared */, A3FE166A2E18DD2A00DAE5BE /* Info.plist */, A39C9F392DD9B01200455E45 /* TDEI */, A30801512EC0984F00B1BA3A /* AccessibilityFeature */, @@ -679,6 +685,66 @@ path = Location; sourceTree = ""; }; + A312FD612FA330620044808E /* PointNMapShared */ = { + isa = PBXGroup; + children = ( + A312FD622FA3307D0044808E /* Sources */, + ); + path = PointNMapShared; + sourceTree = ""; + }; + A312FD622FA3307D0044808E /* Sources */ = { + isa = PBXGroup; + children = ( + A312FD632FA330840044808E /* PointNMap */, + ); + path = Sources; + sourceTree = ""; + }; + A312FD632FA330840044808E /* PointNMap */ = { + isa = PBXGroup; + children = ( + A312FD672FA3308F0044808E /* Geospatial */, + A312FD6F2FA330B80044808E /* Shared */, + ); + path = PointNMap; + sourceTree = ""; + }; + A312FD672FA3308F0044808E /* Geospatial */ = { + isa = PBXGroup; + children = ( + A312FD642FA3308F0044808E /* LocalizationProcessor.swift */, + A312FD652FA3308F0044808E /* LocationHelpers.swift */, + A312FD662FA3308F0044808E /* LocationManager.swift */, + ); + path = Geospatial; + sourceTree = ""; + }; + A312FD6C2FA330B80044808E /* Definitions */ = { + isa = PBXGroup; + children = ( + A312FD6B2FA330B80044808E /* RasterizeConfig.swift */, + ); + path = Definitions; + sourceTree = ""; + }; + A312FD6E2FA330B80044808E /* Utils */ = { + isa = PBXGroup; + children = ( + A312FD6D2FA330B80044808E /* Extensions.swift */, + ); + path = Utils; + sourceTree = ""; + }; + A312FD6F2FA330B80044808E /* Shared */ = { + isa = PBXGroup; + children = ( + A312FD6C2FA330B80044808E /* Definitions */, + A312FD6E2FA330B80044808E /* Utils */, + ); + path = Shared; + sourceTree = ""; + }; A31A1E772EAC49E3008B30B7 /* UI */ = { isa = PBXGroup; children = ( @@ -816,13 +882,6 @@ path 
= Utils; sourceTree = ""; }; - A34509DC2FA31D9C003157B0 /* New Group */ = { - isa = PBXGroup; - children = ( - ); - path = "New Group"; - sourceTree = ""; - }; A34509E22FA31DCC003157B0 /* Geospatial */ = { isa = PBXGroup; children = ( @@ -1021,8 +1080,8 @@ A38338C42EDAF3DC00F1A402 /* Depth */ = { isa = PBXGroup; children = ( - A364B5342F25589600325E5C /* DepthFiltering.metal */, - A364B5322F25575D00325E5C /* DepthFilter.swift */, + A312FD722FA336020044808E /* DepthFilter.swift */, + A312FD732FA336020044808E /* DepthFiltering.metal */, A38338C52EDAF3E500F1A402 /* DepthMapProcessor.swift */, ); path = Depth; @@ -1385,7 +1444,6 @@ mainGroup = 3222F90D2B622DFD0019A079; packageReferences = ( CAF812C12CFA0FD400D44B84 /* XCRemoteSwiftPackageReference "swift-collections" */, - A34509D02FA1A49F003157B0 /* XCLocalSwiftPackageReference "../PointNMap" */, ); productRefGroup = 3222F9172B622DFD0019A079 /* Products */; projectDirPath = ""; @@ -1450,7 +1508,6 @@ A32943502EE80EC400C4C1BC /* OSMRelation.swift in Sources */, A308015C2EC09BB700B1BA3A /* CityscapesClassConfig.swift in Sources */, A308015D2EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift in Sources */, - A364B5352F25589B00325E5C /* DepthFiltering.metal in Sources */, A35547C82EC1B0DB00F43AFD /* CurrentMappedFeaturesData.swift in Sources */, A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */, A35547C42EC1AF5700F43AFD /* CaptureData.swift in Sources */, @@ -1488,7 +1545,6 @@ A35547CC2EC3018E00F43AFD /* AnnotationView.swift in Sources */, A3FE16612E18BA5900DAE5BE /* RGBCoder.swift in Sources */, CA924A932CEB9AB000FCA928 /* ChangesetService.swift in Sources */, - A364B5332F25576000325E5C /* DepthFilter.swift in Sources */, A3DA4DBE2EBCB9F9005BB812 /* MetalContext.swift in Sources */, A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */, A35E051C2EDFB094003C26CF /* OSMNode.swift in Sources */, @@ -1545,6 +1601,8 @@ A37C3C1A2F3144F7001F4248 /* PlaneAttributeProcessor.swift in 
Sources */, A32943532EE814A700C4C1BC /* OSWElement.swift in Sources */, A30801502EC0926800B1BA3A /* ContourUtils.swift in Sources */, + A312FD742FA336020044808E /* DepthFilter.swift in Sources */, + A312FD752FA336020044808E /* DepthFiltering.metal in Sources */, A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */, A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */, A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */, @@ -1561,6 +1619,9 @@ A3281AEE2F3950210003E396 /* MTLTextureUtils.swift in Sources */, A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */, A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */, + A312FD682FA3308F0044808E /* LocationManager.swift in Sources */, + A312FD692FA3308F0044808E /* LocationHelpers.swift in Sources */, + A312FD6A2FA3308F0044808E /* LocalizationProcessor.swift in Sources */, A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */, A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */, A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */, @@ -1581,6 +1642,8 @@ A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, A3DC22EF2DCF119A0020CE84 /* HomographyTransformFilter.swift in Sources */, A3EE6E542F67A41100F515E6 /* UtilityExtension.swift in Sources */, + A312FD702FA330B80044808E /* RasterizeConfig.swift in Sources */, + A312FD712FA330B80044808E /* Extensions.swift in Sources */, A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, A3C1D7442F886D3500833411 /* SurfaceIntegrity.metal in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, @@ -1970,13 +2033,6 @@ }; /* End XCConfigurationList section */ -/* Begin XCLocalSwiftPackageReference section */ - A34509D02FA1A49F003157B0 /* XCLocalSwiftPackageReference "../PointNMap" */ = { - isa = XCLocalSwiftPackageReference; - relativePath = ../PointNMap; - }; -/* End XCLocalSwiftPackageReference section */ - /* Begin 
XCRemoteSwiftPackageReference section */ A3C22FD62CF2F0C300533BF7 /* XCRemoteSwiftPackageReference "swift-collections" */ = { isa = XCRemoteSwiftPackageReference; @@ -1997,14 +2053,6 @@ /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ - A34509D12FA1A49F003157B0 /* PointNMap */ = { - isa = XCSwiftPackageProductDependency; - productName = PointNMap; - }; - A34509D32FA1A49F003157B0 /* ShaderTypes */ = { - isa = XCSwiftPackageProductDependency; - productName = ShaderTypes; - }; A3C22FD72CF2F0C300533BF7 /* DequeModule */ = { isa = XCSwiftPackageProductDependency; package = A3C22FD62CF2F0C300533BF7 /* XCRemoteSwiftPackageReference "swift-collections" */; diff --git a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFilter.swift b/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFilter.swift index f67ffa6e..46054419 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFilter.swift +++ b/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFilter.swift @@ -10,14 +10,14 @@ import Metal import CoreImage import MetalKit -enum DepthFilterError: Error, LocalizedError { +public enum DepthFilterError: Error, LocalizedError { case metalInitializationFailed case invalidInputImage case textureCreationFailed case metalPipelineCreationError case outputImageCreationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." @@ -36,7 +36,7 @@ enum DepthFilterError: Error, LocalizedError { /** DepthFilter applies depth-based filtering to images using Metal. */ -struct DepthFilter { +public struct DepthFilter { private let device: MTLDevice private let commandQueue: MTLCommandQueue private let pipeline: MTLComputePipelineState @@ -45,7 +45,7 @@ struct DepthFilter { private let ciContext: CIContext private let outputColorSpace: CGColorSpace? 
= nil //CGColorSpace(name: CGColorSpace.linearGray) - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw DepthFilterError.metalInitializationFailed @@ -63,7 +63,7 @@ struct DepthFilter { self.pipeline = pipeline } - func apply( + public func apply( to inputImage: CIImage, depthImage: CIImage, depthMinThreshold: Float, depthMaxThreshold: Float ) throws -> CIImage { diff --git a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFiltering.metal b/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFiltering.metal index 175d01c2..711493b8 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFiltering.metal +++ b/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFiltering.metal @@ -8,7 +8,7 @@ #include #include using namespace metal; -#import "ShaderTypes.h" +#import "../../../ShaderTypes.h" extern "C" kernel void depthFilteringKernel( texture2d inputTexture [[texture(0)]], diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift index 68a24967..0a870f8c 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift +++ b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift @@ -11,6 +11,7 @@ import CoreML import OrderedCollections import simd +import PointNMap enum SegmentationARPipelineError: Error, LocalizedError { case isProcessingTrue diff --git a/IOSAccessAssessment/Geospatial/LocalizationProcessor.swift b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Geospatial/LocalizationProcessor.swift similarity index 95% rename from IOSAccessAssessment/Geospatial/LocalizationProcessor.swift rename to IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Geospatial/LocalizationProcessor.swift index 72245555..c10366d0 100644 --- a/IOSAccessAssessment/Geospatial/LocalizationProcessor.swift +++ 
b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Geospatial/LocalizationProcessor.swift @@ -10,16 +10,21 @@ import CoreImage import CoreLocation import simd -struct PointWithDepth: Sendable, Equatable, Hashable, Codable { - let point: CGPoint - let depth: Float +public struct PointWithDepth: Sendable, Equatable, Hashable, Codable { + public let point: CGPoint + public let depth: Float + + public init(point: CGPoint, depth: Float) { + self.point = point + self.depth = depth + } } -enum LocalizationProcessorError: Error, LocalizedError { +public enum LocalizationProcessorError: Error, LocalizedError { case invalidBounds case divisionByZero - var errorDescription: String? { + public var errorDescription: String? { switch self { case .invalidBounds: return "The provided bounds for localization are invalid." @@ -29,9 +34,11 @@ enum LocalizationProcessorError: Error, LocalizedError { } } -struct LocalizationProcessor { +public struct LocalizationProcessor { let RADIUS = 6378137.0 + public init() { } + /** Calculate the location of an object at a given point with depth in the image. @@ -49,7 +56,7 @@ struct LocalizationProcessor { - Note: Assumes that ARKit has the world alignment set to `ARWorldAlignment.gravityAndHeading`. */ - func calculateLocation( + public func calculateLocation( point: CGPoint, depth: Float, imageSize: CGSize, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3, @@ -68,7 +75,7 @@ struct LocalizationProcessor { ) } - func calculateLocation( + public func calculateLocation( worldPoint: SIMD3, cameraTransform: simd_float4x4, deviceLocation: CLLocationCoordinate2D @@ -90,7 +97,7 @@ struct LocalizationProcessor { - NOTE: This method is primarily for testing and debugging purposes. 
*/ - func calculateDelta( + public func calculateDelta( point: CGPoint, depth: Float, imageSize: CGSize, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3 @@ -103,7 +110,7 @@ struct LocalizationProcessor { return SIMD2( -delta.z, delta.x ) } - func calculateDelta( + public func calculateDelta( worldPoint: SIMD3, cameraTransform: simd_float4x4 ) -> SIMD2 { @@ -114,7 +121,7 @@ struct LocalizationProcessor { return SIMD2( -delta.z, delta.x ) } - func calculateLocation( + public func calculateLocation( latitudeDelta: Float, longitudeDelta: Float, deviceLocation: CLLocationCoordinate2D ) -> CLLocationCoordinate2D { @@ -146,7 +153,7 @@ struct LocalizationProcessor { ) } - func getDeltaFromPoint( + public func getDeltaFromPoint( point: CGPoint, depth: Float, imageSize: CGSize, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3 @@ -213,7 +220,7 @@ extension LocalizationProcessor { TODO: Improve upon this basic width calculation method. */ - func calculateWidth( + public func calculateWidth( trapezoidBoundsWithDepth: [PointWithDepth], imageSize: CGSize, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3 @@ -262,7 +269,7 @@ extension LocalizationProcessor { TODO: Improve upon this basic slope calculation method. 
*/ - func calculateRunningSlope( + public func calculateRunningSlope( trapezoidBoundsWithDepth: [PointWithDepth], imageSize: CGSize, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3 @@ -300,7 +307,7 @@ extension LocalizationProcessor { return slopeInDegrees } - func calculateCrossSlope( + public func calculateCrossSlope( trapezoidBoundsWithDepth: [PointWithDepth], imageSize: CGSize, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3 diff --git a/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift new file mode 100644 index 00000000..5c322cea --- /dev/null +++ b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift @@ -0,0 +1,148 @@ +// +// LocationHelpers.swift +// IOSAccessAssessment +// +// Created by Himanshu on 3/30/26. +// + +import CoreLocation +import UIKit +import MapKit + +public struct BBox { + public let minLat: Double + public let maxLat: Double + public let minLon: Double + public let maxLon: Double + + public init(minLat: Double, maxLat: Double, minLon: Double, maxLon: Double) { + self.minLat = minLat + self.maxLat = maxLat + self.minLon = minLon + self.maxLon = maxLon + } + + public func toQueryString() -> String { + return "\(minLon.roundedTo7Digits()),\(minLat.roundedTo7Digits()),\(maxLon.roundedTo7Digits()),\(maxLat.roundedTo7Digits())" + } +} + +public struct LocationHelpers { + /** + Calculates a bounding box around a given location with a specified radius. The bounding box is represented by its minimum and maximum latitude and longitude values. 
+ */ + public static func boundingBoxAroundLocation(location: CLLocationCoordinate2D, radius: CLLocationDistance) -> BBox { + let region = MKCoordinateRegion(center: location, latitudinalMeters: radius, longitudinalMeters: radius) + let center = region.center + let span = region.span + let minLat = center.latitude - span.latitudeDelta + let maxLat = center.latitude + span.latitudeDelta + let minLon = center.longitude - span.longitudeDelta + let maxLon = center.longitude + span.longitudeDelta + + return BBox(minLat: minLat, maxLat: maxLat, minLon: minLon, maxLon: maxLon) + } + + public struct MKDistanceHelpers { + public static func distanceBetweenPoints(srcPoint: MKMapPoint, dstPoint: MKMapPoint) -> Double { + return sqrt(pow(srcPoint.x - dstPoint.x, 2) + pow(srcPoint.y - dstPoint.y, 2)) + } + + /** + Calculates the shortest distance from a point to a line segment defined by two endpoints. The distance is returned in the same units as the coordinates (e.g., meters if using map points). + + - Parameters: + - srcPoint: The point from which the distance to the line segment is calculated. + - lineStart: The starting point of the line segment. + - lineEnd: The ending point of the line segment. + + - Procedure: + If lineStart = A and lineEnd = B: + Line Segment can be defined by L(t) = A + t(B - A), where t is a scalar in [0, 1]. + The distance from a point P to the line segment AB can be found by: + 1. Finding the projection of P onto the line defined by A and B, which gives us a point Q. + 2. If Q lies within the segment AB (i.e., t is between 0 and 1), then the distance from P to AB is the distance from P to Q. + 3. If Q does not lie within the segment AB, then the distance from P to AB is the minimum of the distances from P to A and P to B. + */ + public static func distanceFromPointToLineSegment( + srcPoint: MKMapPoint, lineStart: MKMapPoint, lineEnd: MKMapPoint + ) -> Double? 
{ + let AP = MKMapPoint(x: srcPoint.x - lineStart.x, y: srcPoint.y - lineStart.y) + let AB = MKMapPoint(x: lineEnd.x - lineStart.x, y: lineEnd.y - lineStart.y) + let AB_length_squared = AB.x * AB.x + AB.y * AB.y + guard AB_length_squared != 0 else { + // lineStart and lineEnd are the same point, return distance from srcPoint to this point + return sqrt(AP.x * AP.x + AP.y * AP.y) + } + let t = (AP.x * AB.x + AP.y * AB.y) / AB_length_squared + + if t > 0 && t <= 1 { + // Projection falls on the line segment + let projection = MKMapPoint(x: lineStart.x + t * AB.x, y: lineStart.y + t * AB.y) + let distance = sqrt(pow(srcPoint.x - projection.x, 2) + pow(srcPoint.y - projection.y, 2)) + return distance + } else { + // Projection falls outside the line segment, return the minimum distance to the endpoints + // Can check the value of t to determine which endpoint is closer, but this also works without that check. + let distanceToStart = sqrt(pow(srcPoint.x - lineStart.x, 2) + pow(srcPoint.y - lineStart.y, 2)) + let distanceToEnd = sqrt(pow(srcPoint.x - lineEnd.x, 2) + pow(srcPoint.y - lineEnd.y, 2)) + return min(distanceToStart, distanceToEnd) + } + } + + public static func checkPointInsidePolygon( + srcPoint: MKMapPoint, polygonPoints: [MKMapPoint] + ) -> Bool { + var insideCounter = 0 + var i = 0 + for j in 1...polygonPoints.count { + let pi = polygonPoints[i % polygonPoints.count] + let pj = polygonPoints[j % polygonPoints.count] + if srcPoint.y > min(pi.y, pj.y) && srcPoint.y <= max(pi.y, pj.y) && + srcPoint.x <= max(pi.x, pj.x) && pi.y != pj.y { + let xinters = (srcPoint.y - pi.y) * (pj.x - pi.x) / (pj.y - pi.y) + pi.x + if (pi.x == pj.x || srcPoint.x <= xinters) { + insideCounter += 1 + } + } + i = j + } + return insideCounter % 2 != 0 + } + + /** + Calculates the shortest distance from a point to a polygon defined by its vertices. The distance is returned in the same units as the coordinates (e.g., meters if using map points). 
+ + - Parameters: + - srcPoint: The point from which the distance to the polygon is calculated. + - polygonPoints: An array of points representing the vertices of the polygon, ordered sequentially. + + - Procedure: + 1. First, check if the point is inside the polygon using a point-in-polygon test (e.g., ray-casting algorithm). If the point is inside, return distance 0. + 2. If the point is outside the polygon, iterate through each edge of the polygon (defined by consecutive vertices) and calculate the distance from the point to each edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges is returned as the distance from the point to the polygon. + */ + public static func distanceFromPointToPolygon( + srcPoint: MKMapPoint, polygonPoints: [MKMapPoint] + ) -> Double? { + guard polygonPoints.count >= 3 else { + // Not a valid polygon + return nil + } + // Check if the point is inside the polygon using ray-casting algorithm + let isInside = checkPointInsidePolygon(srcPoint: srcPoint, polygonPoints: polygonPoints) + if isInside { + return 0.0 + } + var minDistance: Double = Double.infinity + for i in 0.. 0 else { return @@ -99,7 +99,7 @@ class LocationManager: NSObject, ObservableObject, CLLocationManagerDelegate { } } - func stopLocationUpdates() { + public func stopLocationUpdates() { locationManager.stopUpdatingLocation() } } diff --git a/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift new file mode 100644 index 00000000..6e4d370a --- /dev/null +++ b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift @@ -0,0 +1,21 @@ +// +// RasterizeConfig.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/18/25. +// +import UIKit + +public struct RasterizeConfig { + public let draw: Bool + public let color: UIColor? 
+ public let width: CGFloat + public let alpha: CGFloat + + public init(draw: Bool = true, color: UIColor?, width: CGFloat = 2.0, alpha: CGFloat = 1.0) { + self.draw = draw + self.color = color + self.width = width + self.alpha = alpha + } +} diff --git a/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Utils/Extensions.swift b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Utils/Extensions.swift new file mode 100644 index 00000000..310e9114 --- /dev/null +++ b/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Utils/Extensions.swift @@ -0,0 +1,14 @@ +// +// Extensions.swift +// IOSAccessAssessment +// +// Created by Himanshu on 3/28/26. +// + +import Foundation + +public extension Double { + public func roundedTo7Digits() -> Double { + (self * 1_000_0000).rounded() / 1_000_0000 + } +} From a24b626b3aa284e1a892d90c71e15f213564c09d Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 11:14:19 -0700 Subject: [PATCH 04/14] Create new framework and start adding files as added to spm package --- IOSAccessAssessment.xcodeproj/project.pbxproj | 427 ++++++++++++++---- .../xcschemes/IOSAccessAssessment.xcscheme | 11 + .../IOSAccessAssessment_Debug.xcscheme | 13 + .../AttributeEstimationPipeline.swift | 8 +- .../OtherAttributeExtensionLegacy.swift | 2 +- .../Location/LocationDetails.swift} | 18 +- .../Location/LocationExtension.swift | 2 +- .../Location/LocationFromImageExtension.swift | 18 +- .../Location/LocationFromMeshExtension.swift | 6 +- .../OtherAttributes/CrossSlopeExtension.swift | 1 + .../RunninSlopeExtension.swift | 1 + .../SurfaceIntegrityExtension.swift | 1 + .../OtherAttributes/WidthExtension.swift | 1 + .../AccessibilityFeatureAttribute.swift | 369 +-------------- .../Config/AccessibilityFeatureConfig.swift | 1 + .../Definitions/AccessibilityFeature.swift | 5 +- .../EditableAccessibilityFeature.swift | 7 +- .../MappedAccessibilityFeature.swift | 7 +- .../Annotation/AnnotationImageManager.swift | 2 +- 
.../Contour/ContourFeatureRasterizer.swift | 2 +- .../Mesh/Utils/MeshRasterizer.swift | 2 +- .../Projection/Plane/PlaneRasterizer.swift | 2 +- .../SurfaceIntegrityProcessor.swift | 1 + .../Geospatial/LocationHelpersExtension.swift | 30 +- .../IOSAccessAssessmentApp.swift | 1 + .../AccessibilityFeatureSnapshot.swift | 3 +- .../DamageDetectionRasterizer.swift | 2 +- .../Segmentation/SegmentationARPipeline.swift | 2 +- .../Definitions/CurrentMappingData.swift | 28 +- .../Shared/SharedAppData.swift | 2 +- IOSAccessAssessment/TDEI/OSW/OSWElement.swift | 1 + .../TDEI/OSW/OSWLineString.swift | 1 + .../TDEI/OSW/OSWMultiPolygon.swift | 1 + IOSAccessAssessment/TDEI/OSW/OSWPoint.swift | 1 + IOSAccessAssessment/TDEI/OSW/OSWPolygon.swift | 1 + .../TDEI/Services/WorkspaceService.swift | 2 +- .../APIChangesetUploadController.swift | 5 +- IOSAccessAssessment/View/ARCameraView.swift | 2 +- .../SubView/AnnotationFeatureDetailView.swift | 1 + .../View/TestMode/TestCameraView.swift | 2 +- .../PointNMapShared.docc/PointNMapShared.md | 13 + PointNMapShared/PointNMapShared.swift | 9 + .../AccessibilityFeatureAttribute.swift | 352 +++++++++++++++ .../Attributes/CategoricalAttribute.swift | 102 +++++ .../Geospatial/LocalizationProcessor.swift | 0 .../Geospatial/LocationHelpers.swift | 0 .../Geospatial/LocationManager.swift | 0 .../Shared/Definitions/RasterizeConfig.swift | 0 .../PointNMap/Shared/Utils/Extensions.swift | 2 +- .../PointNMapSharedTests.swift | 17 + 50 files changed, 969 insertions(+), 518 deletions(-) rename IOSAccessAssessment/{TDEI/OSM/OSMLocation.swift => AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift} (67%) create mode 100644 PointNMapShared/PointNMapShared.docc/PointNMapShared.md create mode 100644 PointNMapShared/PointNMapShared.swift create mode 100644 PointNMapShared/Sources/PointNMap/AccessibilityFeature/Attributes/AccessibilityFeatureAttribute.swift create mode 100644 
PointNMapShared/Sources/PointNMap/AccessibilityFeature/Attributes/CategoricalAttribute.swift rename {IOSAccessAssessment/PointNMapShared => PointNMapShared}/Sources/PointNMap/Geospatial/LocalizationProcessor.swift (100%) rename {IOSAccessAssessment/PointNMapShared => PointNMapShared}/Sources/PointNMap/Geospatial/LocationHelpers.swift (100%) rename {IOSAccessAssessment/PointNMapShared => PointNMapShared}/Sources/PointNMap/Geospatial/LocationManager.swift (100%) rename {IOSAccessAssessment/PointNMapShared => PointNMapShared}/Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift (100%) rename {IOSAccessAssessment/PointNMapShared => PointNMapShared}/Sources/PointNMap/Shared/Utils/Extensions.swift (80%) create mode 100644 PointNMapSharedTests/PointNMapSharedTests.swift diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 623adc79..9cc41e71 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 56; + objectVersion = 70; objects = { /* Begin PBXBuildFile section */ @@ -38,13 +38,11 @@ A30F59D02F7EFAC700EE7804 /* RunninSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */; }; A30F59D22F7EFACD00EE7804 /* CrossSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */; }; A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */; }; - A312FD682FA3308F0044808E /* LocationManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD662FA3308F0044808E /* LocationManager.swift */; }; - A312FD692FA3308F0044808E /* LocationHelpers.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD652FA3308F0044808E /* LocationHelpers.swift */; }; - 
A312FD6A2FA3308F0044808E /* LocalizationProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD642FA3308F0044808E /* LocalizationProcessor.swift */; }; - A312FD702FA330B80044808E /* RasterizeConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD6B2FA330B80044808E /* RasterizeConfig.swift */; }; - A312FD712FA330B80044808E /* Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD6D2FA330B80044808E /* Extensions.swift */; }; A312FD742FA336020044808E /* DepthFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD722FA336020044808E /* DepthFilter.swift */; }; A312FD752FA336020044808E /* DepthFiltering.metal in Sources */ = {isa = PBXBuildFile; fileRef = A312FD732FA336020044808E /* DepthFiltering.metal */; }; + A312FD862FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; + A312FD902FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; + A312FD912FA3391C0044808E /* PointNMapShared.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */; }; A3281AEE2F3950210003E396 /* MTLTextureUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3281AED2F39501E0003E396 /* MTLTextureUtils.swift */; }; A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */; }; @@ -88,7 +86,6 @@ A364B5DD2F259AFE00325E5C /* WorldPoints.metal in Sources */ = {isa = PBXBuildFile; fileRef = A364B5DC2F259AF900325E5C /* WorldPoints.metal */; }; A364B5DF2F26DB5700325E5C /* 
WorldPointsProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A364B5DE2F26DB5300325E5C /* WorldPointsProcessor.swift */; }; A36C6E022E134CE600A86004 /* bisenetv2_35_640_640.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A36C6E012E134CE600A86004 /* bisenetv2_35_640_640.mlpackage */; }; - A374B4AA2F8C8B4D003E030D /* CategoricalAttribute.swift in Sources */ = {isa = PBXBuildFile; fileRef = A374B4A92F8C8B40003E030D /* CategoricalAttribute.swift */; }; A374B4AC2F8EF654003E030D /* CurrentMappingData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A374B4AB2F8EF654003E030D /* CurrentMappingData.swift */; }; A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A374FAB62EE0173200055268 /* OSMChangesetUploadResponseElement.swift */; }; A37C3C182F3141FF001F4248 /* Plane.metal in Sources */ = {isa = PBXBuildFile; fileRef = A37C3C172F3141F9001F4248 /* Plane.metal */; }; @@ -167,7 +164,7 @@ A3EE6E542F67A41100F515E6 /* UtilityExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E532F67A40B00F515E6 /* UtilityExtension.swift */; }; A3EE6EFC2F69285600F515E6 /* LocationFromImageExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6EFB2F69285100F515E6 /* LocationFromImageExtension.swift */; }; A3EE6EFE2F69287F00F515E6 /* LocationFromMeshExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6EFD2F69287A00F515E6 /* LocationFromMeshExtension.swift */; }; - A3EE6F002F6A29F500F515E6 /* OSMLocation.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6EFF2F6A29F300F515E6 /* OSMLocation.swift */; }; + A3EE6F002F6A29F500F515E6 /* LocationDetails.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6EFF2F6A29F300F515E6 /* LocationDetails.swift */; }; A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3F27DB22D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage */; }; A3F38C4C2D38A2C700900547 /* 
DepthModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3F38C4B2D38A2C500900547 /* DepthModel.swift */; }; A3FCC2FB2DA4E1880037AB43 /* OrderedCollections in Frameworks */ = {isa = PBXBuildFile; productRef = A3FCC2FA2DA4E1880037AB43 /* OrderedCollections */; }; @@ -213,6 +210,27 @@ remoteGlobalIDString = 3222F9152B622DFD0019A079; remoteInfo = IOSAccessAssessment; }; + A312FD872FA3391C0044808E /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 3222F90E2B622DFD0019A079 /* Project object */; + proxyType = 1; + remoteGlobalIDString = A312FD7A2FA3391B0044808E; + remoteInfo = PointNMapShared; + }; + A312FD892FA3391C0044808E /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 3222F90E2B622DFD0019A079 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 3222F9152B622DFD0019A079; + remoteInfo = IOSAccessAssessment; + }; + A312FD8E2FA3391C0044808E /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 3222F90E2B622DFD0019A079 /* Project object */; + proxyType = 1; + remoteGlobalIDString = A312FD7A2FA3391B0044808E; + remoteInfo = PointNMapShared; + }; /* End PBXContainerItemProxy section */ /* Begin PBXCopyFilesBuildPhase section */ @@ -222,6 +240,7 @@ dstPath = ""; dstSubfolderSpec = 10; files = ( + A312FD912FA3391C0044808E /* PointNMapShared.framework in Embed Frameworks */, ); name = "Embed Frameworks"; runOnlyForDeploymentPostprocessing = 0; @@ -264,13 +283,10 @@ A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunninSlopeExtension.swift; sourceTree = ""; }; A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CrossSlopeExtension.swift; sourceTree = ""; }; A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
SurfaceIntegrityExtension.swift; sourceTree = ""; }; - A312FD642FA3308F0044808E /* LocalizationProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocalizationProcessor.swift; sourceTree = ""; }; - A312FD652FA3308F0044808E /* LocationHelpers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationHelpers.swift; sourceTree = ""; }; - A312FD662FA3308F0044808E /* LocationManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationManager.swift; sourceTree = ""; }; - A312FD6B2FA330B80044808E /* RasterizeConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RasterizeConfig.swift; sourceTree = ""; }; - A312FD6D2FA330B80044808E /* Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Extensions.swift; sourceTree = ""; }; A312FD722FA336020044808E /* DepthFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthFilter.swift; sourceTree = ""; }; A312FD732FA336020044808E /* DepthFiltering.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = DepthFiltering.metal; sourceTree = ""; }; + A312FD7B2FA3391B0044808E /* PointNMapShared.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShared.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = PointNMapSharedTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraViewController.swift; sourceTree = ""; }; A3281AED2F39501E0003E396 /* MTLTextureUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MTLTextureUtils.swift; 
sourceTree = ""; }; A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolicy.swift; sourceTree = ""; }; @@ -314,7 +330,6 @@ A364B5DC2F259AF900325E5C /* WorldPoints.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = WorldPoints.metal; sourceTree = ""; }; A364B5DE2F26DB5300325E5C /* WorldPointsProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorldPointsProcessor.swift; sourceTree = ""; }; A36C6E012E134CE600A86004 /* bisenetv2_35_640_640.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2_35_640_640.mlpackage; sourceTree = ""; }; - A374B4A92F8C8B40003E030D /* CategoricalAttribute.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CategoricalAttribute.swift; sourceTree = ""; }; A374B4AB2F8EF654003E030D /* CurrentMappingData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentMappingData.swift; sourceTree = ""; }; A374FAB62EE0173200055268 /* OSMChangesetUploadResponseElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMChangesetUploadResponseElement.swift; sourceTree = ""; }; A37C3C172F3141F9001F4248 /* Plane.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Plane.metal; sourceTree = ""; }; @@ -397,7 +412,7 @@ A3EE6E532F67A40B00F515E6 /* UtilityExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UtilityExtension.swift; sourceTree = ""; }; A3EE6EFB2F69285100F515E6 /* LocationFromImageExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationFromImageExtension.swift; sourceTree = ""; }; A3EE6EFD2F69287A00F515E6 /* LocationFromMeshExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationFromMeshExtension.swift; sourceTree = ""; }; - 
A3EE6EFF2F6A29F300F515E6 /* OSMLocation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMLocation.swift; sourceTree = ""; }; + A3EE6EFF2F6A29F300F515E6 /* LocationDetails.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationDetails.swift; sourceTree = ""; }; A3F27DB22D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = DepthAnythingV2SmallF16.mlpackage; sourceTree = ""; }; A3F38C4B2D38A2C500900547 /* DepthModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthModel.swift; sourceTree = ""; }; A3FE16602E18BA5600DAE5BE /* RGBCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RGBCoder.swift; sourceTree = ""; }; @@ -427,6 +442,36 @@ DAA7F8CB2CA77FA5003666D8 /* GrayscaleToColorFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GrayscaleToColorFilter.swift; sourceTree = ""; }; /* End PBXFileReference section */ +/* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */ + A312FDA82FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Sources/PointNMap/Geospatial/LocalizationProcessor.swift, + Sources/PointNMap/Geospatial/LocationHelpers.swift, + Sources/PointNMap/Geospatial/LocationManager.swift, + Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift, + Sources/PointNMap/Shared/Utils/Extensions.swift, + ); + target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; + }; + A312FDA92FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Sources/PointNMap/Geospatial/LocalizationProcessor.swift, + Sources/PointNMap/Geospatial/LocationHelpers.swift, + Sources/PointNMap/Geospatial/LocationManager.swift, + 
Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift, + Sources/PointNMap/Shared/Utils/Extensions.swift, + ); + target = A312FD7A2FA3391B0044808E /* PointNMapShared */; + }; +/* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ + +/* Begin PBXFileSystemSynchronizedRootGroup section */ + A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FDA82FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, A312FDA92FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; + A312FD8B2FA3391C0044808E /* PointNMapSharedTests */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = PointNMapSharedTests; sourceTree = ""; }; +/* End PBXFileSystemSynchronizedRootGroup section */ + /* Begin PBXFrameworksBuildPhase section */ 3222F9132B622DFD0019A079 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; @@ -434,6 +479,7 @@ files = ( A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */, A3FCC2FB2DA4E1880037AB43 /* OrderedCollections in Frameworks */, + A312FD902FA3391C0044808E /* PointNMapShared.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -451,6 +497,21 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + A312FD782FA3391B0044808E /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + A312FD822FA3391C0044808E /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + A312FD862FA3391C0044808E /* PointNMapShared.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ @@ -460,6 +521,8 @@ 3222F9182B622DFD0019A079 /* IOSAccessAssessment */, 3222F9292B622E0A0019A079 /* IOSAccessAssessmentTests 
*/, 3222F9332B622E0A0019A079 /* IOSAccessAssessmentUITests */, + A312FD7C2FA3391B0044808E /* PointNMapShared */, + A312FD8B2FA3391C0044808E /* PointNMapSharedTests */, 3222F9172B622DFD0019A079 /* Products */, ); sourceTree = ""; @@ -470,6 +533,8 @@ 3222F9162B622DFD0019A079 /* IOSAccessAssessment.app */, 3222F9262B622E0A0019A079 /* IOSAccessAssessmentTests.xctest */, 3222F9302B622E0A0019A079 /* IOSAccessAssessmentUITests.xctest */, + A312FD7B2FA3391B0044808E /* PointNMapShared.framework */, + A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */, ); name = Products; sourceTree = ""; @@ -477,7 +542,6 @@ 3222F9182B622DFD0019A079 /* IOSAccessAssessment */ = { isa = PBXGroup; children = ( - A312FD612FA330620044808E /* PointNMapShared */, A3FE166A2E18DD2A00DAE5BE /* Info.plist */, A39C9F392DD9B01200455E45 /* TDEI */, A30801512EC0984F00B1BA3A /* AccessibilityFeature */, @@ -681,70 +745,11 @@ A3431E012F26FA2700B96610 /* LocationExtension.swift */, A3EE6EFB2F69285100F515E6 /* LocationFromImageExtension.swift */, A3EE6EFD2F69287A00F515E6 /* LocationFromMeshExtension.swift */, + A3EE6EFF2F6A29F300F515E6 /* LocationDetails.swift */, ); path = Location; sourceTree = ""; }; - A312FD612FA330620044808E /* PointNMapShared */ = { - isa = PBXGroup; - children = ( - A312FD622FA3307D0044808E /* Sources */, - ); - path = PointNMapShared; - sourceTree = ""; - }; - A312FD622FA3307D0044808E /* Sources */ = { - isa = PBXGroup; - children = ( - A312FD632FA330840044808E /* PointNMap */, - ); - path = Sources; - sourceTree = ""; - }; - A312FD632FA330840044808E /* PointNMap */ = { - isa = PBXGroup; - children = ( - A312FD672FA3308F0044808E /* Geospatial */, - A312FD6F2FA330B80044808E /* Shared */, - ); - path = PointNMap; - sourceTree = ""; - }; - A312FD672FA3308F0044808E /* Geospatial */ = { - isa = PBXGroup; - children = ( - A312FD642FA3308F0044808E /* LocalizationProcessor.swift */, - A312FD652FA3308F0044808E /* LocationHelpers.swift */, - A312FD662FA3308F0044808E /* 
LocationManager.swift */, - ); - path = Geospatial; - sourceTree = ""; - }; - A312FD6C2FA330B80044808E /* Definitions */ = { - isa = PBXGroup; - children = ( - A312FD6B2FA330B80044808E /* RasterizeConfig.swift */, - ); - path = Definitions; - sourceTree = ""; - }; - A312FD6E2FA330B80044808E /* Utils */ = { - isa = PBXGroup; - children = ( - A312FD6D2FA330B80044808E /* Extensions.swift */, - ); - path = Utils; - sourceTree = ""; - }; - A312FD6F2FA330B80044808E /* Shared */ = { - isa = PBXGroup; - children = ( - A312FD6C2FA330B80044808E /* Definitions */, - A312FD6E2FA330B80044808E /* Utils */, - ); - path = Shared; - sourceTree = ""; - }; A31A1E772EAC49E3008B30B7 /* UI */ = { isa = PBXGroup; children = ( @@ -949,7 +954,6 @@ A35E051B2EDFB093003C26CF /* OSMNode.swift */, A35E051D2EDFB099003C26CF /* OSMWay.swift */, A329434F2EE80EC200C4C1BC /* OSMRelation.swift */, - A3EE6EFF2F6A29F300F515E6 /* OSMLocation.swift */, A374FAB62EE0173200055268 /* OSMChangesetUploadResponseElement.swift */, A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */, ); @@ -1072,7 +1076,6 @@ isa = PBXGroup; children = ( A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttribute.swift */, - A374B4A92F8C8B40003E030D /* CategoricalAttribute.swift */, ); path = Attributes; sourceTree = ""; @@ -1354,6 +1357,16 @@ }; /* End PBXGroup section */ +/* Begin PBXHeadersBuildPhase section */ + A312FD762FA3391B0044808E /* Headers */ = { + isa = PBXHeadersBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXHeadersBuildPhase section */ + /* Begin PBXNativeTarget section */ 3222F9152B622DFD0019A079 /* IOSAccessAssessment */ = { isa = PBXNativeTarget; @@ -1367,6 +1380,7 @@ buildRules = ( ); dependencies = ( + A312FD8F2FA3391C0044808E /* PBXTargetDependency */, ); name = IOSAccessAssessment; productName = IOSAccessAssessment; @@ -1409,6 +1423,53 @@ productReference = 3222F9302B622E0A0019A079 /* IOSAccessAssessmentUITests.xctest */; productType = 
"com.apple.product-type.bundle.ui-testing"; }; + A312FD7A2FA3391B0044808E /* PointNMapShared */ = { + isa = PBXNativeTarget; + buildConfigurationList = A312FD922FA3391C0044808E /* Build configuration list for PBXNativeTarget "PointNMapShared" */; + buildPhases = ( + A312FD762FA3391B0044808E /* Headers */, + A312FD772FA3391B0044808E /* Sources */, + A312FD782FA3391B0044808E /* Frameworks */, + A312FD792FA3391B0044808E /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + fileSystemSynchronizedGroups = ( + A312FD7C2FA3391B0044808E /* PointNMapShared */, + ); + name = PointNMapShared; + packageProductDependencies = ( + ); + productName = PointNMapShared; + productReference = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; + productType = "com.apple.product-type.framework"; + }; + A312FD842FA3391C0044808E /* PointNMapSharedTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = A312FD952FA3391C0044808E /* Build configuration list for PBXNativeTarget "PointNMapSharedTests" */; + buildPhases = ( + A312FD812FA3391C0044808E /* Sources */, + A312FD822FA3391C0044808E /* Frameworks */, + A312FD832FA3391C0044808E /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + A312FD882FA3391C0044808E /* PBXTargetDependency */, + A312FD8A2FA3391C0044808E /* PBXTargetDependency */, + ); + fileSystemSynchronizedGroups = ( + A312FD8B2FA3391C0044808E /* PointNMapSharedTests */, + ); + name = PointNMapSharedTests; + packageProductDependencies = ( + ); + productName = PointNMapSharedTests; + productReference = A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ @@ -1416,7 +1477,7 @@ isa = PBXProject; attributes = { BuildIndependentTargetsInParallel = 1; - LastSwiftUpdateCheck = 1520; + LastSwiftUpdateCheck = 2600; LastUpgradeCheck = 1520; TargetAttributes = { 3222F9152B622DFD0019A079 = { @@ -1431,6 +1492,13 @@ 
CreatedOnToolsVersion = 15.2; TestTargetID = 3222F9152B622DFD0019A079; }; + A312FD7A2FA3391B0044808E = { + CreatedOnToolsVersion = 26.0.1; + }; + A312FD842FA3391C0044808E = { + CreatedOnToolsVersion = 26.0.1; + TestTargetID = 3222F9152B622DFD0019A079; + }; }; }; buildConfigurationList = 3222F9112B622DFD0019A079 /* Build configuration list for PBXProject "IOSAccessAssessment" */; @@ -1452,6 +1520,8 @@ 3222F9152B622DFD0019A079 /* IOSAccessAssessment */, 3222F9252B622E0A0019A079 /* IOSAccessAssessmentTests */, 3222F92F2B622E0A0019A079 /* IOSAccessAssessmentUITests */, + A312FD7A2FA3391B0044808E /* PointNMapShared */, + A312FD842FA3391C0044808E /* PointNMapSharedTests */, ); }; /* End PBXProject section */ @@ -1481,6 +1551,20 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + A312FD792FA3391B0044808E /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + A312FD832FA3391C0044808E /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXResourcesBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ @@ -1534,7 +1618,6 @@ CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */, A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */, - A374B4AA2F8C8B4D003E030D /* CategoricalAttribute.swift in Sources */, A3DC22FB2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift in Sources */, A3FFAA7E2DE3E41D002B99BD /* SegmentationARPipeline.swift in Sources */, A30BED3C2ED2F48B004A5B51 /* MeshClusteringUtils.swift in Sources */, @@ -1595,7 +1678,7 @@ 3222F91A2B622DFD0019A079 /* IOSAccessAssessmentApp.swift in Sources */, A3B5BDA32F8329740036C6EC /* ProjectedWorldPointsExtension.swift in Sources */, A3A45F0C2EE7A4F40029F5AE /* ContourDetectionPolicy.swift in Sources */, - A3EE6F002F6A29F500F515E6 /* 
OSMLocation.swift in Sources */, + A3EE6F002F6A29F500F515E6 /* LocationDetails.swift in Sources */, A32943572EE81BF700C4C1BC /* OSWLineString.swift in Sources */, A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */, A37C3C1A2F3144F7001F4248 /* PlaneAttributeProcessor.swift in Sources */, @@ -1619,9 +1702,6 @@ A3281AEE2F3950210003E396 /* MTLTextureUtils.swift in Sources */, A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */, A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */, - A312FD682FA3308F0044808E /* LocationManager.swift in Sources */, - A312FD692FA3308F0044808E /* LocationHelpers.swift in Sources */, - A312FD6A2FA3308F0044808E /* LocalizationProcessor.swift in Sources */, A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */, A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */, A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */, @@ -1642,8 +1722,6 @@ A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, A3DC22EF2DCF119A0020CE84 /* HomographyTransformFilter.swift in Sources */, A3EE6E542F67A41100F515E6 /* UtilityExtension.swift in Sources */, - A312FD702FA330B80044808E /* RasterizeConfig.swift in Sources */, - A312FD712FA330B80044808E /* Extensions.swift in Sources */, A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, A3C1D7442F886D3500833411 /* SurfaceIntegrity.metal in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, @@ -1689,6 +1767,20 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + A312FD772FA3391B0044808E /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + A312FD812FA3391C0044808E /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXSourcesBuildPhase section */ /* Begin PBXTargetDependency section */ @@ -1702,6 
+1794,21 @@ target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; targetProxy = 3222F9312B622E0A0019A079 /* PBXContainerItemProxy */; }; + A312FD882FA3391C0044808E /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = A312FD7A2FA3391B0044808E /* PointNMapShared */; + targetProxy = A312FD872FA3391C0044808E /* PBXContainerItemProxy */; + }; + A312FD8A2FA3391C0044808E /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; + targetProxy = A312FD892FA3391C0044808E /* PBXContainerItemProxy */; + }; + A312FD8F2FA3391C0044808E /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = A312FD7A2FA3391B0044808E /* PointNMapShared */; + targetProxy = A312FD8E2FA3391C0044808E /* PBXContainerItemProxy */; + }; /* End PBXTargetDependency section */ /* Begin XCBuildConfiguration section */ @@ -1854,7 +1961,10 @@ INFOPLIST_KEY_UIRequiredDeviceCapabilities = arkit; INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; IPHONEOS_DEPLOYMENT_TARGET = 18.6; - LD_RUNPATH_SEARCH_PATHS = "$(inherited)"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); MARKETING_VERSION = 0.3; MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment"; OTHER_CFLAGS = "-DACCELERATE_NEW_LAPACK"; @@ -1899,7 +2009,10 @@ INFOPLIST_KEY_UIRequiredDeviceCapabilities = arkit; INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; IPHONEOS_DEPLOYMENT_TARGET = 18.6; - LD_RUNPATH_SEARCH_PATHS = "$(inherited)"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); MARKETING_VERSION = 0.3; MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment"; OTHER_CFLAGS = 
"-DACCELERATE_NEW_LAPACK"; @@ -1992,6 +2105,126 @@ }; name = Release; }; + A312FD932FA3391C0044808E /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUILD_LIBRARY_FOR_DISTRIBUTION = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = G8MQVE5WWW; + DYLIB_COMPATIBILITY_VERSION = 1; + DYLIB_CURRENT_VERSION = 1; + DYLIB_INSTALL_NAME_BASE = "@rpath"; + ENABLE_MODULE_VERIFIER = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; + IPHONEOS_DEPLOYMENT_TARGET = 26.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++"; + MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20"; + PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapShared; + PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)"; + SKIP_INSTALL = YES; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_INSTALL_MODULE = YES; + SWIFT_INSTALL_OBJC_HEADER = NO; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + VERSIONING_SYSTEM = "apple-generic"; + VERSION_INFO_PREFIX = ""; + }; + name = Debug; + }; + A312FD942FA3391C0044808E /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUILD_LIBRARY_FOR_DISTRIBUTION = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = G8MQVE5WWW; + DYLIB_COMPATIBILITY_VERSION = 1; + DYLIB_CURRENT_VERSION = 1; + DYLIB_INSTALL_NAME_BASE = "@rpath"; + ENABLE_MODULE_VERIFIER = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; + IPHONEOS_DEPLOYMENT_TARGET = 26.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + 
"@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++"; + MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20"; + PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapShared; + PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)"; + SKIP_INSTALL = YES; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_INSTALL_MODULE = YES; + SWIFT_INSTALL_OBJC_HEADER = NO; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + VERSIONING_SYSTEM = "apple-generic"; + VERSION_INFO_PREFIX = ""; + }; + name = Release; + }; + A312FD962FA3391C0044808E /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = UB6LB27Z2X; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 26.0; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapSharedTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/IOSAccessAssessment.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/IOSAccessAssessment"; + }; + name = Debug; + }; + A312FD972FA3391C0044808E /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = UB6LB27Z2X; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 26.0; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapSharedTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = NO; 
+ SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/IOSAccessAssessment.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/IOSAccessAssessment"; + }; + name = Release; + }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ @@ -2031,6 +2264,24 @@ defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; + A312FD922FA3391C0044808E /* Build configuration list for PBXNativeTarget "PointNMapShared" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + A312FD932FA3391C0044808E /* Debug */, + A312FD942FA3391C0044808E /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + A312FD952FA3391C0044808E /* Build configuration list for PBXNativeTarget "PointNMapSharedTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + A312FD962FA3391C0044808E /* Debug */, + A312FD972FA3391C0044808E /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; /* End XCConfigurationList section */ /* Begin XCRemoteSwiftPackageReference section */ diff --git a/IOSAccessAssessment.xcodeproj/xcshareddata/xcschemes/IOSAccessAssessment.xcscheme b/IOSAccessAssessment.xcodeproj/xcshareddata/xcschemes/IOSAccessAssessment.xcscheme index df86a2ff..23ca36f4 100644 --- a/IOSAccessAssessment.xcodeproj/xcshareddata/xcschemes/IOSAccessAssessment.xcscheme +++ b/IOSAccessAssessment.xcodeproj/xcshareddata/xcschemes/IOSAccessAssessment.xcscheme @@ -52,6 +52,17 @@ ReferencedContainer = "container:IOSAccessAssessment.xcodeproj"> + + + + + + + + + + let lidarDepth: Float } @@ -217,12 +217,12 @@ class AttributeEstimationPipeline: ObservableObject { ) { /// Threshold needs to be in Map Units let distanceThreshold = Constants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) - guard let osmLocationDetails = 
accessibilityFeature.locationDetails else { + guard let LocationDetails = accessibilityFeature.locationDetails else { accessibilityFeature.setIsExisting(false) return } let matchedElement: (any OSWElement)? = mappingData.getMatchedFeature( - to: osmLocationDetails, featureClass: accessibilityFeature.accessibilityFeatureClass, + to: LocationDetails, featureClass: accessibilityFeature.accessibilityFeatureClass, captureId: self.captureImageData?.id, distanceThreshold: distanceThreshold ) diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift index c0df658a..44e3979e 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift @@ -6,7 +6,7 @@ // import SwiftUI import CoreLocation -import PointNMap +import PointNMapShared /** Extension for attribute calculation with rudimentary methods. 
diff --git a/IOSAccessAssessment/TDEI/OSM/OSMLocation.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift similarity index 67% rename from IOSAccessAssessment/TDEI/OSM/OSMLocation.swift rename to IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift index 1714241b..99941b6f 100644 --- a/IOSAccessAssessment/TDEI/OSM/OSMLocation.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift @@ -8,10 +8,10 @@ import Foundation import CoreLocation -struct OSMLocationElement: Codable, Sendable { +struct LocationElement: Codable, Sendable { var coordinates: [CLLocationCoordinate2D] - /// TODO: We can add an optional `members` property to OSMLocationElement that can hold child elements, and update the encoding/decoding logic to handle this new property appropriately. This way, we can represent the hierarchical nature of OSM data while still maintaining a clear structure for each element type. -// var members: [OSMLocationElement]? + /// TODO: We can add an optional `members` property to LocationElement that can hold child elements, and update the encoding/decoding logic to handle this new property appropriately. This way, we can represent the hierarchical nature of OSM data while still maintaining a clear structure for each element type. +// var members: [LocationElement]? var isWay: Bool var isClosed: Bool @@ -61,13 +61,13 @@ struct OSMLocationElement: Codable, Sendable { This support was not implemented because the OSW schema does not support relations. - TODO: - For relation support: we can treat OSMLocationDetails as a tree of OSMLocationElement structs, where each OSMLocationElement can either have a set of coordinates (for nodes and ways) or a set of child OSMLocationElements (for relations). 
This way, we can represent the hierarchical nature of OSM data while still maintaining a clear structure for each element type. This will be an easier modification because we can simply add an optional `members` property to OSMLocationElement that can hold child elements, and update the encoding/decoding logic to handle this new property appropriately. - However, this will need modification to caller code that constructs/uses/modifies OSMLocationDetails, because they will need to account for the possibility of nested members when working with OSM data. + For relation support: we can treat LocationDetails as a tree of LocationElement structs, where each LocationElement can either have a set of coordinates (for nodes and ways) or a set of child OSMLocationElements (for relations). This way, we can represent the hierarchical nature of OSM data while still maintaining a clear structure for each element type. This will be an easier modification because we can simply add an optional `members` property to LocationElement that can hold child elements, and update the encoding/decoding logic to handle this new property appropriately. + However, this will need modification to caller code that constructs/uses/modifies LocationDetails, because they will need to account for the possibility of nested members when working with OSM data. 
*/ -public struct OSMLocationDetails: Codable, Sendable { - var locations: [OSMLocationElement] +public struct LocationDetails: Codable, Sendable { + var locations: [LocationElement] - init(locations: [OSMLocationElement]) { + init(locations: [LocationElement]) { self.locations = locations } @@ -82,6 +82,6 @@ public struct OSMLocationDetails: Codable, Sendable { public init(from decoder: Decoder) throws { let container = try decoder.container(keyedBy: CodingKeys.self) - self.locations = try container.decode([OSMLocationElement].self, forKey: .locations) + self.locations = try container.decode([LocationElement].self, forKey: .locations) } } diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift index 321b7f01..a653e64c 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift @@ -6,7 +6,7 @@ // import SwiftUI import CoreLocation -import PointNMap +import PointNMapShared extension AttributeEstimationPipeline { func calculateLocation( diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift index dcc291c0..430b6c00 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift @@ -6,7 +6,7 @@ // import SwiftUI import CoreLocation -import PointNMap +import PointNMapShared extension AttributeEstimationPipeline { func getLocationFromImageByCentroid( @@ -33,8 +33,8 @@ 
extension AttributeEstimationPipeline { cameraIntrinsics: captureImageData.cameraIntrinsics, deviceLocation: deviceLocation ) - let locationElement = OSMLocationElement(coordinates: [locationCoordinate], isWay: false, isClosed: false) - let locationDetails = OSMLocationDetails(locations: [locationElement]) + let locationElement = LocationElement(coordinates: [locationCoordinate], isWay: false, isClosed: false) + let locationDetails = LocationDetails(locations: [locationElement]) return LocationRequestResult( locationDetails: locationDetails, locationDelta: locationDelta, lidarDepth: featureDepthValue ) @@ -84,8 +84,8 @@ extension AttributeEstimationPipeline { deviceLocation: deviceLocation ) } - let locationElement = OSMLocationElement(coordinates: locationCoordinates, isWay: true, isClosed: false) - let locationDetails = OSMLocationDetails(locations: [locationElement]) + let locationElement = LocationElement(coordinates: locationCoordinates, isWay: true, isClosed: false) + let locationDetails = LocationDetails(locations: [locationElement]) let locationDelta = locationDeltas.reduce(SIMD2(0, 0), +) / Float(locationDeltas.count) let lidarDepth = locationDeltas.map { simd_length($0) }.reduce(0, +) / Float(locationDeltas.count) return LocationRequestResult( @@ -133,8 +133,8 @@ extension AttributeEstimationPipeline { deviceLocation: deviceLocation ) } - let locationElement = OSMLocationElement(coordinates: locationCoordinates, isWay: true, isClosed: true) - let locationDetails = OSMLocationDetails(locations: [locationElement]) + let locationElement = LocationElement(coordinates: locationCoordinates, isWay: true, isClosed: true) + let locationDetails = LocationDetails(locations: [locationElement]) let locationDelta = locationDeltas.reduce(SIMD2(0, 0), +) / Float(locationDeltas.count) let lidarDepth = pointDepthValues.reduce(0, +) / Float(pointDepthValues.count) return LocationRequestResult( @@ -184,8 +184,8 @@ extension AttributeEstimationPipeline { deviceLocation: 
deviceLocation ) } - let locationElement = OSMLocationElement(coordinates: locationCoordinates, isWay: true, isClosed: false) - let locationDetails = OSMLocationDetails(locations: [locationElement]) + let locationElement = LocationElement(coordinates: locationCoordinates, isWay: true, isClosed: false) + let locationDetails = LocationDetails(locations: [locationElement]) let locationDelta = locationDeltas.reduce(SIMD2(0, 0), +) / Float(locationDeltas.count) let lidarDepth = pointDepthValues.reduce(0, +) / Float(pointDepthValues.count) return LocationRequestResult( diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift index 687078af..efe5390e 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift @@ -7,7 +7,7 @@ import SwiftUI import CoreLocation -import PointNMap +import PointNMapShared extension AttributeEstimationPipeline { func getLocationFromMeshForLineStringByPlane( @@ -65,8 +65,8 @@ extension AttributeEstimationPipeline { deviceLocation: deviceLocation ) } - let locationElement = OSMLocationElement(coordinates: locationCoordinates, isWay: true, isClosed: false) - let locationDetails = OSMLocationDetails(locations: [locationElement]) + let locationElement = LocationElement(coordinates: locationCoordinates, isWay: true, isClosed: false) + let locationDetails = LocationDetails(locations: [locationElement]) let locationDelta = locationDeltas.reduce(SIMD2(0, 0), +) / Float(locationDeltas.count) let lidarDepth = locationDeltas.map { simd_length($0) }.reduce(0, +) / Float(locationDeltas.count) return LocationRequestResult( diff --git 
a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift index f105f866..253b4002 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShared extension AttributeEstimationPipeline { func calculateCrossSlope( diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift index e2903638..75cd7631 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShared extension AttributeEstimationPipeline { func calculateRunningSlope( diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift index 6a6768a7..25911ed2 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShared extension AttributeEstimationPipeline { func calculateSurfaceIntegrity( diff 
--git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift index d33b5765..b1b37db2 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShared extension AttributeEstimationPipeline { func calculateWidth( diff --git a/IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttribute.swift b/IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttribute.swift index d30daf75..9c96918a 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttribute.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttribute.swift @@ -6,367 +6,32 @@ // import Foundation +import PointNMapShared /** Enumeration defining various accessibility feature attributes, along with their metadata and value types. - Note: One needs to be aware of the value types associated with each attribute. The valueType property is only meant for reference. 
*/ -enum AccessibilityFeatureAttribute: String, Identifiable, CaseIterable, Codable, Sendable, Comparable { - case width - case runningSlope - case crossSlope - case surfaceIntegrity - /** - - NOTE: - Experimental attributes - */ - case lidarDepth - case latitudeDelta - case longitudeDelta - /** - - NOTE: - Legacy attributes for comparison with older data - */ - case widthLegacy - case runningSlopeLegacy - case crossSlopeLegacy - case widthFromImage - case runningSlopeFromImage - case crossSlopeFromImage - - enum ValueType: Sendable, Codable, Equatable { - case length - case angle - case flag - case categorical(typeID: String) - } - - enum Value: Sendable, Codable, Equatable { - case length(Measurement) - case angle(Measurement) - case flag(Bool) - case categorical(AnyCategoricalValue) - - static func == (lhs: Value, rhs: Value) -> Bool { - switch (lhs, rhs) { - case (.length(let l1), .length(let l2)): - return l1 == l2 - case (.angle(let a1), .angle(let a2)): - return a1 == a2 - case (.flag(let f1), .flag(let f2)): - return f1 == f2 - case (.categorical(let c1), .categorical(let c2)): - return c1 == c2 - default: - return false - } - } - } - - struct Metadata { - let id: Int - let name: String - let unit: Dimension? 
- let valueType: ValueType - /// TODO: Verify these OSM tag keys - let osmTagKey: String - } - - private var metadata: Metadata { - switch self { - case .width: - return Metadata( - id: 10, name: "Width", unit: UnitLength.meters, - valueType: .length, - osmTagKey: "width" - ) - case .runningSlope: - return Metadata( - id: 20, name: "Running Slope", unit: UnitAngle.degrees, - valueType: .angle, - osmTagKey: "incline" - ) - case .crossSlope: - return Metadata( - id: 30, name: "Cross Slope", unit: UnitAngle.degrees, - valueType: .angle, - osmTagKey: "cross_slope" - ) - case .surfaceIntegrity: - return Metadata( - id: 40, name: "Surface Integrity", unit: nil, - valueType: .categorical(typeID: SurfaceIntegrityStatus.typeID), - osmTagKey: "surface_integrity" - ) - case .lidarDepth: - return Metadata( - id: 50, name: "LiDAR Depth", unit: UnitLength.meters, - valueType: .length, - osmTagKey: APIConstants.TagKeys.lidarDepthKey - ) - case .latitudeDelta: - return Metadata( - id: 60, name: "Latitude Delta", unit: UnitLength.meters, - valueType: .length, - osmTagKey: APIConstants.TagKeys.latitudeDeltaKey - ) - case .longitudeDelta: - return Metadata( - id: 70, name: "Longitude Delta", unit: UnitLength.meters, - valueType: .length, - osmTagKey: APIConstants.TagKeys.longitudeDeltaKey - ) - case .widthLegacy: - return Metadata( - id: 15, name: "Width Legacy", unit: UnitLength.meters, - valueType: .length, - osmTagKey: "width_legacy" - ) - case .runningSlopeLegacy: - return Metadata( - id: 25, name: "Running Slope Legacy", unit: UnitAngle.degrees, - valueType: .angle, - osmTagKey: "incline_legacy" - ) - case .crossSlopeLegacy: - return Metadata( - id: 35, name: "Cross Slope Legacy", unit: UnitAngle.degrees, - valueType: .angle, - osmTagKey: "cross_slope_legacy" - ) - case .widthFromImage: - return Metadata( - id: 16, name: "Width from Image", unit: UnitLength.meters, - valueType: .length, - osmTagKey: "width_from_image" - ) - case .runningSlopeFromImage: - return Metadata( - id: 
26, name: "Running Slope from Image", unit: UnitAngle.degrees, - valueType: .angle, - osmTagKey: "running_slope_from_image" - ) - case .crossSlopeFromImage: - return Metadata( - id: 36, name: "Cross Slope from Image", unit: UnitAngle.degrees, - valueType: .angle, - osmTagKey: "cross_slope_from_image" - ) - } - } - - var id: Int { - return metadata.id - } - - var name: String { - return metadata.name - } - - var unit: Dimension? { - return metadata.unit - } - - var valueType: ValueType { - return metadata.valueType - } - - var displayName: String { - if let unit = unit { - return "\(name) (\(unit.symbol))" - } else { - return name - } - } - - /// TODO: Verify these OSM tag keys - var osmTagKey: String { - return metadata.osmTagKey - } - - static func < (lhs: AccessibilityFeatureAttribute, rhs: AccessibilityFeatureAttribute) -> Bool { - return lhs.id < rhs.id - } -} - -extension AccessibilityFeatureAttribute.Value { - var valueType: AccessibilityFeatureAttribute.ValueType { - switch self { - case .length: return .length - case .angle: return .angle - case .flag: return .flag - case .categorical(let categoricalValue): return .categorical(typeID: categoricalValue.typeID) - } - } -} - -/** - Extensions for AccessibilityFeatureAttribute to provide expected value types, - */ extension AccessibilityFeatureAttribute { -// func isCompatible(with value: Value) -> Bool { -// return self.valueType == value.valueType -// } - func isCompatible(with value: Value) -> Bool { - switch (self.valueType, value) { - case (.length, .length), - (.angle, .angle), - (.flag, .flag): - return true - case (.categorical(let expectedID), .categorical(let cat)): - return cat.typeID == expectedID - default: - return false - } - } -} - -/** - Extension to convert AccessibilityFeatureAttribute.Value to and from primitive types. - */ -extension AccessibilityFeatureAttribute.Value { - func toDouble() -> Double? 
{ - switch self { - case .length(let measurement): - return measurement.converted(to: .meters).value - case .angle(let measurement): - return measurement.converted(to: .degrees).value - case .flag: - return nil - case .categorical: - return nil - } - } - - func toBool() -> Bool? { - switch self { - case .flag(let value): - return value - default: - return nil - } - } - - func toString() -> String? { + /// TODO: Verify these OSM tag keys + public var osmTagKey: String { switch self { - case .length(let measurement): - return String(format: "%.2f", measurement.converted(to: .meters).value) - case .angle(let measurement): - return String(format: "%.2f", measurement.converted(to: .degrees).value) - case .flag(let value): - return value ? "yes" : "no" - case .categorical(let value): - return value.rawValue + case .width: return "width" + case .runningSlope: return "incline" + case .crossSlope: return "cross_slope" + case .surfaceIntegrity: return "surface_integrity" + case .lidarDepth: return APIConstants.TagKeys.lidarDepthKey + case .latitudeDelta: return APIConstants.TagKeys.latitudeDeltaKey + case .longitudeDelta: return APIConstants.TagKeys.longitudeDeltaKey + case .widthLegacy: return "width_legacy" + case .runningSlopeLegacy: return "incline_legacy" + case .crossSlopeLegacy: return "cross_slope_legacy" + case .widthFromImage: return "width_from_image" + case .runningSlopeFromImage: return "running_slope_from_image" + case .crossSlopeFromImage: return "cross_slope_from_image" + default: return "" } } } -extension AccessibilityFeatureAttribute { - func value(from double: Double) -> Value? { - switch self.valueType { - case .length: - return .length(Measurement(value: double, unit: .meters)) - case .angle: - return .angle(Measurement(value: double, unit: .degrees)) - case .flag: - return nil // Flags cannot be represented as doubles - case .categorical: - return nil - } - } - - func value(from bool: Bool) -> Value? 
{ - switch self.valueType { - case .flag: - return .flag(bool) - default: - return nil // Only flags can be represented as booleans - } - } - - func value(from categorical: T) -> Value? { - switch self.valueType { - case .categorical(let expectedID): - if T.typeID == expectedID { - return .categorical(AnyCategoricalValue(categorical)) - } else { - return nil // Categorical type ID does not match - } - default: - return nil // Only categorical attributes can be represented as categorical values - } - } - - func value(from categoricalRawValue: String) -> Value? { - switch self.valueType { - case .categorical(let expectedID): - switch expectedID { - case SurfaceIntegrityStatus.typeID: - if let categoricalValue = SurfaceIntegrityStatus(rawValue: categoricalRawValue) { - return .categorical(AnyCategoricalValue(categoricalValue)) - } else { - return nil // Invalid categorical raw value for Surface Integrity - } - default: - guard let decoded = CategoricalAttributeRegistry.decodeToCategoricalValue( - typeID: expectedID, - raw: categoricalRawValue - ) else { - return nil - } - return .categorical(decoded) - } - default: - return nil // Only categorical attributes can be represented as categorical values - } - } - - func categoricalOptions() -> [AnyCategoricalValue] { - guard case .categorical(let typeID) = self.valueType else { - return [] - } - if typeID == SurfaceIntegrityStatus.typeID { - let options = SurfaceIntegrityStatus.allCases.map { AnyCategoricalValue($0) } - return options - } - let options = CategoricalAttributeRegistry.cases(for: typeID) ?? [] - return options - } - - func getValueDescription(attributeValue: Value?) -> String? 
{ - guard let attributeValue = attributeValue else { - return nil - } - switch (self, attributeValue) { - case (.width, .length(let measurement)): - return String(format: "%.2f", measurement.converted(to: .meters).value) - case (.runningSlope, .angle(let measurement)): - return String(format: "%.2f", measurement.converted(to: .degrees).value) - case (.crossSlope, .angle(let measurement)): - return String(format: "%.2f", measurement.converted(to: .degrees).value) - case (.surfaceIntegrity, .categorical(let categoricalValue)): - return categoricalValue.rawValue - case (.lidarDepth, .length(let measurement)): - return String(format: "%.2f", measurement.converted(to: .meters).value) - case (.latitudeDelta, .length(let measurement)): - return String(format: "%.2f", measurement.converted(to: .meters).value) - case (.longitudeDelta, .length(let measurement)): - return String(format: "%.2f", measurement.converted(to: .meters).value) - case (.widthLegacy, .length(let measurement)): - return String(format: "%.2f", measurement.converted(to: .meters).value) - case (.runningSlopeLegacy, .angle(let measurement)): - return String(format: "%.2f", measurement.converted(to: .degrees).value) - case (.crossSlopeLegacy, .angle(let measurement)): - return String(format: "%.2f", measurement.converted(to: .degrees).value) - case (.widthFromImage, .length(let measurement)): - return String(format: "%.2f", measurement.converted(to: .meters).value) - case (.runningSlopeFromImage, .angle(let measurement)): - return String(format: "%.2f", measurement.converted(to: .degrees).value) - case (.crossSlopeFromImage, .angle(let measurement)): - return String(format: "%.2f", measurement.converted(to: .degrees).value) - default: - return nil - } - } -} diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift index 6d7cec80..bb40d9e1 100644 --- 
a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift @@ -7,6 +7,7 @@ import CoreImage import ARKit +import PointNMapShared struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, CustomStringConvertible { let id: String diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift b/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift index 043d19e4..87ed0c3f 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift @@ -6,6 +6,7 @@ // import Foundation import CoreLocation +import PointNMapShared enum AccessibilityFeatureError: Error, LocalizedError { case attributeValueMismatch(attribute: AccessibilityFeatureAttribute, value: AccessibilityFeatureAttribute.Value) @@ -23,14 +24,14 @@ protocol AccessibilityFeatureProtocol: Identifiable, Equatable { var accessibilityFeatureClass: AccessibilityFeatureClass { get } - var locationDetails: OSMLocationDetails? { get set } + var locationDetails: LocationDetails? { get set } var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] { get set } var experimentalAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] { get set } func getLastLocationCoordinate() -> CLLocationCoordinate2D? 
- mutating func setLocationDetails(locationDetails: OSMLocationDetails) + mutating func setLocationDetails(locationDetails: LocationDetails) mutating func setAttributeValue( _ value: AccessibilityFeatureAttribute.Value, diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift b/IOSAccessAssessment/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift index ed001965..54ed29d2 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift @@ -6,6 +6,7 @@ // import Foundation import CoreLocation +import PointNMapShared class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatureProtocol, DetectedFeatureProtocol { let id: UUID @@ -16,7 +17,7 @@ class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatur var selectedAnnotationOption: AnnotationOption = .individualOption(.default) - var locationDetails: OSMLocationDetails? + var locationDetails: LocationDetails? /// If isExisting is false, even if an osw element is associated, it means the feature is new. /// If isExisting is true, it means the feature corresponds to an existing real-world feature, and the oswElement (if present) represents that existing feature in OSW. var isExisting: Bool = false @@ -51,7 +52,7 @@ class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatur id: UUID = UUID(), accessibilityFeatureClass: AccessibilityFeatureClass, contourDetails: ContourDetails, - locationDetails: OSMLocationDetails?, + locationDetails: LocationDetails?, isExisting: Bool = false, oswElement: (any OSWElement)? 
= nil, calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?], @@ -79,7 +80,7 @@ class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatur return lastCoordinate } - func setLocationDetails(locationDetails: OSMLocationDetails) { + func setLocationDetails(locationDetails: LocationDetails) { self.locationDetails = locationDetails } diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift b/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift index 5dd7c258..8ccb9e21 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift @@ -6,13 +6,14 @@ // import Foundation import CoreLocation +import PointNMapShared struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, CustomStringConvertible { let id: UUID let accessibilityFeatureClass: AccessibilityFeatureClass - var locationDetails: OSMLocationDetails? + var locationDetails: LocationDetails? var oswElement: any OSWElement var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] @@ -34,7 +35,7 @@ struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, Custo init( id: UUID = UUID(), accessibilityFeatureClass: AccessibilityFeatureClass, - locationDetails: OSMLocationDetails?, + locationDetails: LocationDetails?, attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:], experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
= [:], oswElement: any OSWElement @@ -53,7 +54,7 @@ struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, Custo return lastCoordinate } - mutating func setLocationDetails(locationDetails: OSMLocationDetails) { + mutating func setLocationDetails(locationDetails: LocationDetails) { self.locationDetails = locationDetails } diff --git a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift b/IOSAccessAssessment/Annotation/AnnotationImageManager.swift index 7e67d554..3f383f73 100644 --- a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift +++ b/IOSAccessAssessment/Annotation/AnnotationImageManager.swift @@ -6,7 +6,7 @@ // import SwiftUI import DequeModule -import PointNMap +import PointNMapShared enum AnnotationImageManagerError: Error, LocalizedError { case notConfigured diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift index 2af0f6ce..64f4da9b 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift +++ b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift @@ -7,7 +7,7 @@ import CoreImage import UIKit -import PointNMap +import PointNMapShared /** A temporary struct to perform rasterization of detected objects. diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift b/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift index ca02d67d..2ab97802 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift +++ b/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift @@ -7,7 +7,7 @@ import CoreImage import UIKit -import PointNMap +import PointNMapShared /** Functions to rasterize mesh triangles into an image. 
diff --git a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift b/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift index 895b2dbc..e8909eb1 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift +++ b/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift @@ -7,7 +7,7 @@ import CoreImage import UIKit -import PointNMap +import PointNMapShared struct PlaneRasterizer { /** diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift b/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift index 4ca77727..ad29cb20 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift +++ b/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift @@ -9,6 +9,7 @@ import ARKit import RealityKit import MetalKit import simd +import PointNMapShared enum SurfaceIntegrityProcessorError: Error, LocalizedError { case metalInitializationFailed diff --git a/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift b/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift index 5ca8e1ad..c2acd72b 100644 --- a/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift +++ b/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift @@ -8,7 +8,7 @@ import CoreLocation import UIKit import MapKit -import PointNMap +import PointNMapShared public extension LocationHelpers { /** @@ -20,7 +20,7 @@ public extension LocationHelpers { First, checks the geometry types of the source and destination location details (e.g., point, linestring, polygon) based on the properties of their last location element. 
Then, based on the geometry types, it calls the appropriate distance calculation method (e.g., distanceBetweenPoints, distanceFromPointToLineString, distanceFromPointToPolygon, distanceBetweenLineStrings, distanceFromLineStringToPolygon, distanceBetweenPolygons) to compute the distance between the two locations. */ static func distanceBetweenSimilarOSMLocationDetails( - srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails ) -> Double? { guard let srcLastLocationElement = srcLocationDetails.locations.last else { return nil @@ -54,7 +54,7 @@ public extension LocationHelpers { Unit of distance is determined by MapKit's MKMapPoint. */ static func distanceBetweenPoints( - srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails ) -> Double? { guard let srcLocationElement = srcLocationDetails.locations.last, srcLocationElement.isWay == false, srcLocationElement.isClosed == false, @@ -77,7 +77,7 @@ public extension LocationHelpers { Converts the coordinates of the linestring into map points, then iterates through each line segment of the linestring and calculates the distance from the point to that line segment using the distanceFromPointToLineSegment method. The minimum distance found across all segments is returned as the distance from the point to the linestring. */ static func distanceFromPointToLineString( - srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails ) -> Double? 
{ guard let srcLocationElement = srcLocationDetails.locations.last, srcLocationElement.isWay == false, srcLocationElement.isClosed == false, @@ -112,7 +112,7 @@ public extension LocationHelpers { Converts the coordinates of the polygon into map points, then iterates through each edge of the polygon and calculates the distance from the point to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges is returned as the distance from the point to the polygon. If the point is inside the polygon, the distance returned is 0. */ static func distanceFromPointToPolygon( - srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails ) -> Double? { guard let srcLocationElement = srcLocationDetails.locations.last, srcLocationElement.isWay == false, srcLocationElement.isClosed == false, @@ -130,14 +130,14 @@ public extension LocationHelpers { } // static func distanceFromPointToMultiPolygon( -// srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails +// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails // ) -> Double? { // var minDistance: Double = Double.infinity // dstLocationDetails.locations.forEach { locationElement in // guard locationElement.isWay == true, locationElement.isClosed == true else { // return // } -// let singlePolygonLocationDetails = OSMLocationDetails(locations: [locationElement]) +// let singlePolygonLocationDetails = LocationDetails(locations: [locationElement]) // if let distance = distanceFromPointToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { // minDistance = min(minDistance, distance) // } @@ -156,7 +156,7 @@ public extension LocationHelpers { The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. 
This is because in some cases, two linestrings may partially overlap with each other, and the distance should reflect how much of the linestrings are outside of each other rather than just indicating that there is some overlap. */ static func distanceBetweenLineStrings( - srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails ) -> Double? { guard let srcLocationElement = srcLocationDetails.locations.last, srcLocationElement.isWay == true, srcLocationElement.isClosed == false, @@ -197,7 +197,7 @@ public extension LocationHelpers { The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, a linestring may partially overlap with a polygon, and the distance should reflect how much of the linestring is outside the polygon rather than just indicating that there is some overlap. */ static func distanceFromLineStringToPolygon( - srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails ) -> Double? { guard let srcLocationElement = srcLocationDetails.locations.last, srcLocationElement.isWay == true, srcLocationElement.isClosed == false, @@ -228,14 +228,14 @@ public extension LocationHelpers { } // static func distanceFromLineStringToMultiPolygon( -// srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails +// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails // ) -> Double? 
{ // var minDistance: Double = Double.infinity // dstLocationDetails.locations.forEach { locationElement in // guard locationElement.isWay == true, locationElement.isClosed == true else { // return // } -// let singlePolygonLocationDetails = OSMLocationDetails(locations: [locationElement]) +// let singlePolygonLocationDetails = LocationDetails(locations: [locationElement]) // if let distance = distanceFromLineStringToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { // minDistance = min(minDistance, distance) // } @@ -254,7 +254,7 @@ public extension LocationHelpers { The logic for overlapping polygons needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, two polygons may partially overlap with each other, and the distance should reflect how much of the polygons are outside of each other rather than just indicating that there is some overlap. */ static func distanceBetweenPolygons( - srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails ) -> Double? { guard let srcLocationElement = srcLocationDetails.locations.last, srcLocationElement.isWay == true, srcLocationElement.isClosed == true, @@ -293,7 +293,7 @@ public extension LocationHelpers { Currently, this algorithm doesn't actually consider the relation role of each multi-polygon member (e.g. outer vs inner), which can lead to inaccurate distance calculations in some cases. For example, if one of the multi-polygons has an inner member that overlaps with the other multi-polygon, the distance should be negative to reflect the degree of overlap. However, without considering the relation type, the algorithm may simply return a distance of 0 for this case, which does not accurately capture the spatial relationship between the two multi-polygons. 
*/ // static func distanceBetweenMultiPolygons( -// srcLocationDetails: OSMLocationDetails, dstLocationDetails: OSMLocationDetails +// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails // ) -> Double? { // let srcLocationCoordinateArrays = srcLocationDetails.locations // let dstLocationCoordinateArrays = dstLocationDetails.locations @@ -304,8 +304,8 @@ public extension LocationHelpers { // var minDistance: Double = Double.infinity // for srcLocationCoordinateArray in srcLocationCoordinateArrays { // for dstLocationCoordinateArray in dstLocationCoordinateArrays { -// let srcOSMLocationDetails = OSMLocationDetails(locations: [srcLocationCoordinateArray]) -// let dstOSMLocationDetails = OSMLocationDetails(locations: [dstLocationCoordinateArray]) +// let srcOSMLocationDetails = LocationDetails(locations: [srcLocationCoordinateArray]) +// let dstOSMLocationDetails = LocationDetails(locations: [dstLocationCoordinateArray]) // /// While deciding the geometry, we are not using the .polygon enumeration, since that actually represents a multipolygon in OSW. // let srcGeometry: OSWGeometry = srcLocationCoordinateArray.isWay ? 
.linestring : .point // let isSrcPolygon = srcLocationCoordinateArray.isWay && srcLocationCoordinateArray.isClosed diff --git a/IOSAccessAssessment/IOSAccessAssessmentApp.swift b/IOSAccessAssessment/IOSAccessAssessmentApp.swift index f0f16cb8..e064229e 100644 --- a/IOSAccessAssessment/IOSAccessAssessmentApp.swift +++ b/IOSAccessAssessment/IOSAccessAssessmentApp.swift @@ -7,6 +7,7 @@ import SwiftUI import TipKit +import PointNMapShared enum AppConstants { enum Texts { diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift index e1ecc401..45e3dab1 100644 --- a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMapShared struct AccessibilityFeatureSnapshot: Codable, Identifiable, Sendable { var frames: [UUID] = [] @@ -20,7 +21,7 @@ struct AccessibilityFeatureSnapshot: Codable, Identifiable, Sendable { var selectedAnnotationOption: String - var locationDetails: OSMLocationDetails? + var locationDetails: LocationDetails? var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
diff --git a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift b/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift index a74aca19..2e183dfd 100644 --- a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift +++ b/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift @@ -7,7 +7,7 @@ import CoreImage import UIKit -import PointNMap +import PointNMapShared struct DamageDetectionRasterizer { static func rasterizeDamageDetection( diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift index 0a870f8c..c0202924 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift +++ b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift @@ -11,7 +11,7 @@ import CoreML import OrderedCollections import simd -import PointNMap +import PointNMapShared enum SegmentationARPipelineError: Error, LocalizedError { case isProcessingTrue diff --git a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift index d45f2138..1a272a49 100644 --- a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift +++ b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift @@ -7,7 +7,7 @@ import Foundation import CoreLocation -import PointNMap +import PointNMapShared enum CurrentMappingDataError: Error, LocalizedError { } @@ -174,7 +174,7 @@ class CurrentMappingData: CustomStringConvertible { It iterates through the features of the specified class, calculates the distance from each feature to the given OSM location details, and keeps track of the nearest feature found that is within the distance threshold. If no features are found within the threshold, it returns nil. 
*/ func getNearestFeature( - to osmLocationDetails: OSMLocationDetails, featureClass: AccessibilityFeatureClass, + to LocationDetails: LocationDetails, featureClass: AccessibilityFeatureClass, distanceThreshold: CLLocationDistance = 50.0 ) -> (any OSWElement)? { guard let featureIds = featuresMap[featureClass] else { return nil } @@ -188,7 +188,7 @@ class CurrentMappingData: CustomStringConvertible { feature: feature, geometry: geometry ) else { continue } guard let distance = LocationHelpers.distanceBetweenSimilarOSMLocationDetails( - srcLocationDetails: featureOSMLocationDetails, dstLocationDetails: osmLocationDetails + srcLocationDetails: featureOSMLocationDetails, dstLocationDetails: LocationDetails ) else { continue } if distance < nearestDistance { nearestFeature = feature @@ -202,7 +202,7 @@ class CurrentMappingData: CustomStringConvertible { This function takes in OSM location details, an accessibility feature class, and a capture ID, and returns the feature of that class whose capture ID matches the given capture ID. */ func getCaptureMatchedFeature( - to osmLocationDetails: OSMLocationDetails, featureClass: AccessibilityFeatureClass, + to LocationDetails: LocationDetails, featureClass: AccessibilityFeatureClass, captureId: UUID ) -> (any OSWElement)? { guard let featureIds = featuresMap[featureClass] else { return nil } @@ -226,20 +226,20 @@ class CurrentMappingData: CustomStringConvertible { It first attempts to find a feature that matches the capture ID, and if it finds one, it directly returns it. Else, it falls back to finding the nearest feature within the distance threshold. */ func getMatchedFeature( - to osmLocationDetails: OSMLocationDetails, featureClass: AccessibilityFeatureClass, + to LocationDetails: LocationDetails, featureClass: AccessibilityFeatureClass, captureId: UUID?, distanceThreshold: CLLocationDistance = 50.0 ) -> (any OSWElement)? 
{ if let captureId = captureId { let captureMatchedFeature = getCaptureMatchedFeature( - to: osmLocationDetails, featureClass: featureClass, captureId: captureId + to: LocationDetails, featureClass: featureClass, captureId: captureId ) if let captureMatchedFeature = captureMatchedFeature { return captureMatchedFeature } } return getNearestFeature( - to: osmLocationDetails, featureClass: featureClass, distanceThreshold: distanceThreshold + to: LocationDetails, featureClass: featureClass, distanceThreshold: distanceThreshold ) } @@ -270,37 +270,37 @@ class CurrentMappingData: CustomStringConvertible { /// Note: OSWGeometry is not required as a parameter here since the feature itself carries geometry information based on the type of OSWElement it is. private func getFeatureOSMLocationDetails( feature: any OSWElement, geometry: OSWGeometry - ) -> OSMLocationDetails? { + ) -> LocationDetails? { switch geometry { case .point: guard let point = feature as? OSWPoint else { return nil } let coordinates: [CLLocationCoordinate2D] = [CLLocationCoordinate2D( latitude: point.latitude, longitude: point.longitude )] - let osmLocationElement: OSMLocationElement = OSMLocationElement( + let LocationElement: LocationElement = LocationElement( coordinates: coordinates, isWay: false, isClosed: false ) - return OSMLocationDetails(locations: [osmLocationElement]) + return LocationDetails(locations: [LocationElement]) case .linestring: guard let lineString = feature as? OSWLineString else { return nil } let coordinates: [CLLocationCoordinate2D] = lineString.pointRefs.compactMap { pointRef in guard let point = self.getFeature(featureId: pointRef, geometry: .point) as? 
OSWPoint else { return nil } return CLLocationCoordinate2D(latitude: point.latitude, longitude: point.longitude) } - let osmLocationElement: OSMLocationElement = OSMLocationElement( + let LocationElement: LocationElement = LocationElement( coordinates: coordinates, isWay: true, isClosed: false ) - return OSMLocationDetails(locations: [osmLocationElement]) + return LocationDetails(locations: [LocationElement]) case .polygon: guard let polygon = feature as? OSWPolygon else { return nil } let coordinates: [CLLocationCoordinate2D] = polygon.pointRefs.compactMap { pointRef in guard let point = self.getFeature(featureId: pointRef, geometry: .point) as? OSWPoint else { return nil } return CLLocationCoordinate2D(latitude: point.latitude, longitude: point.longitude) } - let osmLocationElement: OSMLocationElement = OSMLocationElement( + let LocationElement: LocationElement = LocationElement( coordinates: coordinates, isWay: true, isClosed: true ) - return OSMLocationDetails(locations: [osmLocationElement]) + return LocationDetails(locations: [LocationElement]) } } } diff --git a/IOSAccessAssessment/Shared/SharedAppData.swift b/IOSAccessAssessment/Shared/SharedAppData.swift index 15eedd45..e38e9d37 100644 --- a/IOSAccessAssessment/Shared/SharedAppData.swift +++ b/IOSAccessAssessment/Shared/SharedAppData.swift @@ -8,7 +8,7 @@ import SwiftUI import DequeModule import simd -import PointNMap +import PointNMapShared @MainActor final class SharedAppData: ObservableObject { diff --git a/IOSAccessAssessment/TDEI/OSW/OSWElement.swift b/IOSAccessAssessment/TDEI/OSW/OSWElement.swift index 0eaa8059..5091904f 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWElement.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWElement.swift @@ -6,6 +6,7 @@ // import Foundation import CoreLocation +import PointNMapShared protocol OSWElement: Sendable, CustomStringConvertible { var osmElementType: OSMElementType { get } diff --git a/IOSAccessAssessment/TDEI/OSW/OSWLineString.swift 
b/IOSAccessAssessment/TDEI/OSW/OSWLineString.swift index 6a6dce22..6552ee35 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWLineString.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWLineString.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMapShared struct OSWLineString: OSWElement { let osmElementType: OSMElementType = .way diff --git a/IOSAccessAssessment/TDEI/OSW/OSWMultiPolygon.swift b/IOSAccessAssessment/TDEI/OSW/OSWMultiPolygon.swift index 17115966..92392f4c 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWMultiPolygon.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWMultiPolygon.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMapShared struct OSWRelationMemberRef: Sendable { let type: OSMElementType diff --git a/IOSAccessAssessment/TDEI/OSW/OSWPoint.swift b/IOSAccessAssessment/TDEI/OSW/OSWPoint.swift index 1ed4ea2f..965c12f1 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWPoint.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWPoint.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMapShared struct OSWPoint: OSWElement { let osmElementType: OSMElementType = .node diff --git a/IOSAccessAssessment/TDEI/OSW/OSWPolygon.swift b/IOSAccessAssessment/TDEI/OSW/OSWPolygon.swift index 7a1ffe63..30394d09 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWPolygon.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWPolygon.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMapShared struct OSWPolygon: OSWElement { let osmElementType: OSMElementType = .way diff --git a/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift b/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift index ed0c339b..99b834a1 100644 --- a/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift +++ b/IOSAccessAssessment/TDEI/Services/WorkspaceService.swift @@ -7,7 +7,7 @@ import Foundation import CoreLocation -import PointNMap +import PointNMapShared struct Workspace: Codable, Hashable { let id: Int diff --git 
a/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift b/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift index 25177a39..95c400db 100644 --- a/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift +++ b/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShared enum APIChangesetUploadError: Error, LocalizedError { case featureClassNotLineString(AccessibilityFeatureClass) @@ -379,7 +380,7 @@ extension APIChangesetUploadController { ) -> (mainOperations: [ChangesetDiffOperation], auxOperations: [ChangesetDiffOperation]) { let oswElementClass = feature.accessibilityFeatureClass.oswPolicy.oswElementClass guard oswElementClass.geometry == .linestring else { return ([], []) } - guard let featureLocationElement: OSMLocationElement = feature.locationDetails?.locations.first, + guard let featureLocationElement: LocationElement = feature.locationDetails?.locations.first, featureLocationElement.isWay, !featureLocationElement.isClosed else { return ([], []) } @@ -446,7 +447,7 @@ extension APIChangesetUploadController { ) -> (mainOperations: [ChangesetDiffOperation], auxOperations: [ChangesetDiffOperation]) { let oswElementClass = feature.accessibilityFeatureClass.oswPolicy.oswElementClass guard oswElementClass.geometry == .polygon else { return ([], []) } - guard let featureLocationElement: OSMLocationElement = feature.locationDetails?.locations.first, + guard let featureLocationElement: LocationElement = feature.locationDetails?.locations.first, featureLocationElement.isWay, featureLocationElement.isClosed else { return ([], []) } diff --git a/IOSAccessAssessment/View/ARCameraView.swift b/IOSAccessAssessment/View/ARCameraView.swift index d4496b08..97b73628 100644 --- a/IOSAccessAssessment/View/ARCameraView.swift +++ b/IOSAccessAssessment/View/ARCameraView.swift @@ -12,7 +12,7 @@ import Metal import CoreImage import MetalKit 
import CoreLocation -import PointNMap +import PointNMapShared enum ARCameraViewConstants { enum Texts { diff --git a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift index 35760471..eacc2c4a 100644 --- a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift +++ b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift @@ -6,6 +6,7 @@ // import SwiftUI +import PointNMapShared /** A view that displays detailed information about an accessibility feature annotation. diff --git a/IOSAccessAssessment/View/TestMode/TestCameraView.swift b/IOSAccessAssessment/View/TestMode/TestCameraView.swift index 9cec20a4..21f12e3d 100644 --- a/IOSAccessAssessment/View/TestMode/TestCameraView.swift +++ b/IOSAccessAssessment/View/TestMode/TestCameraView.swift @@ -7,7 +7,7 @@ import SwiftUI import CoreLocation -import PointNMap +import PointNMapShared /** Additional constants unique to TestCameraView (not used in ARCameraView) diff --git a/PointNMapShared/PointNMapShared.docc/PointNMapShared.md b/PointNMapShared/PointNMapShared.docc/PointNMapShared.md new file mode 100644 index 00000000..a7404f02 --- /dev/null +++ b/PointNMapShared/PointNMapShared.docc/PointNMapShared.md @@ -0,0 +1,13 @@ +# ``PointNMapShared`` + +Summary + +## Overview + +Text + +## Topics + +### Group + +- ``Symbol`` \ No newline at end of file diff --git a/PointNMapShared/PointNMapShared.swift b/PointNMapShared/PointNMapShared.swift new file mode 100644 index 00000000..8959ce4a --- /dev/null +++ b/PointNMapShared/PointNMapShared.swift @@ -0,0 +1,9 @@ +// +// PointNMapShared.swift +// PointNMapShared +// +// Created by Himanshu on 4/30/26. 
//
//  AccessibilityFeatureAttribute.swift
//  IOSAccessAssessment
//
//  Created by Himanshu on 11/9/25.
//

import Foundation

/**
 Enumeration defining the supported accessibility feature attributes, along with their
 metadata (stable numeric id, display name, unit) and the kind of value each one carries.

 - Note: One needs to be aware of the value types associated with each attribute.
   The `valueType` property is only meant for reference; use `isCompatible(with:)`
   to validate a `Value` before storing it against an attribute.
 */
public enum AccessibilityFeatureAttribute: String, Identifiable, CaseIterable, Codable, Sendable, Comparable {
    case width
    case runningSlope
    case crossSlope
    case surfaceIntegrity
    /// - NOTE: Experimental attributes
    case lidarDepth
    case latitudeDelta
    case longitudeDelta
    /// - NOTE: Legacy attributes for comparison with older data
    case widthLegacy
    case runningSlopeLegacy
    case crossSlopeLegacy
    case widthFromImage
    case runningSlopeFromImage
    case crossSlopeFromImage

    /// The kind of payload an attribute accepts. Categorical payloads are further
    /// discriminated by the `typeID` of the concrete categorical enum.
    public enum ValueType: Sendable, Codable, Equatable {
        case length
        case angle
        case flag
        case categorical(typeID: String)
    }

    /// A concrete attribute value.
    ///
    /// Equatable and Codable conformances are compiler-synthesized: every associated
    /// value (`Measurement<UnitLength>`, `Measurement<UnitAngle>`, `Bool`,
    /// `AnyCategoricalValue`) is itself Equatable and Codable, and the synthesized
    /// case-wise comparison matches the previous hand-written `==`.
    public enum Value: Sendable, Codable, Equatable {
        case length(Measurement<UnitLength>)
        case angle(Measurement<UnitAngle>)
        case flag(Bool)
        case categorical(AnyCategoricalValue)
    }

    /// Bundle of per-case metadata; produced by the private `metadata` property below.
    public struct Metadata {
        let id: Int          // stable numeric id; also the Comparable sort key
        let name: String     // human-readable display name
        let unit: Dimension? // preferred display unit, nil for unit-less attributes
        let valueType: ValueType
    }

    /// Single source of truth for each case's metadata.
    private var metadata: Metadata {
        switch self {
        case .width:
            return Metadata(id: 10, name: "Width", unit: UnitLength.meters, valueType: .length)
        case .runningSlope:
            return Metadata(id: 20, name: "Running Slope", unit: UnitAngle.degrees, valueType: .angle)
        case .crossSlope:
            return Metadata(id: 30, name: "Cross Slope", unit: UnitAngle.degrees, valueType: .angle)
        case .surfaceIntegrity:
            return Metadata(id: 40, name: "Surface Integrity", unit: nil,
                            valueType: .categorical(typeID: SurfaceIntegrityStatus.typeID))
        case .lidarDepth:
            return Metadata(id: 50, name: "LiDAR Depth", unit: UnitLength.meters, valueType: .length)
        case .latitudeDelta:
            return Metadata(id: 60, name: "Latitude Delta", unit: UnitLength.meters, valueType: .length)
        case .longitudeDelta:
            return Metadata(id: 70, name: "Longitude Delta", unit: UnitLength.meters, valueType: .length)
        case .widthLegacy:
            return Metadata(id: 15, name: "Width Legacy", unit: UnitLength.meters, valueType: .length)
        case .runningSlopeLegacy:
            return Metadata(id: 25, name: "Running Slope Legacy", unit: UnitAngle.degrees, valueType: .angle)
        case .crossSlopeLegacy:
            return Metadata(id: 35, name: "Cross Slope Legacy", unit: UnitAngle.degrees, valueType: .angle)
        case .widthFromImage:
            return Metadata(id: 16, name: "Width from Image", unit: UnitLength.meters, valueType: .length)
        case .runningSlopeFromImage:
            return Metadata(id: 26, name: "Running Slope from Image", unit: UnitAngle.degrees, valueType: .angle)
        case .crossSlopeFromImage:
            return Metadata(id: 36, name: "Cross Slope from Image", unit: UnitAngle.degrees, valueType: .angle)
        }
    }

    /// Stable numeric identifier (also drives `Comparable` ordering).
    public var id: Int { metadata.id }

    /// Human-readable display name.
    public var name: String { metadata.name }

    /// Preferred display unit, or nil for unit-less attributes.
    public var unit: Dimension? { metadata.unit }

    /// The kind of `Value` this attribute accepts.
    public var valueType: ValueType { metadata.valueType }

    /// Display name with the unit symbol appended, e.g. "Width (m)".
    public var displayName: String {
        if let unit = unit {
            return "\(name) (\(unit.symbol))"
        }
        return name
    }

    public static func < (lhs: AccessibilityFeatureAttribute, rhs: AccessibilityFeatureAttribute) -> Bool {
        lhs.id < rhs.id
    }
}

public extension AccessibilityFeatureAttribute.Value {
    /// The `ValueType` corresponding to this payload.
    var valueType: AccessibilityFeatureAttribute.ValueType {
        switch self {
        case .length: return .length
        case .angle: return .angle
        case .flag: return .flag
        case .categorical(let categoricalValue): return .categorical(typeID: categoricalValue.typeID)
        }
    }
}

/**
 Compatibility checks between attributes and candidate values.
 */
public extension AccessibilityFeatureAttribute {
    /// Whether `value`'s payload kind matches this attribute's `valueType`.
    /// For categorical values the payload's `typeID` must also match.
    func isCompatible(with value: Value) -> Bool {
        switch (self.valueType, value) {
        case (.length, .length),
             (.angle, .angle),
             (.flag, .flag):
            return true
        case (.categorical(let expectedID), .categorical(let cat)):
            return cat.typeID == expectedID
        default:
            return false
        }
    }
}

/**
 Conversion of AccessibilityFeatureAttribute.Value to and from primitive types.
 */
public extension AccessibilityFeatureAttribute.Value {
    /// Numeric representation: meters for lengths, degrees for angles; nil otherwise.
    func toDouble() -> Double? {
        switch self {
        case .length(let measurement):
            return measurement.converted(to: .meters).value
        case .angle(let measurement):
            return measurement.converted(to: .degrees).value
        case .flag, .categorical:
            return nil
        }
    }

    /// Boolean representation; only `.flag` payloads convert.
    func toBool() -> Bool? {
        guard case .flag(let value) = self else { return nil }
        return value
    }

    /// String representation: "%.2f" in canonical units (meters/degrees) for
    /// measurements, "yes"/"no" for flags, the raw value for categoricals.
    func toString() -> String? {
        switch self {
        case .length(let measurement):
            return String(format: "%.2f", measurement.converted(to: .meters).value)
        case .angle(let measurement):
            return String(format: "%.2f", measurement.converted(to: .degrees).value)
        case .flag(let value):
            return value ? "yes" : "no"
        case .categorical(let value):
            return value.rawValue
        }
    }
}

public extension AccessibilityFeatureAttribute {
    /// Builds a Value from a Double (interpreted as meters for length attributes,
    /// degrees for angle attributes). Returns nil for flag/categorical attributes,
    /// which cannot be represented as doubles.
    func value(from double: Double) -> Value? {
        switch self.valueType {
        case .length:
            return .length(Measurement(value: double, unit: .meters))
        case .angle:
            return .angle(Measurement(value: double, unit: .degrees))
        case .flag, .categorical:
            return nil
        }
    }

    /// Builds a Value from a Bool; only flag attributes accept booleans.
    func value(from bool: Bool) -> Value? {
        guard case .flag = self.valueType else { return nil }
        return .flag(bool)
    }

    /// Builds a Value from a concrete categorical value. Returns nil when this
    /// attribute is not categorical or the categorical's typeID does not match.
    func value<T: FeatureCategorical>(from categorical: T) -> Value? {
        guard case .categorical(let expectedID) = self.valueType, T.typeID == expectedID else {
            return nil
        }
        return .categorical(AnyCategoricalValue(categorical))
    }

    /// Builds a Value from a categorical raw string, validating it against the
    /// attribute's categorical type. `SurfaceIntegrityStatus` is special-cased;
    /// every other type is resolved through `CategoricalAttributeRegistry`.
    func value(from categoricalRawValue: String) -> Value? {
        guard case .categorical(let expectedID) = self.valueType else {
            return nil // only categorical attributes accept raw categorical strings
        }
        if expectedID == SurfaceIntegrityStatus.typeID {
            guard let categoricalValue = SurfaceIntegrityStatus(rawValue: categoricalRawValue) else {
                return nil // invalid raw value for Surface Integrity
            }
            return .categorical(AnyCategoricalValue(categoricalValue))
        }
        guard let decoded = CategoricalAttributeRegistry.decodeToCategoricalValue(
            typeID: expectedID,
            raw: categoricalRawValue
        ) else {
            return nil
        }
        return .categorical(decoded)
    }

    /// All valid options for a categorical attribute; empty for non-categorical ones.
    func categoricalOptions() -> [AnyCategoricalValue] {
        guard case .categorical(let typeID) = self.valueType else {
            return []
        }
        if typeID == SurfaceIntegrityStatus.typeID {
            return SurfaceIntegrityStatus.allCases.map { AnyCategoricalValue($0) }
        }
        return CategoricalAttributeRegistry.cases(for: typeID) ?? []
    }

    /// Display string for `attributeValue` when its payload kind matches this
    /// attribute's `valueType`: "%.2f" in meters/degrees for measurements, the
    /// raw value for categoricals. Returns nil for nil input, mismatched kinds,
    /// or flag payloads (no current attribute carries a flag).
    func getValueDescription(attributeValue: Value?) -> String? {
        guard let attributeValue = attributeValue else {
            return nil
        }
        switch (self.valueType, attributeValue) {
        case (.length, .length(let measurement)):
            return String(format: "%.2f", measurement.converted(to: .meters).value)
        case (.angle, .angle(let measurement)):
            return String(format: "%.2f", measurement.converted(to: .degrees).value)
        case (.categorical, .categorical(let categoricalValue)):
            return categoricalValue.rawValue
        default:
            return nil
        }
    }
}
//
//  CategoricalAttribute.swift
//  IOSAccessAssessment
//
//  Created by Himanshu on 4/12/26.
//

import Foundation

/// A categorical (string-backed, enumerable) attribute value type.
/// Conformers supply a stable `typeID` that tags their serialized values.
public protocol FeatureCategorical: Codable, CaseIterable, Sendable, RawRepresentable where RawValue == String {
    /// Unique, stable identifier for this categorical type (used in persistence).
    static var typeID: String { get }
}

/// Type-erased wrapper around any `FeatureCategorical` value, storing only the
/// type's `typeID` and the value's raw string so it can be persisted uniformly.
public struct AnyCategoricalValue: Codable, Sendable, Equatable, Hashable {
    public let typeID: String
    public let rawValue: String

    // Codable conformance is compiler-synthesized; both stored properties are
    // plain Strings, so the synthesized init(from:)/encode(to:) use the same
    // keys a hand-written implementation would.

    public init(typeID: String, rawValue: String) {
        self.typeID = typeID
        self.rawValue = rawValue
    }

    /// Erases a concrete categorical value.
    public init<T: FeatureCategorical>(_ value: T) {
        self.typeID = T.typeID
        self.rawValue = value.rawValue
    }

    /// Attempts to recover the concrete categorical type. Returns nil when the
    /// typeID does not match `type` or the raw value is not a valid case.
    public func value<T: FeatureCategorical>(as type: T.Type) -> T? {
        guard type.typeID == self.typeID else {
            return nil
        }
        return T(rawValue: self.rawValue)
    }
}

/// Degree of surface damage, ordered from least to most severe.
public enum SurfaceIntegrityStatus: String, FeatureCategorical, Comparable {
    case intact
    case slight
    case moderate
    case severe

    /// - Warning: This typeID must not be changed, as it is used to locally store
    ///   accessibility feature details, and changing it would break the decoding of
    ///   existing data. If you need to change the typeID for some reason, please
    ///   implement a migration strategy to update existing stored data with the new typeID.
    public static let typeID = "surface_integrity_status"

    /// Ordinal severity rank backing `Comparable` (intact < slight < moderate < severe).
    private var severityRank: Int {
        switch self {
        case .intact: return 0
        case .slight: return 1
        case .moderate: return 2
        case .severe: return 3
        }
    }

    public static func < (lhs: SurfaceIntegrityStatus, rhs: SurfaceIntegrityStatus) -> Bool {
        lhs.severityRank < rhs.severityRank
    }
}

/**
 A registry for categorical attributes that allows for dynamic registration and decoding
 of categorical types based on their unique type identifiers. This enables the system to
 support a wide range of categorical attributes without hardcoding each type, making it
 extensible and adaptable to future needs.
 */
public struct CategoricalAttributeRegistry {

    // NOTE(review): this static mutable state is not synchronized. It appears to be
    // populated once via registerAll() before concurrent reads — confirm call sites
    // before relying on it from multiple threads / Swift 6 strict concurrency.
    private static var decoders: [String: (String) -> Any?] = [:]
    private static var allCases: [String: () -> [AnyCategoricalValue]] = [:]

    /// Registers a categorical type so its raw values can be decoded and enumerated
    /// by `typeID`. Re-registering a typeID overwrites the previous entry.
    public static func register<T: FeatureCategorical>(_ type: T.Type) {
        decoders[T.typeID] = { raw in
            T(rawValue: raw)
        }
        allCases[T.typeID] = {
            T.allCases.map { AnyCategoricalValue($0) }
        }
    }

    /// Registers every known categorical type; call once at startup.
    public static func registerAll() {
        register(SurfaceIntegrityStatus.self)
    }

    /// Decodes `raw` into the concrete categorical value for `typeID`,
    /// or nil when the typeID is unregistered or the raw value is invalid.
    public static func decode(typeID: String, raw: String) -> Any? {
        decoders[typeID]?(raw)
    }

    /// Like `decode(typeID:raw:)` but returns the type-erased wrapper,
    /// validating `raw` against the registered type first.
    public static func decodeToCategoricalValue(typeID: String, raw: String) -> AnyCategoricalValue? {
        guard decode(typeID: typeID, raw: raw) != nil else {
            return nil
        }
        return AnyCategoricalValue(typeID: typeID, rawValue: raw)
    }

    /// All cases of the registered type for `typeID`, or nil when unregistered.
    public static func cases(for typeID: String) -> [AnyCategoricalValue]? {
        allCases[typeID]?()
    }
}
a/IOSAccessAssessment/PointNMapShared/Sources/PointNMap/Shared/Utils/Extensions.swift +++ b/PointNMapShared/Sources/PointNMap/Shared/Utils/Extensions.swift @@ -8,7 +8,7 @@ import Foundation public extension Double { - public func roundedTo7Digits() -> Double { + func roundedTo7Digits() -> Double { (self * 1_000_0000).rounded() / 1_000_0000 } } diff --git a/PointNMapSharedTests/PointNMapSharedTests.swift b/PointNMapSharedTests/PointNMapSharedTests.swift new file mode 100644 index 00000000..36e7e7c5 --- /dev/null +++ b/PointNMapSharedTests/PointNMapSharedTests.swift @@ -0,0 +1,17 @@ +// +// PointNMapSharedTests.swift +// PointNMapSharedTests +// +// Created by Himanshu on 4/30/26. +// + +import Testing +@testable import PointNMapShared + +struct PointNMapSharedTests { + + @Test func example() async throws { + // Write your test here and use APIs like `#expect(...)` to check expected conditions. + } + +} From b27ec8f5ccf3c2070633fc104b5a418535029a4d Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 13:04:48 -0700 Subject: [PATCH 05/14] First solution attempt at creating a common ShaderTypes file --- IOSAccessAssessment.xcodeproj/project.pbxproj | 314 +++++++++++++----- ...essibilityFeatureAttributeExtension.swift} | 0 ...swift => DepthMapProcessorExtension.swift} | 91 +---- .../UnionOfMasks/UnionOfMasksProcessor.swift | 1 + .../PointNMapShaderTypes.md | 13 + PointNMapShaderTypes/PointNMapShaderTypes.h | 17 + .../ShaderTypes.h | 0 .../Stub.m | 1 + PointNMapShared/PointNMapShared.h | 15 + PointNMapShared/PointNMapShared.swift | 7 + .../Components/ContourDetectionPolicy.swift | 25 ++ .../Components/UnionOfMasksPolicy.swift | 34 ++ .../Image/Depth/DepthFilter.swift | 4 +- .../Image/Depth/DepthFiltering.metal | 2 +- .../Image/Depth/DepthMapProcessor.swift | 93 ++++++ .../Image/Utils/CGImageUtils.swift | 19 ++ .../Image/Utils/CIImageUtils.swift | 215 ++++++++++++ .../Image/Utils/CVPixelBufferUtils.swift | 301 +++++++++++++++++ 
.../Utils/CenterCropTransformUtils.metal | 36 ++ .../Utils/CenterCropTransformUtils.swift | 117 +++++++ .../CenterCropTransformUtilsExtension.swift | 244 ++++++++++++++ .../Image/Utils/MTLTextureUtils.swift | 25 ++ .../Geospatial/LocationManager.swift | 1 + 23 files changed, 1397 insertions(+), 178 deletions(-) rename IOSAccessAssessment/AccessibilityFeature/Attributes/{AccessibilityFeatureAttribute.swift => AccessibilityFeatureAttributeExtension.swift} (100%) rename IOSAccessAssessment/ComputerVision/Image/Depth/{DepthMapProcessor.swift => DepthMapProcessorExtension.swift} (54%) create mode 100644 PointNMapShaderTypes/PointNMapShaderTypes.docc/PointNMapShaderTypes.md create mode 100644 PointNMapShaderTypes/PointNMapShaderTypes.h rename {IOSAccessAssessment => PointNMapShaderTypes}/ShaderTypes.h (100%) rename {IOSAccessAssessment => PointNMapShaderTypes}/Stub.m (82%) create mode 100644 PointNMapShared/PointNMapShared.h create mode 100644 PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/ContourDetectionPolicy.swift create mode 100644 PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/UnionOfMasksPolicy.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Depth/DepthFilter.swift (96%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Depth/DepthFiltering.metal (96%) create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.metal create mode 100644 
PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtilsExtension.swift create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/MTLTextureUtils.swift diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 9cc41e71..648c8acc 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -27,7 +27,7 @@ A30801612EC09BB700B1BA3A /* CocoCustom53ClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801592EC09BB700B1BA3A /* CocoCustom53ClassConfig.swift */; }; A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */; }; A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */; }; - A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttribute.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttribute.swift */; }; + A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */; }; A30BED382ED162E7004A5B51 /* MeshDefinitions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30BED372ED162E2004A5B51 /* MeshDefinitions.swift */; }; A30BED3A2ED162F1004A5B51 /* ConnectedComponents.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30BED392ED162ED004A5B51 /* ConnectedComponents.swift */; }; A30BED3C2ED2F48B004A5B51 /* MeshClusteringUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30BED3B2ED2F487004A5B51 /* MeshClusteringUtils.swift */; }; @@ -38,13 +38,14 @@ A30F59D02F7EFAC700EE7804 /* RunninSlopeExtension.swift 
in Sources */ = {isa = PBXBuildFile; fileRef = A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */; }; A30F59D22F7EFACD00EE7804 /* CrossSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */; }; A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */; }; - A312FD742FA336020044808E /* DepthFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FD722FA336020044808E /* DepthFilter.swift */; }; - A312FD752FA336020044808E /* DepthFiltering.metal in Sources */ = {isa = PBXBuildFile; fileRef = A312FD732FA336020044808E /* DepthFiltering.metal */; }; A312FD862FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; A312FD902FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; A312FD912FA3391C0044808E /* PointNMapShared.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + A312FDCD2FA3DBD50044808E /* DepthMapProcessorExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FDCC2FA3DBD10044808E /* DepthMapProcessorExtension.swift */; }; + A312FE152FA3EBE80044808E /* PointNMapShaderTypes.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; }; + A312FE162FA3EBE80044808E /* PointNMapShaderTypes.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + A312FE202FA3EC710044808E /* PointNMapShaderTypes.framework in Frameworks 
*/ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; }; A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */; }; - A3281AEE2F3950210003E396 /* MTLTextureUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3281AED2F39501E0003E396 /* MTLTextureUtils.swift */; }; A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */; }; A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */; }; A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943472EE7C0D800C4C1BC /* OSWElementClass.swift */; }; @@ -105,15 +106,12 @@ A37E721D2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */; }; A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338BE2EDA889A00F1A402 /* CustomPicker.swift */; }; A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */; }; - A38338C62EDAF3E900F1A402 /* DepthMapProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338C52EDAF3E500F1A402 /* DepthMapProcessor.swift */; }; A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3A2DD9B03000455E45 /* OSMElement.swift */; }; A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */; }; A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A4139F2EC86D210039298C /* 
AnnotationImageManager.swift */; }; A3A413A22EC9C3FA0039298C /* MeshRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A12EC9C3F60039298C /* MeshRasterizer.swift */; }; A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A52ECD86260039298C /* AccessibilityFeature.swift */; }; A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413AC2ECF94950039298C /* DBSCAN.swift */; }; - A3A45F0A2EE7A4E10029F5AE /* UnionOfMasksPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A45F092EE7A4DE0029F5AE /* UnionOfMasksPolicy.swift */; }; - A3A45F0C2EE7A4F40029F5AE /* ContourDetectionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A45F0B2EE7A4F10029F5AE /* ContourDetectionPolicy.swift */; }; A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */; }; A3AC01AF2F294CCD00A1D0E5 /* PlaneRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3AC01AE2F294CCA00A1D0E5 /* PlaneRasterizer.swift */; }; A3B2DDBF2DC99DEF003416FB /* HomographyRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B2DDBE2DC99DE9003416FB /* HomographyRequestProcessor.swift */; }; @@ -132,15 +130,12 @@ A3C1D7442F886D3500833411 /* SurfaceIntegrity.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7432F886D3100833411 /* SurfaceIntegrity.metal */; }; A3C1D7472F886D9D00833411 /* SurfaceIntegrityFromImageExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7462F886D8E00833411 /* SurfaceIntegrityFromImageExtension.swift */; }; A3C1D7492F886DDE00833411 /* SurfaceIntegrityFromMeshExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7482F886DD800833411 /* SurfaceIntegrityFromMeshExtension.swift */; }; - A3C22FD32CF194A600533BF7 /* CGImageUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C22FD22CF194A200533BF7 /* 
CGImageUtils.swift */; }; A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */ = {isa = PBXBuildFile; productRef = A3C22FD72CF2F0C300533BF7 /* DequeModule */; }; A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */; }; - A3C55A492EAFFABF00F6CFDC /* CenterCropTransformUtilsExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C55A482EAFFAB600F6CFDC /* CenterCropTransformUtilsExtension.swift */; }; A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */; }; A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D752E654F14003BFE78 /* ProfileView.swift */; }; A3DA4DA82EB94D84005BB812 /* MeshGPUSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DA72EB94D81005BB812 /* MeshGPUSnapshot.swift */; }; A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */; }; - A3DA4DB62EBAE101005BB812 /* Stub.m in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DB52EBAE101005BB812 /* Stub.m */; }; A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */; }; A3DA4DBE2EBCB9F9005BB812 /* MetalContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */; }; A3DC22E92DCF0F9A0020CE84 /* ImageProcessing.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22E82DCF0F9A0020CE84 /* ImageProcessing.metal */; }; @@ -149,8 +144,6 @@ A3DC22F72DD032960020CE84 /* UnionOfMasks.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22F62DD032960020CE84 /* UnionOfMasks.metal */; }; A3DC22F92DD036AF0020CE84 /* UnionOfMasksProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
A3DC22F82DD0369E0020CE84 /* UnionOfMasksProcessor.swift */; }; A3DC22FB2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22FA2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift */; }; - A3E161D22F3A8AF6002D4D08 /* CenterCropTransformUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3E161D12F3A8AEF002D4D08 /* CenterCropTransformUtils.swift */; }; - A3E161D42F3A9922002D4D08 /* CenterCropTransformUtils.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3E161D32F3A991E002D4D08 /* CenterCropTransformUtils.metal */; }; A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3E162772F3AFC63002D4D08 /* MeshCoder.swift */; }; A3E6D2332F464A2D00DAF88E /* PngDecoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = A3E6D2322F464A2700DAF88E /* PngDecoder.mm */; }; A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E422F57A98A00F515E6 /* DatasetDecoder.swift */; }; @@ -190,8 +183,6 @@ DAA7F8B52CA38C11003666D8 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B42CA38C11003666D8 /* Constants.swift */; }; DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */; }; DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */; }; - DAA7F8C82CA76527003666D8 /* CIImageUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8C72CA76527003666D8 /* CIImageUtils.swift */; }; - DAA7F8CA2CA76550003666D8 /* CVPixelBufferUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8C92CA76550003666D8 /* CVPixelBufferUtils.swift */; }; DAA7F8CC2CA77FA5003666D8 /* GrayscaleToColorFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8CB2CA77FA5003666D8 /* GrayscaleToColorFilter.swift */; }; /* End PBXBuildFile section */ @@ -231,6 +222,20 
@@ remoteGlobalIDString = A312FD7A2FA3391B0044808E; remoteInfo = PointNMapShared; }; + A312FE132FA3EBE80044808E /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 3222F90E2B622DFD0019A079 /* Project object */; + proxyType = 1; + remoteGlobalIDString = A312FE0C2FA3EBE80044808E; + remoteInfo = PointNMapShaderTypes; + }; + A312FE1D2FA3EC5C0044808E /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 3222F90E2B622DFD0019A079 /* Project object */; + proxyType = 1; + remoteGlobalIDString = A312FE0C2FA3EBE80044808E; + remoteInfo = PointNMapShaderTypes; + }; /* End PBXContainerItemProxy section */ /* Begin PBXCopyFilesBuildPhase section */ @@ -240,6 +245,7 @@ dstPath = ""; dstSubfolderSpec = 10; files = ( + A312FE162FA3EBE80044808E /* PointNMapShaderTypes.framework in Embed Frameworks */, A312FD912FA3391C0044808E /* PointNMapShared.framework in Embed Frameworks */, ); name = "Embed Frameworks"; @@ -272,7 +278,7 @@ A308015B2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MapillaryCustom11ClassConfig.swift; sourceTree = ""; }; A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DetectedFeature.swift; sourceTree = ""; }; A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshInstancePolicy.swift; sourceTree = ""; }; - A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttribute.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureAttribute.swift; sourceTree = ""; }; + A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureAttributeExtension.swift; sourceTree = ""; }; A30BED372ED162E2004A5B51 /* MeshDefinitions.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshDefinitions.swift; sourceTree = ""; }; A30BED392ED162ED004A5B51 /* ConnectedComponents.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConnectedComponents.swift; sourceTree = ""; }; A30BED3B2ED2F487004A5B51 /* MeshClusteringUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshClusteringUtils.swift; sourceTree = ""; }; @@ -283,12 +289,11 @@ A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunninSlopeExtension.swift; sourceTree = ""; }; A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CrossSlopeExtension.swift; sourceTree = ""; }; A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceIntegrityExtension.swift; sourceTree = ""; }; - A312FD722FA336020044808E /* DepthFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthFilter.swift; sourceTree = ""; }; - A312FD732FA336020044808E /* DepthFiltering.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = DepthFiltering.metal; sourceTree = ""; }; A312FD7B2FA3391B0044808E /* PointNMapShared.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShared.framework; sourceTree = BUILT_PRODUCTS_DIR; }; A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = PointNMapSharedTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + A312FDCC2FA3DBD10044808E /* DepthMapProcessorExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthMapProcessorExtension.swift; sourceTree = ""; }; + A312FE0D2FA3EBE80044808E 
/* PointNMapShaderTypes.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShaderTypes.framework; sourceTree = BUILT_PRODUCTS_DIR; }; A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraViewController.swift; sourceTree = ""; }; - A3281AED2F39501E0003E396 /* MTLTextureUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MTLTextureUtils.swift; sourceTree = ""; }; A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolicy.swift; sourceTree = ""; }; A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWGeometry.swift; sourceTree = ""; }; A32943472EE7C0D800C4C1BC /* OSWElementClass.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWElementClass.swift; sourceTree = ""; }; @@ -351,15 +356,12 @@ A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourFeatureRasterizer.swift; sourceTree = ""; }; A38338BE2EDA889A00F1A402 /* CustomPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = ""; }; A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationFeatureDetailView.swift; sourceTree = ""; }; - A38338C52EDAF3E500F1A402 /* DepthMapProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthMapProcessor.swift; sourceTree = ""; }; A39C9F3A2DD9B03000455E45 /* OSMElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMElement.swift; sourceTree = ""; }; A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIConstants.swift; sourceTree = ""; }; A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageManager.swift; sourceTree = ""; }; A3A413A12EC9C3F60039298C /* MeshRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshRasterizer.swift; sourceTree = ""; }; A3A413A52ECD86260039298C /* AccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeature.swift; sourceTree = ""; }; A3A413AC2ECF94950039298C /* DBSCAN.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DBSCAN.swift; sourceTree = ""; }; - A3A45F092EE7A4DE0029F5AE /* UnionOfMasksPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UnionOfMasksPolicy.swift; sourceTree = ""; }; - A3A45F0B2EE7A4F10029F5AE /* ContourDetectionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourDetectionPolicy.swift; sourceTree = ""; }; A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomXMLParser.swift; sourceTree = ""; }; A3AC01AE2F294CCA00A1D0E5 /* PlaneRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaneRasterizer.swift; sourceTree = ""; }; A3B2DDBE2DC99DE9003416FB /* HomographyRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomographyRequestProcessor.swift; sourceTree = ""; }; @@ -378,16 +380,12 @@ A3C1D7432F886D3100833411 /* SurfaceIntegrity.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = SurfaceIntegrity.metal; sourceTree = ""; }; A3C1D7462F886D8E00833411 /* SurfaceIntegrityFromImageExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path 
= SurfaceIntegrityFromImageExtension.swift; sourceTree = ""; }; A3C1D7482F886DD800833411 /* SurfaceIntegrityFromMeshExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceIntegrityFromMeshExtension.swift; sourceTree = ""; }; - A3C22FD22CF194A200533BF7 /* CGImageUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CGImageUtils.swift; sourceTree = ""; }; A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameRasterizer.swift; sourceTree = ""; }; - A3C55A482EAFFAB600F6CFDC /* CenterCropTransformUtilsExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CenterCropTransformUtilsExtension.swift; sourceTree = ""; }; A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceViewModel.swift; sourceTree = ""; }; A3D78D752E654F14003BFE78 /* ProfileView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileView.swift; sourceTree = ""; }; A3DA4DA72EB94D81005BB812 /* MeshGPUSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshGPUSnapshot.swift; sourceTree = ""; }; A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = MeshPipeline.metal; sourceTree = ""; }; - A3DA4DB32EBAE05C005BB812 /* ShaderTypes.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ShaderTypes.h; sourceTree = ""; }; A3DA4DB42EBAE101005BB812 /* IOSAccessAssessment-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "IOSAccessAssessment-Bridging-Header.h"; sourceTree = ""; }; - A3DA4DB52EBAE101005BB812 /* Stub.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Stub.m; sourceTree = ""; }; A3DA4DBB2EBCB87E005BB812 /* 
SegmentationMeshRecord.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationMeshRecord.swift; sourceTree = ""; }; A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalContext.swift; sourceTree = ""; }; A3DC22E82DCF0F9A0020CE84 /* ImageProcessing.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = ImageProcessing.metal; sourceTree = ""; }; @@ -396,8 +394,6 @@ A3DC22F62DD032960020CE84 /* UnionOfMasks.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = UnionOfMasks.metal; sourceTree = ""; }; A3DC22F82DD0369E0020CE84 /* UnionOfMasksProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UnionOfMasksProcessor.swift; sourceTree = ""; }; A3DC22FA2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DimensionBasedMaskFilter.swift; sourceTree = ""; }; - A3E161D12F3A8AEF002D4D08 /* CenterCropTransformUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CenterCropTransformUtils.swift; sourceTree = ""; }; - A3E161D32F3A991E002D4D08 /* CenterCropTransformUtils.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = CenterCropTransformUtils.metal; sourceTree = ""; }; A3E162772F3AFC63002D4D08 /* MeshCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshCoder.swift; sourceTree = ""; }; A3E6D2312F4649AD00DAF88E /* PngDecoder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PngDecoder.h; sourceTree = ""; }; A3E6D2322F464A2700DAF88E /* PngDecoder.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PngDecoder.mm; sourceTree = ""; }; @@ -437,39 +433,36 @@ DAA7F8B42CA38C11003666D8 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = Constants.swift; sourceTree = ""; }; DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpinnerView.swift; sourceTree = ""; }; DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProgressBar.swift; sourceTree = ""; }; - DAA7F8C72CA76527003666D8 /* CIImageUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CIImageUtils.swift; sourceTree = ""; }; - DAA7F8C92CA76550003666D8 /* CVPixelBufferUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CVPixelBufferUtils.swift; sourceTree = ""; }; DAA7F8CB2CA77FA5003666D8 /* GrayscaleToColorFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GrayscaleToColorFilter.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */ - A312FDA82FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { isa = PBXFileSystemSynchronizedBuildFileExceptionSet; - membershipExceptions = ( - Sources/PointNMap/Geospatial/LocalizationProcessor.swift, - Sources/PointNMap/Geospatial/LocationHelpers.swift, - Sources/PointNMap/Geospatial/LocationManager.swift, - Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift, - Sources/PointNMap/Shared/Utils/Extensions.swift, + platformFiltersByRelativePath = { + PointNMapShared.h = ( + ios, + ); + }; + publicHeaders = ( + PointNMapShared.h, ); - target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; + target = A312FD7A2FA3391B0044808E /* PointNMapShared */; }; - A312FDA92FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + A312FE172FA3EBE80044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { isa = 
PBXFileSystemSynchronizedBuildFileExceptionSet; - membershipExceptions = ( - Sources/PointNMap/Geospatial/LocalizationProcessor.swift, - Sources/PointNMap/Geospatial/LocationHelpers.swift, - Sources/PointNMap/Geospatial/LocationManager.swift, - Sources/PointNMap/Shared/Definitions/RasterizeConfig.swift, - Sources/PointNMap/Shared/Utils/Extensions.swift, + publicHeaders = ( + PointNMapShaderTypes.h, + ShaderTypes.h, ); - target = A312FD7A2FA3391B0044808E /* PointNMapShared */; + target = A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */; }; /* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ /* Begin PBXFileSystemSynchronizedRootGroup section */ - A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FDA82FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, A312FDA92FA3393A0044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; + A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; A312FD8B2FA3391C0044808E /* PointNMapSharedTests */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = PointNMapSharedTests; sourceTree = ""; }; + A312FE0E2FA3EBE80044808E /* PointNMapShaderTypes */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE172FA3EBE80044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShaderTypes; sourceTree = ""; }; /* End PBXFileSystemSynchronizedRootGroup section */ /* Begin PBXFrameworksBuildPhase section */ @@ -477,6 +470,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 
A312FE152FA3EBE80044808E /* PointNMapShaderTypes.framework in Frameworks */, A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */, A3FCC2FB2DA4E1880037AB43 /* OrderedCollections in Frameworks */, A312FD902FA3391C0044808E /* PointNMapShared.framework in Frameworks */, @@ -501,6 +495,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + A312FE202FA3EC710044808E /* PointNMapShaderTypes.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -512,6 +507,13 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + A312FE0A2FA3EBE80044808E /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ @@ -523,6 +525,8 @@ 3222F9332B622E0A0019A079 /* IOSAccessAssessmentUITests */, A312FD7C2FA3391B0044808E /* PointNMapShared */, A312FD8B2FA3391C0044808E /* PointNMapSharedTests */, + A312FE0E2FA3EBE80044808E /* PointNMapShaderTypes */, + A312FE1F2FA3EC710044808E /* Frameworks */, 3222F9172B622DFD0019A079 /* Products */, ); sourceTree = ""; @@ -535,6 +539,7 @@ 3222F9302B622E0A0019A079 /* IOSAccessAssessmentUITests.xctest */, A312FD7B2FA3391B0044808E /* PointNMapShared.framework */, A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */, + A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */, ); name = Products; sourceTree = ""; @@ -555,8 +560,6 @@ 55659C0B2BB786050094DF01 /* Shared */, 3222F91F2B622E090019A079 /* Preview Content */, 3222F9192B622DFD0019A079 /* IOSAccessAssessmentApp.swift */, - A3DA4DB32EBAE05C005BB812 /* ShaderTypes.h */, - A3DA4DB52EBAE101005BB812 /* Stub.m */, A3DA4DB42EBAE101005BB812 /* IOSAccessAssessment-Bridging-Header.h */, 3222F91D2B622E090019A079 /* Assets.xcassets */, ); @@ -702,8 +705,6 @@ A30801662EC0AE6B00B1BA3A /* Components */ = { isa = PBXGroup; children = ( - A3A45F0B2EE7A4F10029F5AE /* ContourDetectionPolicy.swift */, - 
A3A45F092EE7A4DE0029F5AE /* UnionOfMasksPolicy.swift */, A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */, A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */, ); @@ -750,6 +751,13 @@ path = Location; sourceTree = ""; }; + A312FE1F2FA3EC710044808E /* Frameworks */ = { + isa = PBXGroup; + children = ( + ); + name = Frameworks; + sourceTree = ""; + }; A31A1E772EAC49E3008B30B7 /* UI */ = { isa = PBXGroup; children = ( @@ -1075,7 +1083,7 @@ A38338C32EDAF25400F1A402 /* Attributes */ = { isa = PBXGroup; children = ( - A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttribute.swift */, + A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */, ); path = Attributes; sourceTree = ""; @@ -1083,9 +1091,7 @@ A38338C42EDAF3DC00F1A402 /* Depth */ = { isa = PBXGroup; children = ( - A312FD722FA336020044808E /* DepthFilter.swift */, - A312FD732FA336020044808E /* DepthFiltering.metal */, - A38338C52EDAF3E500F1A402 /* DepthMapProcessor.swift */, + A312FDCC2FA3DBD10044808E /* DepthMapProcessorExtension.swift */, ); path = Depth; sourceTree = ""; @@ -1237,20 +1243,6 @@ path = Utils; sourceTree = ""; }; - A3DA4DBF2EBE867F005BB812 /* Utils */ = { - isa = PBXGroup; - children = ( - A3281AED2F39501E0003E396 /* MTLTextureUtils.swift */, - A3C22FD22CF194A200533BF7 /* CGImageUtils.swift */, - DAA7F8C72CA76527003666D8 /* CIImageUtils.swift */, - DAA7F8C92CA76550003666D8 /* CVPixelBufferUtils.swift */, - A3E161D12F3A8AEF002D4D08 /* CenterCropTransformUtils.swift */, - A3C55A482EAFFAB600F6CFDC /* CenterCropTransformUtilsExtension.swift */, - A3E161D32F3A991E002D4D08 /* CenterCropTransformUtils.metal */, - ); - path = Utils; - sourceTree = ""; - }; A3DA4DC02EBE86C7005BB812 /* UnionOfMasks */ = { isa = PBXGroup; children = ( @@ -1350,7 +1342,6 @@ A3BB5AF92DB21080008673ED /* ImageProcessing */, A3DA4DC02EBE86C7005BB812 /* UnionOfMasks */, A362AEC72DB59577002D7598 /* Helpers */, - A3DA4DBF2EBE867F005BB812 /* Utils */, ); path = Image; sourceTree = ""; @@ -1365,6 +1356,13 @@ ); 
runOnlyForDeploymentPostprocessing = 0; }; + A312FE082FA3EBE80044808E /* Headers */ = { + isa = PBXHeadersBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXHeadersBuildPhase section */ /* Begin PBXNativeTarget section */ @@ -1381,6 +1379,7 @@ ); dependencies = ( A312FD8F2FA3391C0044808E /* PBXTargetDependency */, + A312FE142FA3EBE80044808E /* PBXTargetDependency */, ); name = IOSAccessAssessment; productName = IOSAccessAssessment; @@ -1435,6 +1434,7 @@ buildRules = ( ); dependencies = ( + A312FE1E2FA3EC5C0044808E /* PBXTargetDependency */, ); fileSystemSynchronizedGroups = ( A312FD7C2FA3391B0044808E /* PointNMapShared */, @@ -1470,6 +1470,29 @@ productReference = A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */; productType = "com.apple.product-type.bundle.unit-test"; }; + A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */ = { + isa = PBXNativeTarget; + buildConfigurationList = A312FE182FA3EBE80044808E /* Build configuration list for PBXNativeTarget "PointNMapShaderTypes" */; + buildPhases = ( + A312FE082FA3EBE80044808E /* Headers */, + A312FE092FA3EBE80044808E /* Sources */, + A312FE0A2FA3EBE80044808E /* Frameworks */, + A312FE0B2FA3EBE80044808E /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + fileSystemSynchronizedGroups = ( + A312FE0E2FA3EBE80044808E /* PointNMapShaderTypes */, + ); + name = PointNMapShaderTypes; + packageProductDependencies = ( + ); + productName = PointNMapShaderTypes; + productReference = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; + productType = "com.apple.product-type.framework"; + }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ @@ -1499,6 +1522,9 @@ CreatedOnToolsVersion = 26.0.1; TestTargetID = 3222F9152B622DFD0019A079; }; + A312FE0C2FA3EBE80044808E = { + CreatedOnToolsVersion = 26.0.1; + }; }; }; buildConfigurationList = 3222F9112B622DFD0019A079 /* Build configuration list for PBXProject 
"IOSAccessAssessment" */; @@ -1522,6 +1548,7 @@ 3222F92F2B622E0A0019A079 /* IOSAccessAssessmentUITests */, A312FD7A2FA3391B0044808E /* PointNMapShared */, A312FD842FA3391C0044808E /* PointNMapSharedTests */, + A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */, ); }; /* End PBXProject section */ @@ -1565,6 +1592,13 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + A312FE0B2FA3EBE80044808E /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXResourcesBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ @@ -1581,7 +1615,6 @@ A3B2DDBF2DC99DEF003416FB /* HomographyRequestProcessor.swift in Sources */, A3FFAA802DE444C6002B99BD /* AnnotationOption.swift in Sources */, CAF812C42CFA108100D44B84 /* UserStateViewModel.swift in Sources */, - A3DA4DB62EBAE101005BB812 /* Stub.m in Sources */, A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in Sources */, A37E3E3D2EED60F300B07B77 /* lodepng.cpp in Sources */, A3FE166C2E1C29CB00DAE5BE /* OtherDetailsCoder.swift in Sources */, @@ -1609,10 +1642,10 @@ A30BED382ED162E7004A5B51 /* MeshDefinitions.swift in Sources */, A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */, A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */, - A3C55A492EAFFABF00F6CFDC /* CenterCropTransformUtilsExtension.swift in Sources */, A3EE6E4A2F580D6200F515E6 /* TestCameraView.swift in Sources */, A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */, A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */, + A312FDCD2FA3DBD50044808E /* DepthMapProcessorExtension.swift in Sources */, A32943592EE8204400C4C1BC /* OSWPolygon.swift in Sources */, A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */, CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, @@ -1621,7 +1654,7 @@ A3DC22FB2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift in Sources */, A3FFAA7E2DE3E41D002B99BD /* 
SegmentationARPipeline.swift in Sources */, A30BED3C2ED2F48B004A5B51 /* MeshClusteringUtils.swift in Sources */, - A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttribute.swift in Sources */, + A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */, A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */, A36C6E022E134CE600A86004 /* bisenetv2_35_640_640.mlpackage in Sources */, A35BB2862DC30386009A3FE0 /* CameraOrientation.swift in Sources */, @@ -1635,16 +1668,13 @@ A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */, A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */, A3AC01AF2F294CCD00A1D0E5 /* PlaneRasterizer.swift in Sources */, - DAA7F8C82CA76527003666D8 /* CIImageUtils.swift in Sources */, A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */, - A3A45F0A2EE7A4E10029F5AE /* UnionOfMasksPolicy.swift in Sources */, A3DC22F72DD032960020CE84 /* UnionOfMasks.metal in Sources */, A35E05182EDEA476003C26CF /* AttributeEstimationPipeline.swift in Sources */, A32D66FB2F7EE9DA00DC4173 /* DamageDetectionModelRequestProcessor.swift in Sources */, A3C1D7472F886D9D00833411 /* SurfaceIntegrityFromImageExtension.swift in Sources */, A3B5BDA02F831F270036C6EC /* ProjectionUtils.swift in Sources */, CAA947792CDE700A000C6918 /* AuthService.swift in Sources */, - A3C22FD32CF194A600533BF7 /* CGImageUtils.swift in Sources */, A364B5DD2F259AFE00325E5C /* WorldPoints.metal in Sources */, A3B5BDA52F8329A80036C6EC /* WorldPointsGridExtension.swift in Sources */, A3DC22E92DCF0F9A0020CE84 /* ImageProcessing.metal in Sources */, @@ -1664,7 +1694,6 @@ A3EE6E482F580D0D00F515E6 /* TestListView.swift in Sources */, A30D05842E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage in Sources */, A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */, - DAA7F8CA2CA76550003666D8 /* CVPixelBufferUtils.swift in Sources */, DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources 
*/, A3EE6E4C2F580E2B00F515E6 /* DatasetLister.swift in Sources */, A3EE6EFE2F69287F00F515E6 /* LocationFromMeshExtension.swift in Sources */, @@ -1677,15 +1706,12 @@ CAA947762CDE6FBD000C6918 /* LoginView.swift in Sources */, 3222F91A2B622DFD0019A079 /* IOSAccessAssessmentApp.swift in Sources */, A3B5BDA32F8329740036C6EC /* ProjectedWorldPointsExtension.swift in Sources */, - A3A45F0C2EE7A4F40029F5AE /* ContourDetectionPolicy.swift in Sources */, A3EE6F002F6A29F500F515E6 /* LocationDetails.swift in Sources */, A32943572EE81BF700C4C1BC /* OSWLineString.swift in Sources */, A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */, A37C3C1A2F3144F7001F4248 /* PlaneAttributeProcessor.swift in Sources */, A32943532EE814A700C4C1BC /* OSWElement.swift in Sources */, A30801502EC0926800B1BA3A /* ContourUtils.swift in Sources */, - A312FD742FA336020044808E /* DepthFilter.swift in Sources */, - A312FD752FA336020044808E /* DepthFiltering.metal in Sources */, A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */, A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */, A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */, @@ -1696,17 +1722,14 @@ A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */, A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */, A3C1D7392F84A84900833411 /* SurfaceNormals.metal in Sources */, - A38338C62EDAF3E900F1A402 /* DepthMapProcessor.swift in Sources */, A3431E042F26FA7200B96610 /* OtherAttributeExtensionLegacy.swift in Sources */, A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */, - A3281AEE2F3950210003E396 /* MTLTextureUtils.swift in Sources */, A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */, A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */, A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */, A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */, A34509D82FA1A6FA003157B0 /* SafeDeque.swift in 
Sources */, A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */, - A3E161D22F3A8AF6002D4D08 /* CenterCropTransformUtils.swift in Sources */, A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */, A37E3E9B2EFB8F7500B07B77 /* HeadingCoder.swift in Sources */, A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */, @@ -1725,7 +1748,6 @@ A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, A3C1D7442F886D3500833411 /* SurfaceIntegrity.metal in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, - A3E161D42F3A9922002D4D08 /* CenterCropTransformUtils.metal in Sources */, A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */, A3DC22ED2DCF10050020CE84 /* Homography.metal in Sources */, A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */, @@ -1781,6 +1803,13 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + A312FE092FA3EBE80044808E /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXSourcesBuildPhase section */ /* Begin PBXTargetDependency section */ @@ -1809,6 +1838,16 @@ target = A312FD7A2FA3391B0044808E /* PointNMapShared */; targetProxy = A312FD8E2FA3391C0044808E /* PBXContainerItemProxy */; }; + A312FE142FA3EBE80044808E /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */; + targetProxy = A312FE132FA3EBE80044808E /* PBXContainerItemProxy */; + }; + A312FE1E2FA3EC5C0044808E /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */; + targetProxy = A312FE1D2FA3EC5C0044808E /* PBXContainerItemProxy */; + }; /* End PBXTargetDependency section */ /* Begin XCBuildConfiguration section */ @@ -1947,7 +1986,10 @@ ENABLE_PREVIEWS = YES; FRAMEWORK_SEARCH_PATHS = "$(inherited)"; GENERATE_INFOPLIST_FILE = YES; - HEADER_SEARCH_PATHS = 
"$(SRCROOT)/IOSAccessAssessment"; + HEADER_SEARCH_PATHS = ( + "$(SRCROOT)/IOSAccessAssessment", + "$(SRCROOT)/PointNMapShaderTypes", + ); INFOPLIST_FILE = IOSAccessAssessment/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = iOSPointMapper; INFOPLIST_KEY_LSSupportsOpeningDocumentsInPlace = YES; @@ -1966,7 +2008,7 @@ "@executable_path/Frameworks", ); MARKETING_VERSION = 0.3; - MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment"; + MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment $(SRCROOT)/PointNMapShaderTypes"; OTHER_CFLAGS = "-DACCELERATE_NEW_LAPACK"; PRODUCT_BUNDLE_IDENTIFIER = edu.uw.pointmapper; PRODUCT_NAME = "$(TARGET_NAME)"; @@ -1995,7 +2037,10 @@ ENABLE_PREVIEWS = YES; FRAMEWORK_SEARCH_PATHS = "$(inherited)"; GENERATE_INFOPLIST_FILE = YES; - HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment"; + HEADER_SEARCH_PATHS = ( + "$(SRCROOT)/IOSAccessAssessment", + "$(SRCROOT)/PointNMapShaderTypes", + ); INFOPLIST_FILE = IOSAccessAssessment/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = iOSPointMapper; INFOPLIST_KEY_LSSupportsOpeningDocumentsInPlace = YES; @@ -2014,7 +2059,7 @@ "@executable_path/Frameworks", ); MARKETING_VERSION = 0.3; - MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment"; + MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment $(SRCROOT)/PointNMapShaderTypes"; OTHER_CFLAGS = "-DACCELERATE_NEW_LAPACK"; PRODUCT_BUNDLE_IDENTIFIER = edu.uw.pointmapper; PRODUCT_NAME = "$(TARGET_NAME)"; @@ -2117,17 +2162,23 @@ DYLIB_INSTALL_NAME_BASE = "@rpath"; ENABLE_MODULE_VERIFIER = YES; GENERATE_INFOPLIST_FILE = YES; + HEADER_SEARCH_PATHS = ( + "$(SRCROOT)/PointNMapShared", + "$(SRCROOT)/PointNMapShaderTypes", + ); INFOPLIST_KEY_NSHumanReadableCopyright = ""; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 26.0; + IPHONEOS_DEPLOYMENT_TARGET = 18.6; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", "@loader_path/Frameworks", ); MARKETING_VERSION = 1.0; + MODULEMAP_FILE = ""; 
MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++"; MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20"; + MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/PointNMapShared $(SRCROOT)/PointNMapShaderTypes"; PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapShared; PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)"; SKIP_INSTALL = YES; @@ -2136,6 +2187,7 @@ SWIFT_EMIT_LOC_STRINGS = YES; SWIFT_INSTALL_MODULE = YES; SWIFT_INSTALL_OBJC_HEADER = NO; + SWIFT_OBJC_BRIDGING_HEADER = ""; SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; @@ -2156,17 +2208,23 @@ DYLIB_INSTALL_NAME_BASE = "@rpath"; ENABLE_MODULE_VERIFIER = YES; GENERATE_INFOPLIST_FILE = YES; + HEADER_SEARCH_PATHS = ( + "$(SRCROOT)/PointNMapShared", + "$(SRCROOT)/PointNMapShaderTypes", + ); INFOPLIST_KEY_NSHumanReadableCopyright = ""; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 26.0; + IPHONEOS_DEPLOYMENT_TARGET = 18.6; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", "@loader_path/Frameworks", ); MARKETING_VERSION = 1.0; + MODULEMAP_FILE = ""; MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++"; MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20"; + MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/PointNMapShared $(SRCROOT)/PointNMapShaderTypes"; PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapShared; PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)"; SKIP_INSTALL = YES; @@ -2175,6 +2233,7 @@ SWIFT_EMIT_LOC_STRINGS = YES; SWIFT_INSTALL_MODULE = YES; SWIFT_INSTALL_OBJC_HEADER = NO; + SWIFT_OBJC_BRIDGING_HEADER = ""; SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; @@ -2225,6 +2284,74 @@ }; name = Release; }; + A312FE192FA3EBE80044808E /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEFINES_MODULE = YES; + DEVELOPMENT_TEAM = 
G8MQVE5WWW; + DYLIB_COMPATIBILITY_VERSION = 1; + DYLIB_CURRENT_VERSION = 1; + DYLIB_INSTALL_NAME_BASE = "@rpath"; + ENABLE_MODULE_VERIFIER = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; + IPHONEOS_DEPLOYMENT_TARGET = 26.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++"; + MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20"; + PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapShaderTypes; + PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)"; + SKIP_INSTALL = YES; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_EMIT_LOC_STRINGS = YES; + TARGETED_DEVICE_FAMILY = "1,2"; + VERSIONING_SYSTEM = "apple-generic"; + VERSION_INFO_PREFIX = ""; + }; + name = Debug; + }; + A312FE1A2FA3EBE80044808E /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEFINES_MODULE = YES; + DEVELOPMENT_TEAM = G8MQVE5WWW; + DYLIB_COMPATIBILITY_VERSION = 1; + DYLIB_CURRENT_VERSION = 1; + DYLIB_INSTALL_NAME_BASE = "@rpath"; + ENABLE_MODULE_VERIFIER = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSHumanReadableCopyright = ""; + INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; + IPHONEOS_DEPLOYMENT_TARGET = 26.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++"; + MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20"; + PRODUCT_BUNDLE_IDENTIFIER = himanshunaidu.PointNMapShaderTypes; + PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)"; + SKIP_INSTALL = YES; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_EMIT_LOC_STRINGS = YES; + TARGETED_DEVICE_FAMILY = "1,2"; + VERSIONING_SYSTEM = 
"apple-generic"; + VERSION_INFO_PREFIX = ""; + }; + name = Release; + }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ @@ -2282,6 +2409,15 @@ defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; + A312FE182FA3EBE80044808E /* Build configuration list for PBXNativeTarget "PointNMapShaderTypes" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + A312FE192FA3EBE80044808E /* Debug */, + A312FE1A2FA3EBE80044808E /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; /* End XCConfigurationList section */ /* Begin XCRemoteSwiftPackageReference section */ diff --git a/IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttribute.swift b/IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttributeExtension.swift similarity index 100% rename from IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttribute.swift rename to IOSAccessAssessment/AccessibilityFeature/Attributes/AccessibilityFeatureAttributeExtension.swift diff --git a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessor.swift b/IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessorExtension.swift similarity index 54% rename from IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessor.swift rename to IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessorExtension.swift index ba3e1efe..33a4241e 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessor.swift +++ b/IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessorExtension.swift @@ -1,85 +1,13 @@ // -// DepthMapProcessor.swift +// DepthMapProcessorExtension.swift // IOSAccessAssessment // -// Created by Himanshu on 11/29/25. +// Created by Himanshu on 4/30/26. 
// -import CoreImage -import CoreVideo +import PointNMapShared -enum DepthMapProcessorError: Error, LocalizedError { - case unableToAccessDepthData - case invalidDepth - - var errorDescription: String? { - switch self { - case .unableToAccessDepthData: - return "Unable to access depth data from the depth map." - case .invalidDepth: - return "The depth value retrieved is invalid." - } - } -} - -struct DepthMapProcessor { - let depthImage: CIImage - - private let context: CIContext - - private let depthWidth: Int - private let depthHeight: Int - private let depthBuffer: CVPixelBuffer - - init(depthImage: CIImage) throws { - self.depthImage = depthImage - self.context = CIContext(options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - self.depthWidth = Int(depthImage.extent.width) - self.depthHeight = Int(depthImage.extent.height) - self.depthBuffer = try depthImage.toPixelBuffer( - context: context, - pixelFormatType: kCVPixelFormatType_DepthFloat32, - colorSpace: nil - ) - } - - private func getDepthAtPoint(point: CGPoint) throws -> Float { - CVPixelBufferLockBaseAddress(depthBuffer, .readOnly) - defer { CVPixelBufferUnlockBaseAddress(depthBuffer, .readOnly) } - - guard let depthBaseAddress = CVPixelBufferGetBaseAddress(depthBuffer) else { - throw DepthMapProcessorError.unableToAccessDepthData - } - let depthBytesPerRow = CVPixelBufferGetBytesPerRow(depthBuffer) - let depthBuffer = depthBaseAddress.assumingMemoryBound(to: Float.self) - - let depthIndexRow = Int(point.y) - let depthIndexCol = Int(point.x) - let depthIndex = depthIndexRow * (depthBytesPerRow / MemoryLayout.size) + depthIndexCol - let depthAtPoint = depthBuffer[depthIndex] - return depthAtPoint - } - - private func getDepthsAtPoints(points: [CGPoint]) throws -> [Float] { - CVPixelBufferLockBaseAddress(depthBuffer, .readOnly) - defer { CVPixelBufferUnlockBaseAddress(depthBuffer, .readOnly) } - - guard let depthBaseAddress = CVPixelBufferGetBaseAddress(depthBuffer) else { - throw 
DepthMapProcessorError.unableToAccessDepthData - } - let depthBytesPerRow = CVPixelBufferGetBytesPerRow(depthBuffer) - let depthBuffer = depthBaseAddress.assumingMemoryBound(to: Float.self) - - var depths: [Float] = points.map { _ in 0.0 } - for (index, point) in points.enumerated() { - let depthIndexRow = Int(point.y) - let depthIndexCol = Int(point.x) - let depthIndex = depthIndexRow * (depthBytesPerRow / MemoryLayout.size) + depthIndexCol - depths[index] = depthBuffer[depthIndex] - } - return depths - } - +extension DepthMapProcessor { /** Retrieves the depth value at the centroid of the given accessibility feature. @@ -161,15 +89,4 @@ struct DepthMapProcessor { let depths = try getDepthsAtPoints(points: featureBoundPoints) return depths } - - func getFeatureDepthsAtNormalizedPoints(_ points: [SIMD2]) throws -> [Float] { - let featurePoints: [CGPoint] = points.map { point in - CGPoint( - x: CGFloat(point.x * Float(depthWidth)), - y: CGFloat((1 - point.y) * Float(depthHeight)) - ) - } - let depths = try getDepthsAtPoints(points: featurePoints) - return depths - } } diff --git a/IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift b/IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift index a7e3847d..6892fec1 100644 --- a/IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift +++ b/IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift @@ -8,6 +8,7 @@ import UIKit import Metal import CoreImage import MetalKit +import PointNMapShared enum UnionOfMasksProcessorError: Error, LocalizedError { case metalInitializationFailed diff --git a/PointNMapShaderTypes/PointNMapShaderTypes.docc/PointNMapShaderTypes.md b/PointNMapShaderTypes/PointNMapShaderTypes.docc/PointNMapShaderTypes.md new file mode 100644 index 00000000..dca2aa1f --- /dev/null +++ b/PointNMapShaderTypes/PointNMapShaderTypes.docc/PointNMapShaderTypes.md @@ -0,0 +1,13 @@ +# ``PointNMapShaderTypes`` + 
+Summary + +## Overview + +Text + +## Topics + +### Group + +- ``Symbol`` \ No newline at end of file diff --git a/PointNMapShaderTypes/PointNMapShaderTypes.h b/PointNMapShaderTypes/PointNMapShaderTypes.h new file mode 100644 index 00000000..6f30af06 --- /dev/null +++ b/PointNMapShaderTypes/PointNMapShaderTypes.h @@ -0,0 +1,17 @@ +// +// PointNMapShaderTypes.h +// PointNMapShaderTypes +// +// Created by Himanshu on 4/30/26. +// + +#import + +//! Project version number for PointNMapShaderTypes. +FOUNDATION_EXPORT double PointNMapShaderTypesVersionNumber; + +//! Project version string for PointNMapShaderTypes. +FOUNDATION_EXPORT const unsigned char PointNMapShaderTypesVersionString[]; + +// In this header, you should import all the public headers of your framework using statements like #import +#import diff --git a/IOSAccessAssessment/ShaderTypes.h b/PointNMapShaderTypes/ShaderTypes.h similarity index 100% rename from IOSAccessAssessment/ShaderTypes.h rename to PointNMapShaderTypes/ShaderTypes.h diff --git a/IOSAccessAssessment/Stub.m b/PointNMapShaderTypes/Stub.m similarity index 82% rename from IOSAccessAssessment/Stub.m rename to PointNMapShaderTypes/Stub.m index a69cf1e8..e35cd3a3 100644 --- a/IOSAccessAssessment/Stub.m +++ b/PointNMapShaderTypes/Stub.m @@ -6,3 +6,4 @@ // #import +#import "ShaderTypes.h" diff --git a/PointNMapShared/PointNMapShared.h b/PointNMapShared/PointNMapShared.h new file mode 100644 index 00000000..4bd8af7c --- /dev/null +++ b/PointNMapShared/PointNMapShared.h @@ -0,0 +1,15 @@ +// +// PointNMapShared.h +// IOSAccessAssessment +// +// Created by Himanshu on 4/30/26. 
+// + +#ifndef PointNMapShared_h +#define PointNMapShared_h + + +#endif /* PointNMapShared_h */ + +#import +#import diff --git a/PointNMapShared/PointNMapShared.swift b/PointNMapShared/PointNMapShared.swift index 8959ce4a..533cee4d 100644 --- a/PointNMapShared/PointNMapShared.swift +++ b/PointNMapShared/PointNMapShared.swift @@ -7,3 +7,10 @@ import Foundation +public final class PointNMapSharedBundleToken {} + +public enum PointNMapSharedResources { + public static var bundle: Bundle { + Bundle(for: PointNMapSharedBundleToken.self) + } +} diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/ContourDetectionPolicy.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/ContourDetectionPolicy.swift new file mode 100644 index 00000000..c6c6174d --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/ContourDetectionPolicy.swift @@ -0,0 +1,25 @@ +// +// ContourDetectionPolicy.swift +// IOSAccessAssessment +// +// Created by Himanshu on 12/8/25. 
+// + +/** + Policy for contour detection in segmentation masks + + Attributes: + - epsilon: Factor to determine the approximation accuracy for contour detection + - perimeterThreshold: Minimum normalized perimeter for a contour to be considered valid + */ +public struct ContourDetectionPolicy: Sendable, Codable, Equatable, Hashable { + public let epsilon: Float + public let perimeterThreshold: Float +} + +public extension ContourDetectionPolicy { + static let `default` = ContourDetectionPolicy( + epsilon: 0.01, + perimeterThreshold: 0.01 + ) +} diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/UnionOfMasksPolicy.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/UnionOfMasksPolicy.swift new file mode 100644 index 00000000..896b26c5 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/UnionOfMasksPolicy.swift @@ -0,0 +1,34 @@ +// +// UnionOfMasksPolicy.swift +// IOSAccessAssessment +// +// Created by Himanshu on 12/8/25. 
+// + +/** + Policy for combining segmentation masks across multiple frames + + Attributes: + - threshold: Minimum number of frames that need to have a class label for it to be considered valid + - defaultFrameWeight: Weight for the default frame when calculating the union of masks + - lastFrameWeight: Weight for the last frame when calculating the union of masks + */ +public struct UnionOfMasksPolicy: Sendable, Codable, Equatable, Hashable { + public let threshold: Float + public let defaultFrameWeight: Float + public let lastFrameWeight: Float + + public init(threshold: Float, defaultFrameWeight: Float, lastFrameWeight: Float) { + self.threshold = threshold + self.defaultFrameWeight = defaultFrameWeight + self.lastFrameWeight = lastFrameWeight + } +} + +public extension UnionOfMasksPolicy { + static let `default` = UnionOfMasksPolicy( + threshold: 0.6, + defaultFrameWeight: 1.0, + lastFrameWeight: 2.0 + ) +} diff --git a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthFilter.swift similarity index 96% rename from IOSAccessAssessment/ComputerVision/Image/Depth/DepthFilter.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthFilter.swift index 46054419..f046fd1f 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthFilter.swift @@ -9,6 +9,7 @@ import UIKit import Metal import CoreImage import MetalKit +import PointNMapShaderTypes public enum DepthFilterError: Error, LocalizedError { case metalInitializationFailed @@ -56,7 +57,8 @@ public struct DepthFilter { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "depthFilteringKernel"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + 
guard let kernelFunction = library.makeFunction(name: "depthFilteringKernel"), let pipeline = try? device.makeComputePipelineState(function: kernelFunction) else { throw DepthFilterError.metalInitializationFailed } diff --git a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFiltering.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthFiltering.metal similarity index 96% rename from IOSAccessAssessment/ComputerVision/Image/Depth/DepthFiltering.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthFiltering.metal index 711493b8..175d01c2 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthFiltering.metal +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthFiltering.metal @@ -8,7 +8,7 @@ #include #include using namespace metal; -#import "../../../ShaderTypes.h" +#import "ShaderTypes.h" extern "C" kernel void depthFilteringKernel( texture2d inputTexture [[texture(0)]], diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift new file mode 100644 index 00000000..73c6d940 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift @@ -0,0 +1,93 @@ +// +// DepthMapProcessor.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/29/25. +// + +import CoreImage +import CoreVideo + +public enum DepthMapProcessorError: Error, LocalizedError { + case unableToAccessDepthData + case invalidDepth + + public var errorDescription: String? { + switch self { + case .unableToAccessDepthData: + return "Unable to access depth data from the depth map." + case .invalidDepth: + return "The depth value retrieved is invalid." 
+ } + } +} + +public struct DepthMapProcessor { + public let depthImage: CIImage + + public let context: CIContext + + public let depthWidth: Int + public let depthHeight: Int + public let depthBuffer: CVPixelBuffer + + public init(depthImage: CIImage) throws { + self.depthImage = depthImage + self.context = CIContext(options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) + self.depthWidth = Int(depthImage.extent.width) + self.depthHeight = Int(depthImage.extent.height) + self.depthBuffer = try depthImage.toPixelBuffer( + context: context, + pixelFormatType: kCVPixelFormatType_DepthFloat32, + colorSpace: nil + ) + } + + public func getDepthAtPoint(point: CGPoint) throws -> Float { + CVPixelBufferLockBaseAddress(depthBuffer, .readOnly) + defer { CVPixelBufferUnlockBaseAddress(depthBuffer, .readOnly) } + + guard let depthBaseAddress = CVPixelBufferGetBaseAddress(depthBuffer) else { + throw DepthMapProcessorError.unableToAccessDepthData + } + let depthBytesPerRow = CVPixelBufferGetBytesPerRow(depthBuffer) + let depthBuffer = depthBaseAddress.assumingMemoryBound(to: Float.self) + + let depthIndexRow = Int(point.y) + let depthIndexCol = Int(point.x) + let depthIndex = depthIndexRow * (depthBytesPerRow / MemoryLayout.size) + depthIndexCol + let depthAtPoint = depthBuffer[depthIndex] + return depthAtPoint + } + + public func getDepthsAtPoints(points: [CGPoint]) throws -> [Float] { + CVPixelBufferLockBaseAddress(depthBuffer, .readOnly) + defer { CVPixelBufferUnlockBaseAddress(depthBuffer, .readOnly) } + + guard let depthBaseAddress = CVPixelBufferGetBaseAddress(depthBuffer) else { + throw DepthMapProcessorError.unableToAccessDepthData + } + let depthBytesPerRow = CVPixelBufferGetBytesPerRow(depthBuffer) + let depthBuffer = depthBaseAddress.assumingMemoryBound(to: Float.self) + + var depths: [Float] = points.map { _ in 0.0 } + for (index, point) in points.enumerated() { + let depthIndexRow = Int(point.y) + let depthIndexCol = Int(point.x) + let depthIndex = 
depthIndexRow * (depthBytesPerRow / MemoryLayout.size) + depthIndexCol + depths[index] = depthBuffer[depthIndex] + } + return depths + } + + public func getFeatureDepthsAtNormalizedPoints(_ points: [SIMD2]) throws -> [Float] { + let featurePoints: [CGPoint] = points.map { point in + CGPoint( + x: CGFloat(point.x * Float(depthWidth)), + y: CGFloat((1 - point.y) * Float(depthHeight)) + ) + } + let depths = try getDepthsAtPoints(points: featurePoints) + return depths + } +} diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift new file mode 100644 index 00000000..772874de --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift @@ -0,0 +1,19 @@ +// +// CGImageUtils.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/22/24. +// + +import UIKit + +extension CGImage { + // TODO: Need to check if this is applicable to all image types + func getByteSize() -> Int { + var bytesPerRow: Int = 4 * self.width + if (bytesPerRow % 16 != 0) { + bytesPerRow = ((bytesPerRow / 16) + 1) * 16; + } + return self.height * bytesPerRow; + } +} diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift new file mode 100644 index 00000000..b15dc9df --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift @@ -0,0 +1,215 @@ +// +// CIImageUtils.swift +// IOSAccessAssessment +// +// Created by TCAT on 9/27/24. +// + +import UIKit +import MetalKit + +enum CIImageUtilsError: Error, LocalizedError { + case pixelBufferCreationError + case segmentationTextureError + + var errorDescription: String? { + switch self { + case .pixelBufferCreationError: + return "Failed to create pixel buffer from CIImage." 
+ case .segmentationTextureError: + return "Failed to create segmentation texture." + } + } +} + +extension CIImage { + func croppedToCenter(size: CGSize) -> CIImage { + let x = (extent.width - size.width) / 2 + let y = (extent.height - size.height) / 2 + let cropRect = CGRect(x: x, y: y, width: size.width, height: size.height) + let croppedImage = cropped(to: cropRect) + + let centeredImage = croppedImage.transformed(by: CGAffineTransform(translationX: -x, y: -y)) + return centeredImage + } + + /// Returns a resized image. + func resized(to size: CGSize) -> CIImage { + let outputScaleX = size.width / extent.width + let outputScaleY = size.height / extent.height + var outputImage = self.transformed(by: CGAffineTransform(scaleX: outputScaleX, y: outputScaleY)) + outputImage = outputImage.transformed( + by: CGAffineTransform(translationX: -outputImage.extent.origin.x, y: -outputImage.extent.origin.y) + ) + return outputImage + } +} + +/** + Extensions for converting CIImage to CVPixelBuffer. + */ +extension CIImage { + func toPixelBuffer( + context: CIContext, pixelFormatType: OSType = kCVPixelFormatType_32BGRA, colorSpace: CGColorSpace? = nil + ) throws -> CVPixelBuffer { + let width = Int(self.extent.width) + let height = Int(self.extent.height) + + var pixelBuffer: CVPixelBuffer? 
+ let attrs: [CFString: Any] = [ + kCVPixelBufferCGImageCompatibilityKey: true, + kCVPixelBufferCGBitmapContextCompatibilityKey: true, + kCVPixelBufferMetalCompatibilityKey: true, + kCVPixelBufferIOSurfacePropertiesKey: [:] + ] + let status = CVPixelBufferCreate( + kCFAllocatorDefault, + width, + height, + pixelFormatType, + attrs as CFDictionary, + &pixelBuffer + ) + + guard status == kCVReturnSuccess, let buffer = pixelBuffer else { + throw CIImageUtilsError.pixelBufferCreationError + } + context.render(self, to: buffer, bounds: self.extent, colorSpace: colorSpace) + return buffer + } + + func toPixelBuffer( + context: CIContext, pixelBufferPool: CVPixelBufferPool, colorSpace: CGColorSpace? = nil + ) throws -> CVPixelBuffer { + var pixelBufferOut: CVPixelBuffer? + let status = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &pixelBufferOut) + guard status == kCVReturnSuccess, let pixelBuffer = pixelBufferOut else { + throw CIImageUtilsError.pixelBufferCreationError + } + context.render(self, to: pixelBuffer, bounds: self.extent, colorSpace: colorSpace) + return pixelBuffer + } +} + +/** + Supporting enum for CIImage to MTLTexture conversion. + */ +enum CIImageToMTLTextureOrientation: Sendable { + case cICanonical + case metalTopLeft +} + +/** + Extensions for converting CIImage to MTLTexture. + */ +extension CIImage { + /** + Converts the CIImage to a MTLTexture using the provided device, command buffer, pixel format, CIContext, and color space. + + Performs a direct conversion by rendering the CIImage into a newly created MTLTexture. + + - WARNING: + This method has a vertical mirroring issue, thanks to the way MTLTexture coordinates conflict with CIImage coordinates. + For now, the caller has the responsibility of deciding whether it wants to follow CIImage's coordinate system or MTLTexture's coordinate system. + This will be expressed using the custom enum `CIImageToMTLTextureOrientation`. 
+ */ + func toMTLTexture( + device: MTLDevice, commandBuffer: MTLCommandBuffer, + pixelFormat: MTLPixelFormat, + context: CIContext, colorSpace: CGColorSpace, + cIImageToMTLTextureOrientation: CIImageToMTLTextureOrientation = .cICanonical + ) throws -> MTLTexture { + let mtlDescriptor: MTLTextureDescriptor = MTLTextureDescriptor.texture2DDescriptor( + pixelFormat: pixelFormat, + width: Int(self.extent.width), height: Int(self.extent.height), + mipmapped: false + ) + /// TODO: Make this configurable if needed + mtlDescriptor.usage = [.shaderRead, .shaderWrite] + guard let texture = device.makeTexture(descriptor: mtlDescriptor) else { + throw CIImageUtilsError.segmentationTextureError + } + let imageOriented: CIImage + switch cIImageToMTLTextureOrientation { + case .cICanonical: + imageOriented = self + case .metalTopLeft: + imageOriented = self + .transformed(by: CGAffineTransform(scaleX: 1, y: -1)) + .transformed(by: CGAffineTransform(translationX: 0, y: self.extent.height)) + } + context.render( + imageOriented, + to: texture, + commandBuffer: commandBuffer, + bounds: self.extent, + colorSpace: colorSpace + ) + return texture + } + + /** + Converts the CIImage to a MTLTexture using the provided MTKTextureLoader and CIContext. + + This method creates a CGImage from the CIImage and then uses the texture loader to create the MTLTexture. + + - WARNING: + Seems to have mirroring issues. + */ + func toMTLTexture( + textureLoader: MTKTextureLoader, + context: CIContext + ) throws -> MTLTexture { + guard let cgImage = context.createCGImage(self, from: self.extent) else { + throw CIImageUtilsError.segmentationTextureError + } + let options: [MTKTextureLoader.Option: Any] = [.origin: MTKTextureLoader.Origin.bottomLeft] + return try textureLoader.newTexture(cgImage: cgImage, options: options) + } +} + +/** + Debugging functions + */ +extension CIImage { + /** + Function to check if the CIImage has backed data. 
This is useful for debugging purposes to check what the CIImage is backed by, and the relevant formats. + */ + func checkBackedData() { + if let cs = self.colorSpace { + print("CIImage is backed by color space: \(cs)") + } else { + print("CIImage has no color space") + } + + if self.properties.isEmpty { + print("CIImage has no properties") + } else { + print("CIImage properties: \(self.properties)") + } + + if let pb = self.pixelBuffer { + print("CIImage is backed by pixel buffer") + print(" Format: \(CVPixelBufferGetPixelFormatType(pb)) \(pb.pixelFormatName())") + print(" Size: \(CVPixelBufferGetWidth(pb))x\(CVPixelBufferGetHeight(pb))") + } else { + print("CIImage is not backed by a pixel buffer") + } + + if let mtl = self.metalTexture { + print("CIImage is backed by metal texture") + print(" Format: \(mtl.pixelFormat) \(mtl.pixelFormatName())") + print(" Size: \(mtl.width)x\(mtl.height)") + } else { + print("CIImage is not backed by a metal texture") + } + + if let cg = self.cgImage { + print("CIImage is backed by CGImage") + print(" Format: \(String(cg.colorSpace?.name ?? "Unknown"))") + print(" Size: \(cg.width)x\(cg.height)") + } else { + print("CIImage is not backed by a CGImage") + } + } +} diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift new file mode 100644 index 00000000..82437cd0 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift @@ -0,0 +1,301 @@ +// +// CVPixelBufferUtils.swift +// IOSAccessAssessment +// +// Created by TCAT on 9/27/24. +// + +import UIKit +import Accelerate + +struct CVPixelBufferUtils { + /** + This function creates a CVPixelBuffer with the specified width, height, and pixel format. + */ + static func createPixelBuffer(width: Int, height: Int, pixelFormat: OSType = kCVPixelFormatType_DepthFloat32) -> CVPixelBuffer? 
{ + var pixelBuffer: CVPixelBuffer? + let attrs = [ + kCVPixelBufferCGImageCompatibilityKey: true, + kCVPixelBufferCGBitmapContextCompatibilityKey: true, + kCVPixelBufferMetalCompatibilityKey: true, + kCVPixelBufferIOSurfacePropertiesKey: [:] + ] as CFDictionary + let status = CVPixelBufferCreate( + kCFAllocatorDefault, + width, + height, + pixelFormat, + attrs as CFDictionary, + &pixelBuffer + ) + if status != kCVReturnSuccess { + print("Failed to create pixel buffer") + return nil + } + return pixelBuffer + } + + static func createBlankDepthPixelBuffer(targetSize: CGSize) -> CVPixelBuffer? { + let width = Int(targetSize.width) + let height = Int(targetSize.height) + + let pixelBuffer: CVPixelBuffer? = createPixelBuffer(width: width, height: height, pixelFormat: kCVPixelFormatType_DepthFloat32) + + guard let blankPixelBuffer = pixelBuffer else { return nil } + + CVPixelBufferLockBaseAddress(blankPixelBuffer, []) + let blankBaseAddress = CVPixelBufferGetBaseAddress(blankPixelBuffer)! + let blankBufferPointer = blankBaseAddress.bindMemory(to: Float.self, capacity: width * height) + vDSP_vclr(blankBufferPointer, 1, vDSP_Length(width * height)) + CVPixelBufferUnlockBaseAddress(blankPixelBuffer, []) + + return blankPixelBuffer + } + + /** + This function extracts unique grayscale values from a pixel buffer, + gets the indices of these values from Constants.SelectedAccessibilityFeatureConfig.grayscaleValues, + and returns both the unique values and their corresponding indices. 
+ */ + static func extractUniqueGrayscaleValues(from pixelBuffer: CVPixelBuffer) -> Set { + CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) + defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) } + + guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else { + return Set() + } + + var buffer = vImage_Buffer(data: baseAddress, + height: vImagePixelCount(CVPixelBufferGetHeight(pixelBuffer)), + width: vImagePixelCount(CVPixelBufferGetWidth(pixelBuffer)), + rowBytes: CVPixelBufferGetBytesPerRow(pixelBuffer)) + var histogram = [vImagePixelCount](repeating: 0, count: 256) + let histogramError = vImageHistogramCalculation_Planar8(&buffer, &histogram, vImage_Flags(kvImageNoFlags)) + guard histogramError == kvImageNoError else { return Set() } + + var uniqueValues = Set() + for i in 0.. 0 { + uniqueValues.insert(UInt8(i)) + } + } + return uniqueValues + } +} + +/** + Archived methods to be removed later if not needed. + */ +extension CVPixelBufferUtils { + // TODO: Check if any of the methods can be sped up using GPU + // TODO: Check if the forced unwrapping used all over the functions is safe in the given context + static func cropCenterOfPixelBuffer(_ pixelBuffer: CVPixelBuffer, cropSize: CGSize) -> CVPixelBuffer? { + let width = CVPixelBufferGetWidth(pixelBuffer) + let height = CVPixelBufferGetHeight(pixelBuffer) + let cropX = (Float(width) - Float(cropSize.width)) / 2 + let cropY = (Float(height) - Float(cropSize.height)) / 2 + var croppedPixelBuffer: CVPixelBuffer? + let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(cropSize.width), Int(cropSize.height), CVPixelBufferGetPixelFormatType(pixelBuffer), nil, &croppedPixelBuffer) + guard status == kCVReturnSuccess, let outputBuffer = croppedPixelBuffer else { return nil } + + CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) + CVPixelBufferLockBaseAddress(outputBuffer, []) + + let inputBaseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)! 
+ let outputBaseAddress = CVPixelBufferGetBaseAddress(outputBuffer)! + + let inputBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer) + let outputBytesPerRow = CVPixelBufferGetBytesPerRow(outputBuffer) + + let cropXOffset = Int(cropX) * 4 + let cropYOffset = Int(cropY) * inputBytesPerRow + for y in 0.. CVPixelBuffer? { + var resizedPixelBuffer: CVPixelBuffer? + let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, CVPixelBufferGetPixelFormatType(pixelBuffer), nil, &resizedPixelBuffer) + guard status == kCVReturnSuccess, let outputBuffer = resizedPixelBuffer else { return nil } + + CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) + CVPixelBufferLockBaseAddress(outputBuffer, []) + + let inputBaseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)! + let outputBaseAddress = CVPixelBufferGetBaseAddress(outputBuffer)! + + var inBuffer = vImage_Buffer(data: inputBaseAddress, + height: vImagePixelCount(CVPixelBufferGetHeight(pixelBuffer)), + width: vImagePixelCount(CVPixelBufferGetWidth(pixelBuffer)), + rowBytes: CVPixelBufferGetBytesPerRow(pixelBuffer)) + + var outBuffer = vImage_Buffer(data: outputBaseAddress, + height: vImagePixelCount(height), + width: vImagePixelCount(width), + rowBytes: CVPixelBufferGetBytesPerRow(outputBuffer)) + + let scaleError = vImageScale_ARGB8888(&inBuffer, &outBuffer, nil, vImage_Flags(0)) + guard scaleError == kvImageNoError else { return nil } + + CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) + CVPixelBufferUnlockBaseAddress(outputBuffer, []) + + return outputBuffer + } + + static func resizeAndCropPixelBuffer(_ pixelBuffer: CVPixelBuffer, targetSize: CGSize, cropSize: CGSize) -> CVPixelBuffer? 
{ + guard let resizedPixelBuffer = resizePixelBuffer(pixelBuffer, width: Int(targetSize.width), height: Int(targetSize.height)) else { + return nil + } + return cropCenterOfPixelBuffer(resizedPixelBuffer, cropSize: cropSize) + } + + /// Temporary function to get the average value of a pixel in a depth image + /// Only used for debugging purposes + static func averagePixelBufferValue(in pixelBuffer: CVPixelBuffer) -> Float32? { + CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) + defer { + CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) + } + + guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else { + return nil + } + + let pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer) + guard pixelFormat == kCVPixelFormatType_DepthFloat32 else { + print("Unsupported pixel format: \(pixelFormat)") + return nil + } + + let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer) + let floatBuffer = baseAddress.assumingMemoryBound(to: Float32.self) + + let width = CVPixelBufferGetWidth(pixelBuffer) + let height = CVPixelBufferGetHeight(pixelBuffer) + + let totalPixels = width * height + var sum: Float32 = 0 + for y in 0...size + x + sum += floatBuffer[index] + } + } + + return sum / Float32(totalPixels) + } +} + +extension CVPixelBuffer { + /** + Returns a string representation of the pixel format type of the pixel buffer. 
+ */ + func pixelFormatName() -> String { + let p = CVPixelBufferGetPixelFormatType(self) + switch p { + case kCVPixelFormatType_1Monochrome: return "kCVPixelFormatType_1Monochrome" + case kCVPixelFormatType_2Indexed: return "kCVPixelFormatType_2Indexed" + case kCVPixelFormatType_4Indexed: return "kCVPixelFormatType_4Indexed" + case kCVPixelFormatType_8Indexed: return "kCVPixelFormatType_8Indexed" + case kCVPixelFormatType_1IndexedGray_WhiteIsZero: return "kCVPixelFormatType_1IndexedGray_WhiteIsZero" + case kCVPixelFormatType_2IndexedGray_WhiteIsZero: return "kCVPixelFormatType_2IndexedGray_WhiteIsZero" + case kCVPixelFormatType_4IndexedGray_WhiteIsZero: return "kCVPixelFormatType_4IndexedGray_WhiteIsZero" + case kCVPixelFormatType_8IndexedGray_WhiteIsZero: return "kCVPixelFormatType_8IndexedGray_WhiteIsZero" + case kCVPixelFormatType_16BE555: return "kCVPixelFormatType_16BE555" + case kCVPixelFormatType_16LE555: return "kCVPixelFormatType_16LE555" + case kCVPixelFormatType_16LE5551: return "kCVPixelFormatType_16LE5551" + case kCVPixelFormatType_16BE565: return "kCVPixelFormatType_16BE565" + case kCVPixelFormatType_16LE565: return "kCVPixelFormatType_16LE565" + case kCVPixelFormatType_24RGB: return "kCVPixelFormatType_24RGB" + case kCVPixelFormatType_24BGR: return "kCVPixelFormatType_24BGR" + case kCVPixelFormatType_32ARGB: return "kCVPixelFormatType_32ARGB" + case kCVPixelFormatType_32BGRA: return "kCVPixelFormatType_32BGRA" + case kCVPixelFormatType_32ABGR: return "kCVPixelFormatType_32ABGR" + case kCVPixelFormatType_32RGBA: return "kCVPixelFormatType_32RGBA" + case kCVPixelFormatType_64ARGB: return "kCVPixelFormatType_64ARGB" + case kCVPixelFormatType_48RGB: return "kCVPixelFormatType_48RGB" + case kCVPixelFormatType_32AlphaGray: return "kCVPixelFormatType_32AlphaGray" + case kCVPixelFormatType_16Gray: return "kCVPixelFormatType_16Gray" + case kCVPixelFormatType_30RGB: return "kCVPixelFormatType_30RGB" + case kCVPixelFormatType_422YpCbCr8: return 
"kCVPixelFormatType_422YpCbCr8" + case kCVPixelFormatType_4444YpCbCrA8: return "kCVPixelFormatType_4444YpCbCrA8" + case kCVPixelFormatType_4444YpCbCrA8R: return "kCVPixelFormatType_4444YpCbCrA8R" + case kCVPixelFormatType_4444AYpCbCr8: return "kCVPixelFormatType_4444AYpCbCr8" + case kCVPixelFormatType_4444AYpCbCr16: return "kCVPixelFormatType_4444AYpCbCr16" + case kCVPixelFormatType_444YpCbCr8: return "kCVPixelFormatType_444YpCbCr8" + case kCVPixelFormatType_422YpCbCr16: return "kCVPixelFormatType_422YpCbCr16" + case kCVPixelFormatType_422YpCbCr10: return "kCVPixelFormatType_422YpCbCr10" + case kCVPixelFormatType_444YpCbCr10: return "kCVPixelFormatType_444YpCbCr10" + case kCVPixelFormatType_420YpCbCr8Planar: return "kCVPixelFormatType_420YpCbCr8Planar" + case kCVPixelFormatType_420YpCbCr8PlanarFullRange: return "kCVPixelFormatType_420YpCbCr8PlanarFullRange" + case kCVPixelFormatType_422YpCbCr_4A_8BiPlanar: return "kCVPixelFormatType_422YpCbCr_4A_8BiPlanar" + case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: return "kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange" + case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: return "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange" + case kCVPixelFormatType_422YpCbCr8_yuvs: return "kCVPixelFormatType_422YpCbCr8_yuvs" + case kCVPixelFormatType_422YpCbCr8FullRange: return "kCVPixelFormatType_422YpCbCr8FullRange" + case kCVPixelFormatType_OneComponent8: return "kCVPixelFormatType_OneComponent8" + case kCVPixelFormatType_TwoComponent8: return "kCVPixelFormatType_TwoComponent8" + case kCVPixelFormatType_30RGBLEPackedWideGamut: return "kCVPixelFormatType_30RGBLEPackedWideGamut" + case kCVPixelFormatType_OneComponent16: return "kCVPixelFormatType_OneComponent16" + case kCVPixelFormatType_OneComponent16Half: return "kCVPixelFormatType_OneComponent16Half" + case kCVPixelFormatType_OneComponent32Float: return "kCVPixelFormatType_OneComponent32Float" + case kCVPixelFormatType_TwoComponent16: return 
"kCVPixelFormatType_TwoComponent16" + case kCVPixelFormatType_TwoComponent16Half: return "kCVPixelFormatType_TwoComponent16Half" + case kCVPixelFormatType_TwoComponent32Float: return "kCVPixelFormatType_TwoComponent32Float" + case kCVPixelFormatType_64RGBAHalf: return "kCVPixelFormatType_64RGBAHalf" + case kCVPixelFormatType_128RGBAFloat: return "kCVPixelFormatType_128RGBAFloat" + case kCVPixelFormatType_14Bayer_GRBG: return "kCVPixelFormatType_14Bayer_GRBG" + case kCVPixelFormatType_14Bayer_RGGB: return "kCVPixelFormatType_14Bayer_RGGB" + case kCVPixelFormatType_14Bayer_BGGR: return "kCVPixelFormatType_14Bayer_BGGR" + case kCVPixelFormatType_14Bayer_GBRG: return "kCVPixelFormatType_14Bayer_GBRG" + case kCVPixelFormatType_DepthFloat16: return "kCVPixelFormatType_DepthFloat16" + case kCVPixelFormatType_DepthFloat32: return "kCVPixelFormatType_DepthFloat32" + default: return "UNKNOWN" + } + } + + /** + Returns the corresponding (recommended) Metal pixel format for the pixel buffer's format type. + */ + func metalPixelFormat(plane: Int = 0) -> MTLPixelFormat? { + let p = CVPixelBufferGetPixelFormatType(self) + switch p { + case kCVPixelFormatType_OneComponent8, kCVPixelFormatType_TwoComponent8: + return .r8Unorm + case kCVPixelFormatType_OneComponent16, kCVPixelFormatType_TwoComponent16: + return .r16Unorm + case kCVPixelFormatType_OneComponent16Half, kCVPixelFormatType_TwoComponent16Half: + return .r16Float + case kCVPixelFormatType_OneComponent32Float, kCVPixelFormatType_TwoComponent32Float: + return .r32Float + case kCVPixelFormatType_16Gray: + return .r16Unorm + case kCVPixelFormatType_32BGRA: + return .bgra8Unorm + case kCVPixelFormatType_32RGBA: + return .rgba8Unorm + case kCVPixelFormatType_64RGBAHalf: + return .rgba16Float + case kCVPixelFormatType_128RGBAFloat: + return .rgba32Float + case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: + return plane == 0 ? 
.r8Unorm : .rg8Unorm + case kCVPixelFormatType_420YpCbCr8Planar, kCVPixelFormatType_420YpCbCr8PlanarFullRange: + return .r8Unorm + case kCVPixelFormatType_16LE565: + return .b5g6r5Unorm + default: + // The rest either require conversion or are not mappable directly + return nil + } + } +} diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.metal new file mode 100644 index 00000000..257cb8cb --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.metal @@ -0,0 +1,36 @@ +// +// CenterCropTransformUtils.metal +// IOSAccessAssessment +// +// Created by Himanshu on 2/9/26. +// + +#include <metal_stdlib> +#include <simd/simd.h> +using namespace metal; +#import "ShaderTypes.h" + +extern "C" kernel void revertCenterCropAspectFitKernel( + texture2d<float, access::read> src [[ texture(0) ]], + texture2d<float, access::write> dst [[ texture(1) ]], + constant RevertCenterCropParams& params [[ buffer(0) ]], + uint2 gid [[ thread_position_in_grid ]] +) { + if (gid.x >= params.dstWidth || gid.y >= params.dstHeight) { + return; + } + // Destination pixel center + float2 dstPos = float2(gid) + 0.5; + // Map back to source space + float2 srcPos = (dstPos - params.offset) / params.scale; + + // Nearest-neighbor sampling + int sx = int(round(srcPos.x - 0.5)); + int sy = int(round(srcPos.y - 0.5)); + + float4 pixelColor = float4(0.0, 0.0, 0.0, 0.0); + if (sx >= 0 && sy >= 0 && sx < int(params.srcWidth) && sy < int(params.srcHeight)) { + pixelColor = src.read(uint2(sx, sy)); + } + dst.write(pixelColor, gid); +} diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift new file mode 100644 index 00000000..aab35392 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift @@ -0,0
+1,117 @@ +// +// CenterCropTransformUtils.swift +// IOSAccessAssessment +// +// Created by Himanshu on 2/9/26. +// + +import Metal +import MetalKit +import PointNMapShaderTypes + +public enum CenterCropTransformUtilsError: Error, LocalizedError { + case metalInitializationFailed + case invalidInputImage + case textureCreationFailed + case metalPipelineCreationError + case outputImageCreationFailed + + public var errorDescription: String? { + switch self { + case .metalInitializationFailed: + return "Failed to initialize Metal resources." + case .invalidInputImage: + return "The input image is invalid." + case .textureCreationFailed: + return "Failed to create Metal textures." + case .metalPipelineCreationError: + return "Failed to create Metal compute pipeline." + case .outputImageCreationFailed: + return "Failed to create output CIImage from Metal texture." + } + } +} + +public struct CenterCropTransformUtils { + private let device: MTLDevice + private let commandQueue: MTLCommandQueue + private let textureLoader: MTKTextureLoader + + private let ciContext: CIContext + + public init() throws { + guard let device = MTLCreateSystemDefaultDevice(), + let commandQueue = device.makeCommandQueue() else { + throw CenterCropTransformUtilsError.metalInitializationFailed + } + self.device = device + self.commandQueue = commandQueue + self.textureLoader = MTKTextureLoader(device: device) + + self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) + } + + public func revertCenterCropAspectFit(_ image: CIImage, to destSize: CGSize) throws -> CIImage { + let sourceSize = image.extent.size + let sourceAspect = sourceSize.width / sourceSize.height + let destAspect = destSize.width / destSize.height + + let scale: CGFloat + var offsetX: CGFloat = 0 + var offsetY: CGFloat = 0 + if sourceAspect < destAspect { + scale = destSize.height / sourceSize.height + let scaledWidth = sourceSize.width * scale + offsetX = 
(destSize.width - scaledWidth) / 2 + } else { + scale = destSize.width / sourceSize.width + let scaledHeight = sourceSize.height * scale + offsetY = (destSize.height - scaledHeight) / 2 + } + var params = RevertCenterCropParams( + srcWidth: UInt32(sourceSize.width), + srcHeight: UInt32(sourceSize.height), + dstWidth: UInt32(destSize.width), + dstHeight: UInt32(destSize.height), + scale: Float(scale), + offset: SIMD2(Float(offsetX), Float(offsetY)) + ) + + let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .r8Unorm, width: Int(destSize.width), height: Int(destSize.height), mipmapped: false) + descriptor.usage = [.shaderRead, .shaderWrite] + guard let commandBuffer = self.commandQueue.makeCommandBuffer() else { + throw CenterCropTransformUtilsError.metalPipelineCreationError + } + let sourceTexture: MTLTexture = try image.toMTLTexture( + device: device, commandBuffer: commandBuffer, pixelFormat: .r8Unorm, + context: ciContext, colorSpace: CGColorSpaceCreateDeviceRGB() + ) + guard let destTexture = self.device.makeTexture(descriptor: descriptor) else { + throw CenterCropTransformUtilsError.textureCreationFailed + } + guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "revertCenterCropAspectFitKernel"), + let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { + throw CenterCropTransformUtilsError.metalInitializationFailed + } + guard let commandEncoder = commandBuffer.makeComputeCommandEncoder() else { + throw CenterCropTransformUtilsError.metalPipelineCreationError + } + commandEncoder.setComputePipelineState(pipeline) + commandEncoder.setTexture(sourceTexture, index: 0) + commandEncoder.setTexture(destTexture, index: 1) + commandEncoder.setBytes(&params, length: MemoryLayout<RevertCenterCropParams>.stride, index: 0) + let threadgroupSize = MTLSize(width: pipeline.threadExecutionWidth, height: pipeline.maxTotalThreadsPerThreadgroup / pipeline.threadExecutionWidth, depth: 1) + let threadgroups = MTLSize(width: (Int(destSize.width) + threadgroupSize.width - 1) / threadgroupSize.width, + height: (Int(destSize.height) + threadgroupSize.height - 1) / threadgroupSize.height, + depth: 1) + commandEncoder.dispatchThreadgroups(threadgroups, threadsPerThreadgroup: threadgroupSize) + commandEncoder.endEncoding() + commandBuffer.commit() + commandBuffer.waitUntilCompleted() + + guard let destCIImage = CIImage(mtlTexture: destTexture, options: [.colorSpace: NSNull()]) else { + throw CenterCropTransformUtilsError.outputImageCreationFailed + } + return destCIImage + } +} diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtilsExtension.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtilsExtension.swift new file mode 100644 index 00000000..ccd1126b --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtilsExtension.swift @@ -0,0 +1,244 @@ +// +// CenterCropTransformUtils.swift +// IOSAccessAssessment +// +// Created by Himanshu on 10/27/25. +// + +import UIKit + +/** + Utility struct for center crop transformations. + Contains methods to perform center-crop with aspect-fit resizing, to get the transform, process a CIImage, or process a CGRect.
+ Also contains methods to revert the center-crop transformation. + */ +public extension CenterCropTransformUtils { + /** + Center-crop with aspect-fit resizing. + + This function resizes a CIImage to match the specified size by: + - First, resizing the image to match the smaller dimension while maintaining the aspect ratio. + - Then, cropping the image to the specified size while centering it. + It thus gets the largest possible subregion of the image that fits within the target size without distortion. + */ + static func centerCropAspectFit(_ image: CIImage, to size: CGSize) -> CIImage { + let sourceAspect = image.extent.width / image.extent.height + let destAspect = size.width / size.height + + var transform: CGAffineTransform = .identity + if sourceAspect > destAspect { + let scale = size.height / image.extent.height + let newWidth = image.extent.width * scale + let xOffset = (size.width - newWidth) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: xOffset / scale, y: 0) + } else { + let scale = size.width / image.extent.width + let newHeight = image.extent.height * scale + let yOffset = (size.height - newHeight) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: 0, y: yOffset / scale) + } + let transformedImage = image.transformed(by: transform) + let croppedImage = transformedImage.cropped(to: CGRect(origin: .zero, size: size)) + return croppedImage + } + + static func centerCropAspectFitTransform(imageSize: CGSize, to size: CGSize) -> CGAffineTransform { + let sourceAspect = imageSize.width / imageSize.height + let destAspect = size.width / size.height + + var transform: CGAffineTransform = .identity + if sourceAspect > destAspect { + let scale = size.height / imageSize.height + let newWidth = imageSize.width * scale + let xOffset = (size.width - newWidth) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: xOffset / scale, y: 0) + } else { + let scale = size.width / 
imageSize.width + let newHeight = imageSize.height * scale + let yOffset = (size.height - newHeight) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: 0, y: yOffset / scale) + } + return transform + } + + /** + This reverse function attempts to revert the effect of `centerCropAspectFit`. + It takes the cropped and resized image, the target size it was resized to, and the original size before resizing and cropping. + It calculates the necessary scaling and translation to restore the image to its original aspect ratio and size. + + - WARNING: + Do not use this function for images without color space or with alpha channels, as it may produce incorrect results. + */ + static func revertCenterCropAspectFit( + _ image: CIImage, from originalSize: CGSize + ) -> CIImage { + let sourceAspect = image.extent.width / image.extent.height + let destAspect = originalSize.width / originalSize.height + + var transform: CGAffineTransform = .identity + var newWidth: CGFloat = originalSize.width + var newHeight: CGFloat = originalSize.height + if sourceAspect < destAspect { + let scale = originalSize.height / image.extent.height + newWidth = originalSize.width + let xOffset = (newWidth - image.extent.width * scale) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: xOffset / scale, y: 0) + } else { + let scale = originalSize.width / image.extent.width + newHeight = originalSize.height + let yOffset = (newHeight - image.extent.height * scale) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: 0, y: yOffset / scale) + } + let transformedImage = image.samplingNearest().transformed(by: transform) + let canvas = CGRect(x: 0, y: 0, width: newWidth, height: newHeight) + let background = CIImage(color: .clear).cropped(to: canvas) + let composed = transformedImage.composited(over: background) + return composed + } + + /** + This reverse function attempts to revert the effect of `centerCropAspectFit`. 
+ It takes the cropped and resized image, the target size it was resized to, and the original size before resizing and cropping. + It calculates the necessary scaling and translation to restore the image to its original aspect ratio and size. + It renders the final image on a provided pixel buffer + */ + static func revertCenterCropAspectFit( + _ image: CIImage, from originalSize: CGSize, + to pixelBuffer: CVPixelBuffer, + context: CIContext, colorSpace: CGColorSpace? = nil + ) { + let sourceAspect = image.extent.width / image.extent.height + let destAspect = originalSize.width / originalSize.height + + var transform: CGAffineTransform = .identity + var newWidth: CGFloat = originalSize.width + var newHeight: CGFloat = originalSize.height + if sourceAspect < destAspect { + let scale = originalSize.height / image.extent.height + newWidth = originalSize.width + let xOffset = (newWidth - image.extent.width * scale) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: xOffset / scale, y: 0) + } else { + let scale = originalSize.width / image.extent.width + newHeight = originalSize.height + let yOffset = (newHeight - image.extent.height * scale) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: 0, y: yOffset / scale) + } + let transformedImage = image.samplingNearest().transformed(by: transform) + let canvas = CGRect(x: 0, y: 0, width: newWidth, height: newHeight) + context.render(transformedImage, to: pixelBuffer, bounds: canvas, colorSpace: colorSpace) + } + + /** + This function returns the transformation to revert the effect of `centerCropAspectFit`. 
+ */ + static func revertCenterCropAspectFitTransform(imageSize: CGSize, from originalSize: CGSize) -> CGAffineTransform { + let sourceAspect = imageSize.width / imageSize.height + let destAspect = originalSize.width / originalSize.height + + var transform: CGAffineTransform = .identity + if sourceAspect < destAspect { + let scale = originalSize.height / imageSize.height + let newWidth = imageSize.width * scale + let xOffset = (originalSize.width - newWidth) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: xOffset / scale, y: 0) + } else { + let scale = originalSize.width / imageSize.width + let newHeight = imageSize.height * scale + let yOffset = (originalSize.height - newHeight) / 2 + transform = CGAffineTransform(scaleX: scale, y: scale) + .translatedBy(x: 0, y: yOffset / scale) + } + return transform + } + + /** + This function computes the bounding rectangle of the center-cropped area within the original image that corresponds to the specified size after center-crop with aspect-fit resizing. + */ + static func centerCropAspectFitBoundingRect(imageSize: CGSize, to size: CGSize) -> CGRect { + let sourceAspect = imageSize.width / imageSize.height + let destAspect = size.width / size.height + + var rect: CGRect = .zero + var scale: CGFloat = 1.0 + var xOffset: CGFloat = 0.0 + var yOffset: CGFloat = 0.0 + if sourceAspect > destAspect { + scale = imageSize.height / size.height + xOffset = (imageSize.width - (size.width * scale)) / 2 + } else { + scale = imageSize.width / size.width + yOffset = (imageSize.height - (size.height * scale)) / 2 + } + rect.size = CGSize(width: size.width * scale, height: size.height * scale) + rect.origin = CGPoint(x: xOffset, y: yOffset) + return rect + } + + /** + This function reverts the effect of `centerCropAspectFit` on a CGRect. + It computes the original rectangle in the source image that corresponds to the given rectangle in the cropped and resized image. 
+ */ + static func revertCenterCropAspectFitRect(_ rect: CGRect, imageSize: CGSize, from originalSize: CGSize) -> CGRect { + let sourceAspect = imageSize.width / imageSize.height + let destAspect = originalSize.width / originalSize.height + + var transform: CGAffineTransform = .identity + if sourceAspect < destAspect { + // Image was cropped horizontally because original is wider + let scale = imageSize.height / originalSize.height + let newImageSize = CGSize(width: imageSize.width / scale, height: imageSize.height / scale) + let xOffset = (originalSize.width - newImageSize.width) / (2 * originalSize.width) + let widthScale = newImageSize.width / originalSize.width + transform = CGAffineTransform(scaleX: widthScale, y: 1) + .translatedBy(x: xOffset / widthScale, y: 0) + } else { + // Image was cropped vertically because original is taller + let scale = imageSize.width / originalSize.width + let newImageSize = CGSize(width: imageSize.width / scale, height: imageSize.height / scale) + let yOffset = (originalSize.height - newImageSize.height) / (2 * originalSize.height) + let heightScale = newImageSize.height / originalSize.height + transform = CGAffineTransform(scaleX: 1, y: heightScale) + .translatedBy(x: 0, y: yOffset / heightScale) + } + let revertedRect = rect.applying(transform) + return revertedRect + } + + /** + This function returns the transformation to reverse the effect of `centerCropAspectFit` on normalized co-ordinates. 
+ */ + static func revertCenterCropAspectFitNormalizedTransform(imageSize: CGSize, from originalSize: CGSize) -> CGAffineTransform { + let sourceAspect = imageSize.width / imageSize.height + let destAspect = originalSize.width / originalSize.height + + var transform: CGAffineTransform = .identity + if sourceAspect < destAspect { + // Image was cropped horizontally because original is wider + let scale = imageSize.height / originalSize.height + let newImageSize = CGSize(width: imageSize.width / scale, height: imageSize.height / scale) + let xOffset = (originalSize.width - newImageSize.width) / (2 * originalSize.width) + let widthScale = newImageSize.width / originalSize.width + transform = CGAffineTransform(scaleX: widthScale, y: 1) + .translatedBy(x: xOffset / widthScale, y: 0) + } else { + // Image was cropped vertically because original is taller + let scale = imageSize.width / originalSize.width + let newImageSize = CGSize(width: imageSize.width / scale, height: imageSize.height / scale) + let yOffset = (originalSize.height - newImageSize.height) / (2 * originalSize.height) + let heightScale = newImageSize.height / originalSize.height + transform = CGAffineTransform(scaleX: 1, y: heightScale) + .translatedBy(x: 0, y: yOffset / heightScale) + } + return transform + } +} diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/MTLTextureUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/MTLTextureUtils.swift new file mode 100644 index 00000000..a3043df4 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/MTLTextureUtils.swift @@ -0,0 +1,25 @@ +// +// MTLTextureUtils.swift +// IOSAccessAssessment +// +// Created by Himanshu on 2/8/26. +// + +import Metal + +extension MTLTexture { + /** + Returns a string representation of the pixel format type of the metal texture. 
+ */ + func pixelFormatName() -> String { + let p = self.pixelFormat + switch p { + case .r8Unorm: return "r8Unorm" + case .r16Float: return "r16Float" + case .r32Float: return "r32Float" + case .rgba8Unorm: return "rgba8Unorm" + case .rgba16Float: return "rgba16Float" + default: return "Other (\(p))" + } + } +} diff --git a/PointNMapShared/Sources/PointNMap/Geospatial/LocationManager.swift b/PointNMapShared/Sources/PointNMap/Geospatial/LocationManager.swift index cc73a184..91870863 100644 --- a/PointNMapShared/Sources/PointNMap/Geospatial/LocationManager.swift +++ b/PointNMapShared/Sources/PointNMap/Geospatial/LocationManager.swift @@ -8,6 +8,7 @@ import CoreLocation import UIKit import MapKit +import Combine public enum LocationManagerError: Error, LocalizedError { case locationUnavailable From 9e349048cdaf634e963f0de16ddd9b0058fe3d30 Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 13:37:21 -0700 Subject: [PATCH 06/14] Start moving computer vision folder to framework --- ARCHITECTURE_NOTES.md | 9 ++ IOSAccessAssessment.xcodeproj/project.pbxproj | 100 ++++++------------ .../ARCamera/ARCameraManager.swift | 1 + .../ARCamera/Helpers/FrameRasterizer.swift | 1 + .../ARCamera/TestCameraManager.swift | 1 + .../Contour/ContourFeatureRasterizer.swift | 6 +- .../Contour/ContourRequestProcessor.swift | 6 +- .../DepthEstimation/DepthModel.swift | 1 + .../SegmentationModelRequestProcessor.swift | 1 + .../Image/Contour/ContourUtils.swift | 20 ++-- .../Image/Homography/Homography.metal | 0 .../HomographyRequestProcessor.swift | 8 +- .../HomographyTransformFilter.swift | 12 +-- .../ImageProcessing/BinaryMaskFilter.swift | 10 +- .../DimensionBasedMaskFilter.swift | 10 +- .../GrayscaleToColorFilter.swift | 10 +- .../ImageProcessing/ImageProcessing.metal | 0 .../ImageProcessing/IntersectionFilter.swift | 10 +- .../Image/UnionOfMasks/UnionOfMasks.metal | 0 .../UnionOfMasks/UnionOfMasksProcessor.swift | 22 ++-- .../Image/Utils/CGImageUtils.swift | 2 +- 
.../Image/Utils/CIImageUtils.swift | 14 +-- .../Image/Utils/CVPixelBufferUtils.swift | 12 +-- 23 files changed, 116 insertions(+), 140 deletions(-) create mode 100644 ARCHITECTURE_NOTES.md rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Contour/ContourUtils.swift (93%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Homography/Homography.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Homography/HomographyRequestProcessor.swift (90%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Homography/HomographyTransformFilter.swift (94%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift (94%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift (94%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift (95%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/ImageProcessing/ImageProcessing.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift (94%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift (93%) diff --git a/ARCHITECTURE_NOTES.md b/ARCHITECTURE_NOTES.md new file mode 100644 index 00000000..0e6b4182 --- /dev/null +++ b/ARCHITECTURE_NOTES.md @@ -0,0 +1,9 @@ +# Architecture Notes + +## Overview + +### Main App: iOSAccessAssessment + +### Shared Code: PointNMapShared + +Framework. 
diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 648c8acc..421cab0a 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -17,7 +17,6 @@ A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A305B05B2E18882500ECCF9B /* DatasetEncoder.swift */; }; A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A305B06B2E18A85D00ECCF9B /* DepthCoder.swift */; }; A306462A2D614D9600B97D1B /* ImageSaver.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30646292D614D9400B97D1B /* ImageSaver.swift */; }; - A30801502EC0926800B1BA3A /* ContourUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308014F2EC0926500B1BA3A /* ContourUtils.swift */; }; A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801522EC09B1D00B1BA3A /* AccessibilityFeatureConfig.swift */; }; A308015C2EC09BB700B1BA3A /* CityscapesClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801572EC09BB700B1BA3A /* CityscapesClassConfig.swift */; }; A308015D2EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801582EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift */; }; @@ -91,7 +90,6 @@ A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A374FAB62EE0173200055268 /* OSMChangesetUploadResponseElement.swift */; }; A37C3C182F3141FF001F4248 /* Plane.metal in Sources */ = {isa = PBXBuildFile; fileRef = A37C3C172F3141F9001F4248 /* Plane.metal */; }; A37C3C1A2F3144F7001F4248 /* PlaneAttributeProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37C3C192F3144F4001F4248 /* PlaneAttributeProcessor.swift */; }; - A37C3C1C2F356254001F4248 /* IntersectionFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
A37C3C1B2F356254001F4248 /* IntersectionFilter.swift */; }; A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E3B2EED60F300B07B77 /* PngEncoder.mm */; }; A37E3E3D2EED60F300B07B77 /* lodepng.cpp in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E392EED60F300B07B77 /* lodepng.cpp */; }; A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E942EFB66E600B07B77 /* CameraIntrinsicsCoder.swift */; }; @@ -114,7 +112,6 @@ A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413AC2ECF94950039298C /* DBSCAN.swift */; }; A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */; }; A3AC01AF2F294CCD00A1D0E5 /* PlaneRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3AC01AE2F294CCA00A1D0E5 /* PlaneRasterizer.swift */; }; - A3B2DDBF2DC99DEF003416FB /* HomographyRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B2DDBE2DC99DE9003416FB /* HomographyRequestProcessor.swift */; }; A3B2DDC12DC99F44003416FB /* SegmentationModelRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */; }; A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BD9C2F81CED70036C6EC /* DamageDetectionRasterizer.swift */; }; A3B5BDA02F831F270036C6EC /* ProjectionUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BD9F2F831F250036C6EC /* ProjectionUtils.swift */; }; @@ -122,7 +119,6 @@ A3B5BDA52F8329A80036C6EC /* WorldPointsGridExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BDA42F8329A20036C6EC /* WorldPointsGridExtension.swift */; }; A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FC42F7647FC0052AE2C /* 
EnvironmentService.swift */; }; A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */; }; - A3BB5AFB2DB210AE008673ED /* BinaryMaskFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BB5AFA2DB210A8008673ED /* BinaryMaskFilter.swift */; }; A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */; }; A3C1D7352F84A78E00833411 /* SurfaceNormalsProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7342F84A78800833411 /* SurfaceNormalsProcessor.swift */; }; A3C1D7392F84A84900833411 /* SurfaceNormals.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7382F84A84900833411 /* SurfaceNormals.metal */; }; @@ -138,12 +134,6 @@ A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */; }; A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */; }; A3DA4DBE2EBCB9F9005BB812 /* MetalContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */; }; - A3DC22E92DCF0F9A0020CE84 /* ImageProcessing.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22E82DCF0F9A0020CE84 /* ImageProcessing.metal */; }; - A3DC22ED2DCF10050020CE84 /* Homography.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22EC2DCF10050020CE84 /* Homography.metal */; }; - A3DC22EF2DCF119A0020CE84 /* HomographyTransformFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22EE2DCF11970020CE84 /* HomographyTransformFilter.swift */; }; - A3DC22F72DD032960020CE84 /* UnionOfMasks.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22F62DD032960020CE84 /* UnionOfMasks.metal */; }; - A3DC22F92DD036AF0020CE84 /* 
UnionOfMasksProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22F82DD0369E0020CE84 /* UnionOfMasksProcessor.swift */; }; - A3DC22FB2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DC22FA2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift */; }; A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3E162772F3AFC63002D4D08 /* MeshCoder.swift */; }; A3E6D2332F464A2D00DAF88E /* PngDecoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = A3E6D2322F464A2700DAF88E /* PngDecoder.mm */; }; A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E422F57A98A00F515E6 /* DatasetDecoder.swift */; }; @@ -183,7 +173,6 @@ DAA7F8B52CA38C11003666D8 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B42CA38C11003666D8 /* Constants.swift */; }; DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */; }; DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */; }; - DAA7F8CC2CA77FA5003666D8 /* GrayscaleToColorFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8CB2CA77FA5003666D8 /* GrayscaleToColorFilter.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -268,7 +257,6 @@ A305B05B2E18882500ECCF9B /* DatasetEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DatasetEncoder.swift; sourceTree = ""; }; A305B06B2E18A85D00ECCF9B /* DepthCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthCoder.swift; sourceTree = ""; }; A30646292D614D9400B97D1B /* ImageSaver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageSaver.swift; sourceTree = ""; }; - A308014F2EC0926500B1BA3A /* ContourUtils.swift */ = 
{isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourUtils.swift; sourceTree = ""; }; A30801522EC09B1D00B1BA3A /* AccessibilityFeatureConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureConfig.swift; sourceTree = ""; }; A30801562EC09BB700B1BA3A /* VOCClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VOCClassConfig.swift; sourceTree = ""; }; A30801572EC09BB700B1BA3A /* CityscapesClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CityscapesClassConfig.swift; sourceTree = ""; }; @@ -339,7 +327,6 @@ A374FAB62EE0173200055268 /* OSMChangesetUploadResponseElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMChangesetUploadResponseElement.swift; sourceTree = ""; }; A37C3C172F3141F9001F4248 /* Plane.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Plane.metal; sourceTree = ""; }; A37C3C192F3144F4001F4248 /* PlaneAttributeProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaneAttributeProcessor.swift; sourceTree = ""; }; - A37C3C1B2F356254001F4248 /* IntersectionFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IntersectionFilter.swift; sourceTree = ""; }; A37E3E382EED60F300B07B77 /* lodepng.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = lodepng.h; sourceTree = ""; }; A37E3E392EED60F300B07B77 /* lodepng.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = lodepng.cpp; sourceTree = ""; }; A37E3E3A2EED60F300B07B77 /* PngEncoder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PngEncoder.h; sourceTree = ""; }; @@ -364,7 +351,6 @@ A3A413AC2ECF94950039298C /* DBSCAN.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DBSCAN.swift; sourceTree = ""; }; 
A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomXMLParser.swift; sourceTree = ""; }; A3AC01AE2F294CCA00A1D0E5 /* PlaneRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaneRasterizer.swift; sourceTree = ""; }; - A3B2DDBE2DC99DE9003416FB /* HomographyRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomographyRequestProcessor.swift; sourceTree = ""; }; A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationModelRequestProcessor.swift; sourceTree = ""; }; A3B5BD9C2F81CED70036C6EC /* DamageDetectionRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DamageDetectionRasterizer.swift; sourceTree = ""; }; A3B5BD9F2F831F250036C6EC /* ProjectionUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProjectionUtils.swift; sourceTree = ""; }; @@ -372,7 +358,6 @@ A3B5BDA42F8329A20036C6EC /* WorldPointsGridExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorldPointsGridExtension.swift; sourceTree = ""; }; A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnvironmentService.swift; sourceTree = ""; }; A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMMapDataResponse.swift; sourceTree = ""; }; - A3BB5AFA2DB210A8008673ED /* BinaryMaskFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BinaryMaskFilter.swift; sourceTree = ""; }; A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureEncoder.swift; sourceTree = 
""; }; A3C1D7342F84A78800833411 /* SurfaceNormalsProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceNormalsProcessor.swift; sourceTree = ""; }; A3C1D7382F84A84900833411 /* SurfaceNormals.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = SurfaceNormals.metal; sourceTree = ""; }; @@ -388,12 +373,6 @@ A3DA4DB42EBAE101005BB812 /* IOSAccessAssessment-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "IOSAccessAssessment-Bridging-Header.h"; sourceTree = ""; }; A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationMeshRecord.swift; sourceTree = ""; }; A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalContext.swift; sourceTree = ""; }; - A3DC22E82DCF0F9A0020CE84 /* ImageProcessing.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = ImageProcessing.metal; sourceTree = ""; }; - A3DC22EC2DCF10050020CE84 /* Homography.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Homography.metal; sourceTree = ""; }; - A3DC22EE2DCF11970020CE84 /* HomographyTransformFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomographyTransformFilter.swift; sourceTree = ""; }; - A3DC22F62DD032960020CE84 /* UnionOfMasks.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = UnionOfMasks.metal; sourceTree = ""; }; - A3DC22F82DD0369E0020CE84 /* UnionOfMasksProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UnionOfMasksProcessor.swift; sourceTree = ""; }; - A3DC22FA2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DimensionBasedMaskFilter.swift; sourceTree = ""; }; 
A3E162772F3AFC63002D4D08 /* MeshCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshCoder.swift; sourceTree = ""; }; A3E6D2312F4649AD00DAF88E /* PngDecoder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PngDecoder.h; sourceTree = ""; }; A3E6D2322F464A2700DAF88E /* PngDecoder.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PngDecoder.mm; sourceTree = ""; }; @@ -433,12 +412,24 @@ DAA7F8B42CA38C11003666D8 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpinnerView.swift; sourceTree = ""; }; DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProgressBar.swift; sourceTree = ""; }; - DAA7F8CB2CA77FA5003666D8 /* GrayscaleToColorFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GrayscaleToColorFilter.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */ A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Sources/PointNMap/ComputerVision/Image/Contour/ContourUtils.swift, + Sources/PointNMap/ComputerVision/Image/Homography/Homography.metal, + Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift, + Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift, + 
Sources/PointNMap/ComputerVision/Image/ImageProcessing/ImageProcessing.metal, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift, + Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal, + Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift, + ); platformFiltersByRelativePath = { PointNMapShared.h = ( ios, @@ -457,10 +448,27 @@ ); target = A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */; }; + A312FE222FA3F1C90044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Sources/PointNMap/ComputerVision/Image/Contour/ContourUtils.swift, + Sources/PointNMap/ComputerVision/Image/Homography/Homography.metal, + Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift, + Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/ImageProcessing.metal, + Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift, + Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal, + Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift, + ); + target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; + }; /* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ /* Begin PBXFileSystemSynchronizedRootGroup section */ - A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; 
}; + A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE222FA3F1C90044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; A312FD8B2FA3391C0044808E /* PointNMapSharedTests */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = PointNMapSharedTests; sourceTree = ""; }; A312FE0E2FA3EBE80044808E /* PointNMapShaderTypes */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE172FA3EBE80044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShaderTypes; sourceTree = ""; }; /* End PBXFileSystemSynchronizedRootGroup section */ @@ -649,22 +657,11 @@ path = LocalDataset; sourceTree = ""; }; - A308014C2EC091DC00B1BA3A /* Homography */ = { - isa = PBXGroup; - children = ( - A3DC22EC2DCF10050020CE84 /* Homography.metal */, - A3DC22EE2DCF11970020CE84 /* HomographyTransformFilter.swift */, - A3B2DDBE2DC99DE9003416FB /* HomographyRequestProcessor.swift */, - ); - path = Homography; - sourceTree = ""; - }; A308014D2EC091E400B1BA3A /* Contour */ = { isa = PBXGroup; children = ( A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */, A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */, - A308014F2EC0926500B1BA3A /* ContourUtils.swift */, ); path = Contour; sourceTree = ""; @@ -1189,18 +1186,6 @@ path = Extensions; sourceTree = ""; }; - A3BB5AF92DB21080008673ED /* ImageProcessing */ = { - isa = PBXGroup; - children = ( - A3DC22E82DCF0F9A0020CE84 /* ImageProcessing.metal */, - A3BB5AFA2DB210A8008673ED /* BinaryMaskFilter.swift */, - A37C3C1B2F356254001F4248 /* IntersectionFilter.swift */, - A3DC22FA2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift */, - DAA7F8CB2CA77FA5003666D8 /* 
GrayscaleToColorFilter.swift */, - ); - path = ImageProcessing; - sourceTree = ""; - }; A3C1D73E2F882EE100833411 /* SurfaceIntegrity */ = { isa = PBXGroup; children = ( @@ -1243,15 +1228,6 @@ path = Utils; sourceTree = ""; }; - A3DA4DC02EBE86C7005BB812 /* UnionOfMasks */ = { - isa = PBXGroup; - children = ( - A3DC22F62DD032960020CE84 /* UnionOfMasks.metal */, - A3DC22F82DD0369E0020CE84 /* UnionOfMasksProcessor.swift */, - ); - path = UnionOfMasks; - sourceTree = ""; - }; A3DA4DC12EBE87B6005BB812 /* Utils */ = { isa = PBXGroup; children = ( @@ -1338,9 +1314,6 @@ children = ( A38338C42EDAF3DC00F1A402 /* Depth */, A308014D2EC091E400B1BA3A /* Contour */, - A308014C2EC091DC00B1BA3A /* Homography */, - A3BB5AF92DB21080008673ED /* ImageProcessing */, - A3DA4DC02EBE86C7005BB812 /* UnionOfMasks */, A362AEC72DB59577002D7598 /* Helpers */, ); path = Image; @@ -1606,19 +1579,15 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - A37C3C1C2F356254001F4248 /* IntersectionFilter.swift in Sources */, A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */, A3FE16632E18BAEB00DAE5BE /* ConfidenceEncoder.swift in Sources */, - A3BB5AFB2DB210AE008673ED /* BinaryMaskFilter.swift in Sources */, A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */, A3B2DDC12DC99F44003416FB /* SegmentationModelRequestProcessor.swift in Sources */, - A3B2DDBF2DC99DEF003416FB /* HomographyRequestProcessor.swift in Sources */, A3FFAA802DE444C6002B99BD /* AnnotationOption.swift in Sources */, CAF812C42CFA108100D44B84 /* UserStateViewModel.swift in Sources */, A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in Sources */, A37E3E3D2EED60F300B07B77 /* lodepng.cpp in Sources */, A3FE166C2E1C29CB00DAE5BE /* OtherDetailsCoder.swift in Sources */, - DAA7F8CC2CA77FA5003666D8 /* GrayscaleToColorFilter.swift in Sources */, A306462A2D614D9600B97D1B /* ImageSaver.swift in Sources */, A33EB5AB2F76080E008ABFB7 /* APIEndpoint.swift in Sources */, A3A413A22EC9C3FA0039298C /* MeshRasterizer.swift 
in Sources */, @@ -1651,7 +1620,6 @@ CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */, A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */, - A3DC22FB2DD16CB00020CE84 /* DimensionBasedMaskFilter.swift in Sources */, A3FFAA7E2DE3E41D002B99BD /* SegmentationARPipeline.swift in Sources */, A30BED3C2ED2F48B004A5B51 /* MeshClusteringUtils.swift in Sources */, A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */, @@ -1669,7 +1637,6 @@ A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */, A3AC01AF2F294CCD00A1D0E5 /* PlaneRasterizer.swift in Sources */, A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */, - A3DC22F72DD032960020CE84 /* UnionOfMasks.metal in Sources */, A35E05182EDEA476003C26CF /* AttributeEstimationPipeline.swift in Sources */, A32D66FB2F7EE9DA00DC4173 /* DamageDetectionModelRequestProcessor.swift in Sources */, A3C1D7472F886D9D00833411 /* SurfaceIntegrityFromImageExtension.swift in Sources */, @@ -1677,7 +1644,6 @@ CAA947792CDE700A000C6918 /* AuthService.swift in Sources */, A364B5DD2F259AFE00325E5C /* WorldPoints.metal in Sources */, A3B5BDA52F8329A80036C6EC /* WorldPointsGridExtension.swift in Sources */, - A3DC22E92DCF0F9A0020CE84 /* ImageProcessing.metal in Sources */, A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */, A37E72142ED95C0C00CFE4EF /* MeshHelpers.swift in Sources */, A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */, @@ -1687,7 +1653,6 @@ A3F38C4C2D38A2C700900547 /* DepthModel.swift in Sources */, A3EE6EFC2F69285600F515E6 /* LocationFromImageExtension.swift in Sources */, 55659C102BB7863F0094DF01 /* SetupView.swift in Sources */, - A3DC22F92DD036AF0020CE84 /* UnionOfMasksProcessor.swift in Sources */, A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */, A32D66FD2F7EF10F00DC4173 /* DamageDetectionPipeline.swift 
in Sources */, A34509DA2FA1A782003157B0 /* MetalBufferUtils.swift in Sources */, @@ -1711,7 +1676,6 @@ A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */, A37C3C1A2F3144F7001F4248 /* PlaneAttributeProcessor.swift in Sources */, A32943532EE814A700C4C1BC /* OSWElement.swift in Sources */, - A30801502EC0926800B1BA3A /* ContourUtils.swift in Sources */, A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */, A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */, A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */, @@ -1743,13 +1707,11 @@ A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */, A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */, A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, - A3DC22EF2DCF119A0020CE84 /* HomographyTransformFilter.swift in Sources */, A3EE6E542F67A41100F515E6 /* UtilityExtension.swift in Sources */, A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, A3C1D7442F886D3500833411 /* SurfaceIntegrity.metal in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */, - A3DC22ED2DCF10050020CE84 /* Homography.metal in Sources */, A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */, A364B5DF2F26DB5700325E5C /* WorldPointsProcessor.swift in Sources */, A374B4AC2F8EF654003E030D /* CurrentMappingData.swift in Sources */, diff --git a/IOSAccessAssessment/ARCamera/ARCameraManager.swift b/IOSAccessAssessment/ARCamera/ARCameraManager.swift index 3d9a1c72..ac8660b0 100644 --- a/IOSAccessAssessment/ARCamera/ARCameraManager.swift +++ b/IOSAccessAssessment/ARCamera/ARCameraManager.swift @@ -8,6 +8,7 @@ import ARKit import RealityKit import Combine import simd +import PointNMapShared enum ARCameraManagerError: Error, LocalizedError { case sessionConfigurationFailed diff --git 
a/IOSAccessAssessment/ARCamera/Helpers/FrameRasterizer.swift b/IOSAccessAssessment/ARCamera/Helpers/FrameRasterizer.swift index 10629ce0..35215004 100644 --- a/IOSAccessAssessment/ARCamera/Helpers/FrameRasterizer.swift +++ b/IOSAccessAssessment/ARCamera/Helpers/FrameRasterizer.swift @@ -7,6 +7,7 @@ import CoreImage import UIKit +import PointNMapShared /** A custom Image that displays a bounding box around the region of processing diff --git a/IOSAccessAssessment/ARCamera/TestCameraManager.swift b/IOSAccessAssessment/ARCamera/TestCameraManager.swift index f0de715a..af4ce958 100644 --- a/IOSAccessAssessment/ARCamera/TestCameraManager.swift +++ b/IOSAccessAssessment/ARCamera/TestCameraManager.swift @@ -9,6 +9,7 @@ import ARKit import RealityKit import Combine import simd +import PointNMapShared final class TestCameraManager: NSObject, ObservableObject, TestCameraProcessingDelegate { var selectedClasses: [AccessibilityFeatureClass] = [] diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift index 64f4da9b..f8b4bcf9 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift +++ b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift @@ -13,13 +13,13 @@ import PointNMapShared A temporary struct to perform rasterization of detected objects. TODO: This should be replaced by a lower-level rasterization function that uses Metal or Core Graphics directly. */ -struct ContourFeatureRasterizer { - static func colorForClass(_ classLabel: UInt8, labelToColorMap: [UInt8: CIColor]) -> UIColor { +public struct ContourFeatureRasterizer { + public static func colorForClass(_ classLabel: UInt8, labelToColorMap: [UInt8: CIColor]) -> UIColor { let color = labelToColorMap[classLabel] ?? 
CIColor(red: 0, green: 0, blue: 0) return UIColor(red: color.red, green: color.green, blue: color.blue, alpha: 1.0) } - static func createPath(points: [SIMD2], size: CGSize) -> UIBezierPath { + public static func createPath(points: [SIMD2], size: CGSize) -> UIBezierPath { let path = UIBezierPath() guard let firstPoint = points.first else { return path } diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift index d5b85482..73d8672e 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift +++ b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift @@ -7,11 +7,11 @@ import Vision import CoreImage -enum ContourRequestProcessorError: Error, LocalizedError { +public enum ContourRequestProcessorError: Error, LocalizedError { case contourProcessingFailed case binaryMaskGenerationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .contourProcessingFailed: return "Contour processing failed." @@ -29,7 +29,7 @@ enum ContourRequestProcessorError: Error, LocalizedError { This can cause confusion in the app because it pre-dominantly uses Core Video, ARKit, etc. which use a coordinate system with the origin at the top-left corner. To reduce confusion, we can preemptively convert the coordinates to the top-left origin. We would also need to change ContourDetails to reflect this change, by not using CGPoint, CGRect, etc. which are based on the bottom-left origin, and instead use a custom struct that can represent the coordinates in the top-left origin. 
*/ -struct ContourRequestProcessor { +public struct ContourRequestProcessor { var contourEpsilon: Float = 0.01 /// For normalized points var perimeterThreshold: Float = 0.01 diff --git a/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift b/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift index e2f18c85..c2d195a9 100644 --- a/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift +++ b/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift @@ -9,6 +9,7 @@ import Vision import CoreML import CoreImage import os +import PointNMapShared enum DepthError: Error, LocalizedError { case emptyDepth diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift b/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift index fa2432dd..a9fa1deb 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift +++ b/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift @@ -7,6 +7,7 @@ import CoreML import Vision import CoreImage +import PointNMapShared enum SegmentationModelError: Error, LocalizedError { case modelLoadingError diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourUtils.swift similarity index 93% rename from IOSAccessAssessment/ComputerVision/Image/Contour/ContourUtils.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourUtils.swift index ad2b5979..e782b7b7 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourUtils.swift @@ -7,19 +7,19 @@ import Vision -struct ContourUtils { +public struct ContourUtils { /** Function to compute the centroid, bounding box, and perimeter of a contour more 
efficiently TODO: Check if the performance can be improved by using SIMD operations */ - static func getCentroidAreaBounds( + public static func getCentroidAreaBounds( contour: VNContour ) -> (centroid: CGPoint, boundingBox: CGRect, perimeter: Float, area: Float) { let points = contour.normalizedPoints return getCentroidAreaBounds(normalizedPoints: points) } - static func getCentroidAreaBounds( + public static func getCentroidAreaBounds( normalizedPoints points: [simd_float2] ) -> (centroid: CGPoint, boundingBox: CGRect, perimeter: Float, area: Float) { guard !points.isEmpty else { return (CGPoint.zero, .zero, 0, 0) } @@ -34,11 +34,11 @@ struct ContourUtils { /** Use shoelace formula to calculate the area and centroid of the contour together. */ - static func getCentroidAndArea(contour: VNContour) -> (centroid: CGPoint, area: Float) { + public static func getCentroidAndArea(contour: VNContour) -> (centroid: CGPoint, area: Float) { let points = contour.normalizedPoints return getCentroidAndArea(normalizedPoints: points) } - static func getCentroidAndArea(normalizedPoints points: [simd_float2]) -> (centroid: CGPoint, area: Float) { + public static func getCentroidAndArea(normalizedPoints points: [simd_float2]) -> (centroid: CGPoint, area: Float) { guard !points.isEmpty else { return (CGPoint.zero, 0) } let count = points.count @@ -74,11 +74,11 @@ struct ContourUtils { return (centroid, area) } - static func getBoundingBox(contour: VNContour) -> CGRect { + public static func getBoundingBox(contour: VNContour) -> CGRect { let points = contour.normalizedPoints return getBoundingBox(normalizedPoints: points) } - static func getBoundingBox(normalizedPoints points: [simd_float2]) -> CGRect { + public static func getBoundingBox(normalizedPoints points: [simd_float2]) -> CGRect { guard !points.isEmpty else { return .zero } var minX = points[0].x @@ -99,11 +99,11 @@ struct ContourUtils { ) } - static func getPerimeter(contour: VNContour) -> Float { + public static func 
getPerimeter(contour: VNContour) -> Float { let points = contour.normalizedPoints return getPerimeter(normalizedPoints: points) } - static func getPerimeter(normalizedPoints points: [simd_float2]) -> Float { + public static func getPerimeter(normalizedPoints points: [simd_float2]) -> Float { guard !points.isEmpty else { return 0 } var perimeter: Float = 0.0 @@ -125,7 +125,7 @@ struct ContourUtils { /** NOTE: Methods that need to be replaced. */ -extension ContourUtils { +public extension ContourUtils { /** Function to get the bounding box of the contour as a trapezoid. This is the largest trapezoid that can be contained in the contour and has horizontal lines. - Parameters: diff --git a/IOSAccessAssessment/ComputerVision/Image/Homography/Homography.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/Homography.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Image/Homography/Homography.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/Homography.metal diff --git a/IOSAccessAssessment/ComputerVision/Image/Homography/HomographyRequestProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift similarity index 90% rename from IOSAccessAssessment/ComputerVision/Image/Homography/HomographyRequestProcessor.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift index 4ff99b45..96bded64 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Homography/HomographyRequestProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift @@ -8,10 +8,10 @@ import Vision import simd import CoreImage -enum HomographyRequestProcessorError: Error, LocalizedError { +public enum HomographyRequestProcessorError: Error, LocalizedError { case homographyProcessingError - var errorDescription: String? { + public var errorDescription: String? 
{ switch self { case .homographyProcessingError: return "Error occurred while processing the homography request." @@ -19,9 +19,9 @@ enum HomographyRequestProcessorError: Error, LocalizedError { } } -struct HomographyRequestProcessor { +public struct HomographyRequestProcessor { /// Computes the homography transform for the reference image and the floating image. - func getHomographyTransform( + public func getHomographyTransform( referenceImage: CIImage, floatingImage: CIImage, orientation: CGImagePropertyOrientation = .up ) throws -> simd_float3x3 { let imageRequestHandler = VNImageRequestHandler(ciImage: referenceImage, orientation: orientation, options: [:]) diff --git a/IOSAccessAssessment/ComputerVision/Image/Homography/HomographyTransformFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift similarity index 94% rename from IOSAccessAssessment/ComputerVision/Image/Homography/HomographyTransformFilter.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift index b02bbbd1..3e3206fe 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Homography/HomographyTransformFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift @@ -10,14 +10,14 @@ import Metal import CoreImage import MetalKit -enum HomographyTransformFilterError: Error, LocalizedError { +public enum HomographyTransformFilterError: Error, LocalizedError { case metalInitializationFailed case invalidInputImage case textureCreationFailed case metalPipelineCreationError case outputImageCreationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." 
@@ -36,7 +36,7 @@ enum HomographyTransformFilterError: Error, LocalizedError { /** HomographyTransformFilter is a class that applies a homography transformation to a CIImage using Metal. */ -struct HomographyTransformFilter { +public struct HomographyTransformFilter { // Metal-related properties private let device: MTLDevice private let commandQueue: MTLCommandQueue @@ -45,7 +45,7 @@ struct HomographyTransformFilter { private let ciContext: CIContext - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw HomographyTransformFilterError.metalInitializationFailed @@ -70,7 +70,7 @@ struct HomographyTransformFilter { - inputImage: The input CIImage to be transformed. Of color space nil, single-channel. - transformMatrix: A 3x3 matrix representing the homography transformation. */ - func apply( + public func apply( to inputImage: CIImage, transformMatrix: simd_float3x3, inputPixelFormat: MTLPixelFormat = .r8Unorm, outputPixelFormat: MTLPixelFormat = .r8Unorm @@ -82,7 +82,7 @@ struct HomographyTransformFilter { descriptor.usage = [.shaderRead, .shaderWrite] guard let commandBuffer = self.commandQueue.makeCommandBuffer() else { - throw GrayscaleToColorFilterError.metalPipelineCreationError + throw HomographyTransformFilterError.metalPipelineCreationError } let inputTexture = try inputImage.toMTLTexture( diff --git a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift similarity index 94% rename from IOSAccessAssessment/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift index 1297ed4e..3a3a7406 100644 --- a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift +++ 
b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift @@ -11,14 +11,14 @@ import Metal import CoreImage import MetalKit -enum BinaryMaskFilterError: Error, LocalizedError { +public enum BinaryMaskFilterError: Error, LocalizedError { case metalInitializationFailed case invalidInputImage case textureCreationFailed case metalPipelineCreationError case outputImageCreationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." @@ -34,7 +34,7 @@ enum BinaryMaskFilterError: Error, LocalizedError { } } -struct BinaryMaskFilter { +public struct BinaryMaskFilter { // Metal-related properties private let device: MTLDevice private let commandQueue: MTLCommandQueue @@ -43,7 +43,7 @@ struct BinaryMaskFilter { private let ciContext: CIContext - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw BinaryMaskFilterError.metalInitializationFailed @@ -68,7 +68,7 @@ struct BinaryMaskFilter { - inputImage: The input CIImage to be processed. Of color space nil, single-channel. - targetValue: The target pixel value to create the binary mask. 
*/ - func apply(to inputImage: CIImage, targetValue: UInt8) throws -> CIImage { + public func apply(to inputImage: CIImage, targetValue: UInt8) throws -> CIImage { let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .r8Unorm, width: Int(inputImage.extent.width), height: Int(inputImage.extent.height), mipmapped: false) descriptor.usage = [.shaderRead, .shaderWrite] diff --git a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift similarity index 94% rename from IOSAccessAssessment/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift index 53f5ad6e..fea08363 100644 --- a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift @@ -11,14 +11,14 @@ import Metal import CoreImage import MetalKit -enum DimensionBasedMaskFilterError: Error, LocalizedError { +public enum DimensionBasedMaskFilterError: Error, LocalizedError { case metalInitializationFailed case invalidInputImage case textureCreationFailed case metalPipelineCreationError case outputImageCreationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." @@ -39,7 +39,7 @@ enum DimensionBasedMaskFilterError: Error, LocalizedError { A struct that applies a binary mask to an image using Metal. The mask is applied based on a target value and specified bounds. 
*/ -struct DimensionBasedMaskFilter { +public struct DimensionBasedMaskFilter { // Metal-related properties private let device: MTLDevice private let commandQueue: MTLCommandQueue @@ -49,7 +49,7 @@ struct DimensionBasedMaskFilter { private let ciContext: CIContext private let outputColorSpace = CGColorSpaceCreateDeviceRGB() - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw DimensionBasedMaskFilterError.metalInitializationFailed @@ -67,7 +67,7 @@ struct DimensionBasedMaskFilter { self.pipeline = pipeline } - func apply(to inputImage: CIImage, bounds: CGRect) throws -> CIImage { + public func apply(to inputImage: CIImage, bounds: CGRect) throws -> CIImage { let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .r8Unorm, width: Int(inputImage.extent.width), height: Int(inputImage.extent.height), mipmapped: false) descriptor.usage = [.shaderRead, .shaderWrite] diff --git a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift similarity index 95% rename from IOSAccessAssessment/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift index 132058c0..15572a3d 100644 --- a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift @@ -10,14 +10,14 @@ import Metal import CoreImage import MetalKit -enum GrayscaleToColorFilterError: Error, LocalizedError { +public enum GrayscaleToColorFilterError: Error, LocalizedError { case metalInitializationFailed case invalidInputImage case textureCreationFailed case metalPipelineCreationError case outputImageCreationFailed - var 
errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." @@ -33,7 +33,7 @@ enum GrayscaleToColorFilterError: Error, LocalizedError { } } -struct GrayscaleToColorFilter { +public struct GrayscaleToColorFilter { // Metal-related properties private let device: MTLDevice private let commandQueue: MTLCommandQueue @@ -43,7 +43,7 @@ struct GrayscaleToColorFilter { private let ciContext: CIContext private let outputColorSpace = CGColorSpaceCreateDeviceRGB() - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw GrayscaleToColorFilterError.metalInitializationFailed @@ -69,7 +69,7 @@ struct GrayscaleToColorFilter { - grayscaleValues: An array of Float values representing the grayscale levels (0.0 to 1.0). - colorValues: An array of CIColor values corresponding to the grayscale levels. */ - func apply(to inputImage: CIImage, grayscaleValues: [Float], colorValues: [CIColor]) throws -> CIImage { + public func apply(to inputImage: CIImage, grayscaleValues: [Float], colorValues: [CIColor]) throws -> CIImage { let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .rgba8Unorm, width: Int(inputImage.extent.width), height: Int(inputImage.extent.height), mipmapped: false) descriptor.usage = [.shaderRead, .shaderWrite] diff --git a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/ImageProcessing.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/ImageProcessing.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Image/ImageProcessing/ImageProcessing.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/ImageProcessing.metal diff --git a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift 
b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift similarity index 94% rename from IOSAccessAssessment/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift index 45e79529..87035809 100644 --- a/IOSAccessAssessment/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift @@ -11,14 +11,14 @@ import Metal import CoreImage import MetalKit -enum IntersectionFilterError: Error, LocalizedError { +public enum IntersectionFilterError: Error, LocalizedError { case metalInitializationFailed case invalidInputImage case textureCreationFailed case metalPipelineCreationError case outputImageCreationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." @@ -34,7 +34,7 @@ enum IntersectionFilterError: Error, LocalizedError { } } -struct IntersectionFilter { +public struct IntersectionFilter { // Metal-related properties private let device: MTLDevice private let commandQueue: MTLCommandQueue @@ -43,7 +43,7 @@ struct IntersectionFilter { private let ciContext: CIContext - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw IntersectionFilterError.metalInitializationFailed @@ -64,7 +64,7 @@ struct IntersectionFilter { /** Applies the intersection filter to two input images and returns the resulting image. 
*/ - func apply(inputImage1: CIImage, inputImage2: CIImage) throws -> CIImage { + public func apply(inputImage1: CIImage, inputImage2: CIImage) throws -> CIImage { let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .r8Unorm, width: Int(inputImage1.extent.width), height: Int(inputImage1.extent.height), mipmapped: false) descriptor.usage = [.shaderRead, .shaderWrite] diff --git a/IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal diff --git a/IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift similarity index 93% rename from IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift index 6892fec1..5c959ad9 100644 --- a/IOSAccessAssessment/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift @@ -10,7 +10,7 @@ import CoreImage import MetalKit import PointNMapShared -enum UnionOfMasksProcessorError: Error, LocalizedError { +public enum UnionOfMasksProcessorError: Error, LocalizedError { case metalInitializationFailed case metalPipelineCreationError case invalidInputImage @@ -19,7 +19,7 @@ enum UnionOfMasksProcessorError: Error, LocalizedError { case outputImageCreationFailed case invalidPixelFormat - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." 
@@ -43,7 +43,7 @@ enum UnionOfMasksProcessorError: Error, LocalizedError { UnionOfMasksProcessor is a class that processes an array of CIImages to compute the union of masks using Metal. It performs a simple weighted union operation on the input images, where each image is treated as a mask. Only the last frame can be weighted differently from the rest. */ -class UnionOfMasksProcessor { +public class UnionOfMasksProcessor { // Metal-related properties private let device: MTLDevice private let commandQueue: MTLCommandQueue @@ -52,13 +52,13 @@ class UnionOfMasksProcessor { private let ciContext: CIContext - var arrayTexture: MTLTexture? - var imageCount: Int = 0 - var format: MTLPixelFormat = .rgba8Unorm - var width: Int = 0 - var height: Int = 0 + public var arrayTexture: MTLTexture? + public var imageCount: Int = 0 + public var format: MTLPixelFormat = .rgba8Unorm + public var width: Int = 0 + public var height: Int = 0 - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw UnionOfMasksProcessorError.metalInitializationFailed @@ -83,7 +83,7 @@ class UnionOfMasksProcessor { - images: An array of CIImage objects to be combined into an array texture. - format: The pixel format for the texture. Default is .rgba8Unorm. 
*/ - func setArrayTexture(images: [CIImage], format: MTLPixelFormat = .r8Unorm) throws { + public func setArrayTexture(images: [CIImage], format: MTLPixelFormat = .r8Unorm) throws { let imageCount = images.count guard imageCount > 0 else { throw UnionOfMasksProcessorError.invalidInputImage @@ -139,7 +139,7 @@ class UnionOfMasksProcessor { self.format = format } - func apply(targetValue: UInt8, unionOfMasksPolicy: UnionOfMasksPolicy = UnionOfMasksPolicy.default) throws -> CIImage { + public func apply(targetValue: UInt8, unionOfMasksPolicy: UnionOfMasksPolicy = UnionOfMasksPolicy.default) throws -> CIImage { guard let inputImages = self.arrayTexture else { throw UnionOfMasksProcessorError.arrayTextureNotSet } diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift index 772874de..996c0a0e 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CGImageUtils.swift @@ -7,7 +7,7 @@ import UIKit -extension CGImage { +public extension CGImage { // TODO: Need to check if this is applicable to all image types func getByteSize() -> Int { var bytesPerRow: Int = 4 * self.width diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift index b15dc9df..7789466e 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CIImageUtils.swift @@ -8,11 +8,11 @@ import UIKit import MetalKit -enum CIImageUtilsError: Error, LocalizedError { +public enum CIImageUtilsError: Error, LocalizedError { case pixelBufferCreationError case segmentationTextureError - var errorDescription: String? { + public var errorDescription: String? 
{ switch self { case .pixelBufferCreationError: return "Failed to create pixel buffer from CIImage." @@ -22,7 +22,7 @@ enum CIImageUtilsError: Error, LocalizedError { } } -extension CIImage { +public extension CIImage { func croppedToCenter(size: CGSize) -> CIImage { let x = (extent.width - size.width) / 2 let y = (extent.height - size.height) / 2 @@ -48,7 +48,7 @@ extension CIImage { /** Extensions for converting CIImage to CVPixelBuffer. */ -extension CIImage { +public extension CIImage { func toPixelBuffer( context: CIContext, pixelFormatType: OSType = kCVPixelFormatType_32BGRA, colorSpace: CGColorSpace? = nil ) throws -> CVPixelBuffer { @@ -94,7 +94,7 @@ extension CIImage { /** Supporting enum for CIImage to MTLTexture conversion. */ -enum CIImageToMTLTextureOrientation: Sendable { +public enum CIImageToMTLTextureOrientation: Sendable { case cICanonical case metalTopLeft } @@ -102,7 +102,7 @@ enum CIImageToMTLTextureOrientation: Sendable { /** Extensions for converting CIImage to MTLTexture. */ -extension CIImage { +public extension CIImage { /** Converts the CIImage to a MTLTexture using the provided device, command buffer, pixel format, CIContext, and color space. @@ -171,7 +171,7 @@ extension CIImage { /** Debugging functions */ -extension CIImage { +public extension CIImage { /** Function to check if the CIImage has backed data. This is useful for debugging purposes to check what the CIImage is backed by, and the relevant formats. 
*/ diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift index 82437cd0..8e3e0184 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift @@ -8,11 +8,11 @@ import UIKit import Accelerate -struct CVPixelBufferUtils { +public struct CVPixelBufferUtils { /** This function creates a CVPixelBuffer with the specified width, height, and pixel format. */ - static func createPixelBuffer(width: Int, height: Int, pixelFormat: OSType = kCVPixelFormatType_DepthFloat32) -> CVPixelBuffer? { + public static func createPixelBuffer(width: Int, height: Int, pixelFormat: OSType = kCVPixelFormatType_DepthFloat32) -> CVPixelBuffer? { var pixelBuffer: CVPixelBuffer? let attrs = [ kCVPixelBufferCGImageCompatibilityKey: true, @@ -35,7 +35,7 @@ struct CVPixelBufferUtils { return pixelBuffer } - static func createBlankDepthPixelBuffer(targetSize: CGSize) -> CVPixelBuffer? { + public static func createBlankDepthPixelBuffer(targetSize: CGSize) -> CVPixelBuffer? { let width = Int(targetSize.width) let height = Int(targetSize.height) @@ -57,7 +57,7 @@ struct CVPixelBufferUtils { gets the indices of these values from Constants.SelectedAccessibilityFeatureConfig.grayscaleValues, and returns both the unique values and their corresponding indices. */ - static func extractUniqueGrayscaleValues(from pixelBuffer: CVPixelBuffer) -> Set { + public static func extractUniqueGrayscaleValues(from pixelBuffer: CVPixelBuffer) -> Set { CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) } @@ -86,7 +86,7 @@ struct CVPixelBufferUtils { /** Archived methods to be removed later if not needed. 
*/ -extension CVPixelBufferUtils { +public extension CVPixelBufferUtils { // TODO: Check if any of the methods can be sped up using GPU // TODO: Check if the forced unwrapping used all over the functions is safe in the given context static func cropCenterOfPixelBuffer(_ pixelBuffer: CVPixelBuffer, cropSize: CGSize) -> CVPixelBuffer? { @@ -195,7 +195,7 @@ extension CVPixelBufferUtils { } } -extension CVPixelBuffer { +public extension CVPixelBuffer { /** Returns a string representation of the pixel format type of the pixel buffer. */ From 5a7ae7ac8477b77ef824ce4a670ed5e2ca6e74bb Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 14:27:25 -0700 Subject: [PATCH 07/14] Adding more files to PointNMap framework --- IOSAccessAssessment.xcodeproj/project.pbxproj | 148 +----------------- .../ARCamera/ARCameraManager.swift | 4 +- .../ARCamera/ARCameraViewController.swift | 2 +- .../ARCamera/TestCameraManager.swift | 4 +- .../ARCamera/TestCameraViewController.swift | 2 +- .../AttributeEstimationPipeline.swift | 2 +- .../SurfaceIntegrityExtension.swift | 2 +- .../Config/AccessibilityFeatureConfig.swift | 8 +- .../Definitions/DetectedFeature.swift | 1 + .../Annotation/AnnotationImageManager.swift | 12 +- .../Contour/ContourRequestProcessor.swift | 1 + .../Mesh/Definitions/MeshGPUDefinitions.swift | 26 --- .../SurfaceIntegrityFromImageExtension.swift | 16 +- .../SurfaceIntegrityFromMeshExtension.swift | 20 +-- .../AccessibilityFeatureClassSnapshot.swift | 2 +- ...DamageDetectionModelRequestProcessor.swift | 2 +- .../DepthEstimation/DepthModel.swift | 6 +- .../SegmentationModelRequestProcessor.swift | 4 +- .../Segmentation/SegmentationARPipeline.swift | 4 +- .../SegmentationAnnotationPipeline.swift | 1 + ...nstants.swift => SharedAppConstants.swift} | 16 +- .../TDEI/Auth/EnvironmentService.swift | 4 +- .../TDEI/Config/APIConstants.swift | 2 +- IOSAccessAssessment/View/ARCameraView.swift | 4 +- IOSAccessAssessment/View/AnnotationView.swift | 2 +- 
IOSAccessAssessment/View/SetupView.swift | 2 +- .../SubView/AnnotationFeatureDetailView.swift | 16 +- .../View/TestMode/TestCameraView.swift | 6 +- .../View/ViewModel/UserStateViewModel.swift | 8 +- .../View/ViewModel/WorkspaceViewModel.swift | 12 +- .../View/WorkspaceSelectionView.swift | 2 +- .../HomographyRequestProcessor.swift | 2 + .../HomographyTransformFilter.swift | 3 +- .../ImageProcessing/BinaryMaskFilter.swift | 3 +- .../DimensionBasedMaskFilter.swift | 4 +- .../GrayscaleToColorFilter.swift | 3 +- .../ImageProcessing/IntersectionFilter.swift | 3 +- .../UnionOfMasks/UnionOfMasksProcessor.swift | 4 +- .../Image/Utils/CVPixelBufferUtils.swift | 4 +- .../Utils/CenterCropTransformUtils.swift | 3 +- .../Mesh/Clustering/MeshClusteringUtils.swift | 6 +- .../Mesh/Definitions/MeshDefinitions.swift | 45 +++--- .../Mesh/Definitions/MeshGPUDefinitions.swift | 27 ++++ .../Mesh/Helpers/MeshHelpers.swift | 7 +- .../ComputerVision/Mesh/MeshGPUSnapshot.swift | 22 +-- .../Mesh/Utils/MeshRasterizer.swift | 6 +- .../Projection/Plane/Plane.metal | 0 .../Plane/PlaneAttributeProcessor.swift | 45 +++--- .../Projection/Plane/PlaneProcessor.swift | 50 +++--- .../Projection/Plane/PlaneRasterizer.swift | 9 +- .../Projection/ProjectionUtils.swift | 10 +- .../ProjectedWorldPointsExtension.swift | 3 +- .../Extensions/WorldPointsGridExtension.swift | 13 +- .../Projection/WorldPoints/WorldPoints.metal | 0 .../WorldPoints/WorldPointsProcessor.swift | 29 ++-- .../PointNMap/Shared/PointNMapConstants.swift | 69 ++++++++ .../Shared/Utils/MetalBufferUtils.swift | 0 57 files changed, 334 insertions(+), 377 deletions(-) delete mode 100644 IOSAccessAssessment/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift rename IOSAccessAssessment/Shared/{Constants.swift => SharedAppConstants.swift} (88%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/Clustering/MeshClusteringUtils.swift (71%) rename {IOSAccessAssessment => 
PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/Definitions/MeshDefinitions.swift (76%) create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/Helpers/MeshHelpers.swift (94%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/MeshGPUSnapshot.swift (95%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/Utils/MeshRasterizer.swift (93%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/Plane/Plane.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift (95%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/Plane/PlaneProcessor.swift (93%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/Plane/PlaneRasterizer.swift (91%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/ProjectionUtils.swift (96%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/WorldPoints/Extensions/ProjectedWorldPointsExtension.swift (98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/WorldPoints/Extensions/WorldPointsGridExtension.swift (96%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/WorldPoints/WorldPoints.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift (94%) create mode 100644 PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/Shared/Utils/MetalBufferUtils.swift (100%) diff --git 
a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 421cab0a..3407adcc 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -27,9 +27,7 @@ A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */; }; A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */; }; A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */; }; - A30BED382ED162E7004A5B51 /* MeshDefinitions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30BED372ED162E2004A5B51 /* MeshDefinitions.swift */; }; A30BED3A2ED162F1004A5B51 /* ConnectedComponents.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30BED392ED162ED004A5B51 /* ConnectedComponents.swift */; }; - A30BED3C2ED2F48B004A5B51 /* MeshClusteringUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30BED3B2ED2F487004A5B51 /* MeshClusteringUtils.swift */; }; A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */; }; A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */; }; A30D05842E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A30D05832E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage */; }; @@ -63,7 +61,6 @@ A3431E022F26FA2C00B96610 /* LocationExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E012F26FA2700B96610 /* LocationExtension.swift */; }; A3431E042F26FA7200B96610 /* 
OtherAttributeExtensionLegacy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */; }; A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D72FA1A6FA003157B0 /* SafeDeque.swift */; }; - A34509DA2FA1A782003157B0 /* MetalBufferUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */; }; A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */; }; A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */; }; A35547152EC198A600F43AFD /* ContourRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */; }; @@ -82,14 +79,9 @@ A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05192EDFB015003C26CF /* OSMPayload.swift */; }; A35E051C2EDFB094003C26CF /* OSMNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E051B2EDFB093003C26CF /* OSMNode.swift */; }; A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E051D2EDFB099003C26CF /* OSMWay.swift */; }; - A364B5D92F259AD700325E5C /* PlaneProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A364B5D82F259AD600325E5C /* PlaneProcessor.swift */; }; - A364B5DD2F259AFE00325E5C /* WorldPoints.metal in Sources */ = {isa = PBXBuildFile; fileRef = A364B5DC2F259AF900325E5C /* WorldPoints.metal */; }; - A364B5DF2F26DB5700325E5C /* WorldPointsProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A364B5DE2F26DB5300325E5C /* WorldPointsProcessor.swift */; }; A36C6E022E134CE600A86004 /* bisenetv2_35_640_640.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A36C6E012E134CE600A86004 /* 
bisenetv2_35_640_640.mlpackage */; }; A374B4AC2F8EF654003E030D /* CurrentMappingData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A374B4AB2F8EF654003E030D /* CurrentMappingData.swift */; }; A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A374FAB62EE0173200055268 /* OSMChangesetUploadResponseElement.swift */; }; - A37C3C182F3141FF001F4248 /* Plane.metal in Sources */ = {isa = PBXBuildFile; fileRef = A37C3C172F3141F9001F4248 /* Plane.metal */; }; - A37C3C1A2F3144F7001F4248 /* PlaneAttributeProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37C3C192F3144F4001F4248 /* PlaneAttributeProcessor.swift */; }; A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E3B2EED60F300B07B77 /* PngEncoder.mm */; }; A37E3E3D2EED60F300B07B77 /* lodepng.cpp in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E392EED60F300B07B77 /* lodepng.cpp */; }; A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E942EFB66E600B07B77 /* CameraIntrinsicsCoder.swift */; }; @@ -98,8 +90,6 @@ A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */; }; A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */; }; A37E72102ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E720F2ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift */; }; - A37E72142ED95C0C00CFE4EF /* MeshHelpers.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E72132ED95C0900CFE4EF /* MeshHelpers.swift */; }; - A37E72162ED95CB400CFE4EF /* MeshGPUDefinitions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E72152ED95CB100CFE4EF /* MeshGPUDefinitions.swift */; }; 
A37E72182ED95D0600CFE4EF /* CapturedMeshDefinitions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */; }; A37E721D2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */; }; A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338BE2EDA889A00F1A402 /* CustomPicker.swift */; }; @@ -107,16 +97,11 @@ A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3A2DD9B03000455E45 /* OSMElement.swift */; }; A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */; }; A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */; }; - A3A413A22EC9C3FA0039298C /* MeshRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A12EC9C3F60039298C /* MeshRasterizer.swift */; }; A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A52ECD86260039298C /* AccessibilityFeature.swift */; }; A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413AC2ECF94950039298C /* DBSCAN.swift */; }; A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */; }; - A3AC01AF2F294CCD00A1D0E5 /* PlaneRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3AC01AE2F294CCA00A1D0E5 /* PlaneRasterizer.swift */; }; A3B2DDC12DC99F44003416FB /* SegmentationModelRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */; }; A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */ 
= {isa = PBXBuildFile; fileRef = A3B5BD9C2F81CED70036C6EC /* DamageDetectionRasterizer.swift */; }; - A3B5BDA02F831F270036C6EC /* ProjectionUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BD9F2F831F250036C6EC /* ProjectionUtils.swift */; }; - A3B5BDA32F8329740036C6EC /* ProjectedWorldPointsExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BDA22F83296C0036C6EC /* ProjectedWorldPointsExtension.swift */; }; - A3B5BDA52F8329A80036C6EC /* WorldPointsGridExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BDA42F8329A20036C6EC /* WorldPointsGridExtension.swift */; }; A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */; }; A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */; }; A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */; }; @@ -130,7 +115,6 @@ A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */; }; A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */; }; A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D752E654F14003BFE78 /* ProfileView.swift */; }; - A3DA4DA82EB94D84005BB812 /* MeshGPUSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DA72EB94D81005BB812 /* MeshGPUSnapshot.swift */; }; A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */; }; A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */; }; A3DA4DBE2EBCB9F9005BB812 /* MetalContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */; }; @@ -170,7 +154,7 @@ CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */ = {isa = PBXBuildFile; fileRef = CAF812BB2CF78F7C00D44B84 /* NetworkError.swift */; }; CAF812C42CFA108100D44B84 /* UserStateViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = CAF812C22CFA108100D44B84 /* UserStateViewModel.swift */; }; DA6332E72BAE3998009C80F9 /* espnetv2_pascal_256.mlmodel in Resources */ = {isa = PBXBuildFile; fileRef = 3222F94B2B62FF2E0019A079 /* espnetv2_pascal_256.mlmodel */; }; - DAA7F8B52CA38C11003666D8 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B42CA38C11003666D8 /* Constants.swift */; }; + DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B42CA38C11003666D8 /* SharedAppConstants.swift */; }; DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */; }; DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */; }; /* End PBXBuildFile section */ @@ -267,9 +251,7 @@ A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DetectedFeature.swift; sourceTree = ""; }; A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshInstancePolicy.swift; sourceTree = ""; }; A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureAttributeExtension.swift; sourceTree = ""; }; - A30BED372ED162E2004A5B51 /* MeshDefinitions.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = MeshDefinitions.swift; sourceTree = ""; }; A30BED392ED162ED004A5B51 /* ConnectedComponents.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConnectedComponents.swift; sourceTree = ""; }; - A30BED3B2ED2F487004A5B51 /* MeshClusteringUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshClusteringUtils.swift; sourceTree = ""; }; A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditableAccessibilityFeature.swift; sourceTree = ""; }; A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MappedAccessibilityFeature.swift; sourceTree = ""; }; A30D05832E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2_11_640_640.mlpackage; sourceTree = ""; }; @@ -300,7 +282,6 @@ A3431E012F26FA2700B96610 /* LocationExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationExtension.swift; sourceTree = ""; }; A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OtherAttributeExtensionLegacy.swift; sourceTree = ""; }; A34509D72FA1A6FA003157B0 /* SafeDeque.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SafeDeque.swift; sourceTree = ""; }; - A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalBufferUtils.swift; sourceTree = ""; }; A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationHelpersExtension.swift; sourceTree = ""; }; A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */ = {isa = PBXFileReference; 
lastKnownFileType = folder.mlpackage; path = bisenetv2.mlpackage; sourceTree = ""; }; A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourRequestProcessor.swift; sourceTree = ""; }; @@ -319,14 +300,9 @@ A35E05192EDFB015003C26CF /* OSMPayload.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMPayload.swift; sourceTree = ""; }; A35E051B2EDFB093003C26CF /* OSMNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMNode.swift; sourceTree = ""; }; A35E051D2EDFB099003C26CF /* OSMWay.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMWay.swift; sourceTree = ""; }; - A364B5D82F259AD600325E5C /* PlaneProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaneProcessor.swift; sourceTree = ""; }; - A364B5DC2F259AF900325E5C /* WorldPoints.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = WorldPoints.metal; sourceTree = ""; }; - A364B5DE2F26DB5300325E5C /* WorldPointsProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorldPointsProcessor.swift; sourceTree = ""; }; A36C6E012E134CE600A86004 /* bisenetv2_35_640_640.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2_35_640_640.mlpackage; sourceTree = ""; }; A374B4AB2F8EF654003E030D /* CurrentMappingData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentMappingData.swift; sourceTree = ""; }; A374FAB62EE0173200055268 /* OSMChangesetUploadResponseElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMChangesetUploadResponseElement.swift; sourceTree = ""; }; - A37C3C172F3141F9001F4248 /* Plane.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Plane.metal; sourceTree = ""; }; - 
A37C3C192F3144F4001F4248 /* PlaneAttributeProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaneAttributeProcessor.swift; sourceTree = ""; }; A37E3E382EED60F300B07B77 /* lodepng.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = lodepng.h; sourceTree = ""; }; A37E3E392EED60F300B07B77 /* lodepng.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = lodepng.cpp; sourceTree = ""; }; A37E3E3A2EED60F300B07B77 /* PngEncoder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PngEncoder.h; sourceTree = ""; }; @@ -337,8 +313,6 @@ A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureClassSnapshot.swift; sourceTree = ""; }; A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppContext.swift; sourceTree = ""; }; A37E720F2ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationAnnotationPipeline.swift; sourceTree = ""; }; - A37E72132ED95C0900CFE4EF /* MeshHelpers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshHelpers.swift; sourceTree = ""; }; - A37E72152ED95CB100CFE4EF /* MeshGPUDefinitions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshGPUDefinitions.swift; sourceTree = ""; }; A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CapturedMeshDefinitions.swift; sourceTree = ""; }; A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourFeatureRasterizer.swift; sourceTree = ""; }; A38338BE2EDA889A00F1A402 /* CustomPicker.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = ""; }; @@ -346,16 +320,11 @@ A39C9F3A2DD9B03000455E45 /* OSMElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMElement.swift; sourceTree = ""; }; A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIConstants.swift; sourceTree = ""; }; A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageManager.swift; sourceTree = ""; }; - A3A413A12EC9C3F60039298C /* MeshRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshRasterizer.swift; sourceTree = ""; }; A3A413A52ECD86260039298C /* AccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeature.swift; sourceTree = ""; }; A3A413AC2ECF94950039298C /* DBSCAN.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DBSCAN.swift; sourceTree = ""; }; A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomXMLParser.swift; sourceTree = ""; }; - A3AC01AE2F294CCA00A1D0E5 /* PlaneRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaneRasterizer.swift; sourceTree = ""; }; A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationModelRequestProcessor.swift; sourceTree = ""; }; A3B5BD9C2F81CED70036C6EC /* DamageDetectionRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DamageDetectionRasterizer.swift; sourceTree = ""; }; - A3B5BD9F2F831F250036C6EC /* ProjectionUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
ProjectionUtils.swift; sourceTree = ""; }; - A3B5BDA22F83296C0036C6EC /* ProjectedWorldPointsExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProjectedWorldPointsExtension.swift; sourceTree = ""; }; - A3B5BDA42F8329A20036C6EC /* WorldPointsGridExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorldPointsGridExtension.swift; sourceTree = ""; }; A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnvironmentService.swift; sourceTree = ""; }; A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMMapDataResponse.swift; sourceTree = ""; }; A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureEncoder.swift; sourceTree = ""; }; @@ -368,7 +337,6 @@ A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameRasterizer.swift; sourceTree = ""; }; A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceViewModel.swift; sourceTree = ""; }; A3D78D752E654F14003BFE78 /* ProfileView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileView.swift; sourceTree = ""; }; - A3DA4DA72EB94D81005BB812 /* MeshGPUSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshGPUSnapshot.swift; sourceTree = ""; }; A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = MeshPipeline.metal; sourceTree = ""; }; A3DA4DB42EBAE101005BB812 /* IOSAccessAssessment-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = 
"IOSAccessAssessment-Bridging-Header.h"; sourceTree = ""; }; A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationMeshRecord.swift; sourceTree = ""; }; @@ -409,7 +377,7 @@ CAA9477A2CDE70D5000C6918 /* KeychainService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = KeychainService.swift; sourceTree = ""; }; CAF812BB2CF78F7C00D44B84 /* NetworkError.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetworkError.swift; sourceTree = ""; }; CAF812C22CFA108100D44B84 /* UserStateViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserStateViewModel.swift; sourceTree = ""; }; - DAA7F8B42CA38C11003666D8 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; + DAA7F8B42CA38C11003666D8 /* SharedAppConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppConstants.swift; sourceTree = ""; }; DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpinnerView.swift; sourceTree = ""; }; DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProgressBar.swift; sourceTree = ""; }; /* End PBXFileReference section */ @@ -418,17 +386,7 @@ A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { isa = PBXFileSystemSynchronizedBuildFileExceptionSet; membershipExceptions = ( - Sources/PointNMap/ComputerVision/Image/Contour/ContourUtils.swift, - Sources/PointNMap/ComputerVision/Image/Homography/Homography.metal, - Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift, - Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift, - 
Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/ImageProcessing.metal, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift, - Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal, - Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift, + Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, ); platformFiltersByRelativePath = { PointNMapShared.h = ( @@ -448,27 +406,17 @@ ); target = A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */; }; - A312FE222FA3F1C90044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { + A312FE932FA3F6860044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { isa = PBXFileSystemSynchronizedBuildFileExceptionSet; membershipExceptions = ( - Sources/PointNMap/ComputerVision/Image/Contour/ContourUtils.swift, - Sources/PointNMap/ComputerVision/Image/Homography/Homography.metal, - Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift, - Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/ImageProcessing.metal, - Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift, - Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasks.metal, - Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift, + Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, ); 
target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; }; /* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ /* Begin PBXFileSystemSynchronizedRootGroup section */ - A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE222FA3F1C90044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; + A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE932FA3F6860044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; A312FD8B2FA3391C0044808E /* PointNMapSharedTests */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = PointNMapSharedTests; sourceTree = ""; }; A312FE0E2FA3EBE80044808E /* PointNMapShaderTypes */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE172FA3EBE80044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShaderTypes; sourceTree = ""; }; /* End PBXFileSystemSynchronizedRootGroup section */ @@ -618,7 +566,7 @@ A34509DB2FA1A7A7003157B0 /* Utils */, A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */, A355471D2EC1A47200F43AFD /* SharedAppData.swift */, - DAA7F8B42CA38C11003666D8 /* Constants.swift */, + DAA7F8B42CA38C11003666D8 /* SharedAppConstants.swift */, ); path = Shared; sourceTree = ""; @@ -708,20 +656,10 @@ path = Components; sourceTree = ""; }; - A30BED362ED162DD004A5B51 /* Clustering */ = { - isa = PBXGroup; - children = ( - A30BED3B2ED2F487004A5B51 /* MeshClusteringUtils.swift */, - ); - path = Clustering; - sourceTree = ""; - }; 
A30BED3D2ED2F614004A5B51 /* Definitions */ = { isa = PBXGroup; children = ( A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */, - A37E72152ED95CB100CFE4EF /* MeshGPUDefinitions.swift */, - A30BED372ED162E2004A5B51 /* MeshDefinitions.swift */, ); path = Definitions; sourceTree = ""; @@ -814,36 +752,12 @@ A32D66FF2F7EF83E00DC4173 /* Projection */ = { isa = PBXGroup; children = ( - A3B5BD9F2F831F250036C6EC /* ProjectionUtils.swift */, - A32D67012F7EF88000DC4173 /* WorldPoints */, - A32D67002F7EF87C00DC4173 /* Plane */, A3C1D73E2F882EE100833411 /* SurfaceIntegrity */, A3B5BD9E2F82FEA50036C6EC /* SurfaceNormals */, ); path = Projection; sourceTree = ""; }; - A32D67002F7EF87C00DC4173 /* Plane */ = { - isa = PBXGroup; - children = ( - A37C3C172F3141F9001F4248 /* Plane.metal */, - A3AC01AE2F294CCA00A1D0E5 /* PlaneRasterizer.swift */, - A364B5D82F259AD600325E5C /* PlaneProcessor.swift */, - A37C3C192F3144F4001F4248 /* PlaneAttributeProcessor.swift */, - ); - path = Plane; - sourceTree = ""; - }; - A32D67012F7EF88000DC4173 /* WorldPoints */ = { - isa = PBXGroup; - children = ( - A3B5BDA12F8329520036C6EC /* Extensions */, - A364B5DC2F259AF900325E5C /* WorldPoints.metal */, - A364B5DE2F26DB5300325E5C /* WorldPointsProcessor.swift */, - ); - path = WorldPoints; - sourceTree = ""; - }; A33EB5AC2F761B5D008ABFB7 /* Definitions */ = { isa = PBXGroup; children = ( @@ -886,7 +800,6 @@ A34509DB2FA1A7A7003157B0 /* Utils */ = { isa = PBXGroup; children = ( - A34509D92FA1A782003157B0 /* MetalBufferUtils.swift */, A34509D72FA1A6FA003157B0 /* SafeDeque.swift */, ); path = Utils; @@ -1060,14 +973,6 @@ path = Definitions; sourceTree = ""; }; - A37E72192ED95D2900CFE4EF /* Helpers */ = { - isa = PBXGroup; - children = ( - A37E72132ED95C0900CFE4EF /* MeshHelpers.swift */, - ); - path = Helpers; - sourceTree = ""; - }; A38338C02EDA9E3200F1A402 /* SubView */ = { isa = PBXGroup; children = ( @@ -1177,15 +1082,6 @@ path = SurfaceNormals; sourceTree = ""; }; - A3B5BDA12F8329520036C6EC /* 
Extensions */ = { - isa = PBXGroup; - children = ( - A3B5BDA42F8329A20036C6EC /* WorldPointsGridExtension.swift */, - A3B5BDA22F83296C0036C6EC /* ProjectedWorldPointsExtension.swift */, - ); - path = Extensions; - sourceTree = ""; - }; A3C1D73E2F882EE100833411 /* SurfaceIntegrity */ = { isa = PBXGroup; children = ( @@ -1209,10 +1105,6 @@ isa = PBXGroup; children = ( A30BED3D2ED2F614004A5B51 /* Definitions */, - A30BED362ED162DD004A5B51 /* Clustering */, - A37E72192ED95D2900CFE4EF /* Helpers */, - A3DA4DAF2EB99A55005BB812 /* Utils */, - A3DA4DA72EB94D81005BB812 /* MeshGPUSnapshot.swift */, A35547C92EC2045F00F43AFD /* CapturedMeshSnapshot.swift */, A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */, A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */, @@ -1220,14 +1112,6 @@ path = Mesh; sourceTree = ""; }; - A3DA4DAF2EB99A55005BB812 /* Utils */ = { - isa = PBXGroup; - children = ( - A3A413A12EC9C3F60039298C /* MeshRasterizer.swift */, - ); - path = Utils; - sourceTree = ""; - }; A3DA4DC12EBE87B6005BB812 /* Utils */ = { isa = PBXGroup; children = ( @@ -1590,7 +1474,6 @@ A3FE166C2E1C29CB00DAE5BE /* OtherDetailsCoder.swift in Sources */, A306462A2D614D9600B97D1B /* ImageSaver.swift in Sources */, A33EB5AB2F76080E008ABFB7 /* APIEndpoint.swift in Sources */, - A3A413A22EC9C3FA0039298C /* MeshRasterizer.swift in Sources */, A32943502EE80EC400C4C1BC /* OSMRelation.swift in Sources */, A308015C2EC09BB700B1BA3A /* CityscapesClassConfig.swift in Sources */, A308015D2EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift in Sources */, @@ -1608,7 +1491,6 @@ A30801602EC09BB700B1BA3A /* VOCClassConfig.swift in Sources */, A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */, A30801612EC09BB700B1BA3A /* CocoCustom53ClassConfig.swift in Sources */, - A30BED382ED162E7004A5B51 /* MeshDefinitions.swift in Sources */, A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */, A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */, 
A3EE6E4A2F580D6200F515E6 /* TestCameraView.swift in Sources */, @@ -1621,7 +1503,6 @@ A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */, A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */, A3FFAA7E2DE3E41D002B99BD /* SegmentationARPipeline.swift in Sources */, - A30BED3C2ED2F48B004A5B51 /* MeshClusteringUtils.swift in Sources */, A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */, A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */, A36C6E022E134CE600A86004 /* bisenetv2_35_640_640.mlpackage in Sources */, @@ -1635,17 +1516,12 @@ A32D66532F7C3F2F00DC4173 /* OSWMultiPolygon.swift in Sources */, A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */, A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */, - A3AC01AF2F294CCD00A1D0E5 /* PlaneRasterizer.swift in Sources */, A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */, A35E05182EDEA476003C26CF /* AttributeEstimationPipeline.swift in Sources */, A32D66FB2F7EE9DA00DC4173 /* DamageDetectionModelRequestProcessor.swift in Sources */, A3C1D7472F886D9D00833411 /* SurfaceIntegrityFromImageExtension.swift in Sources */, - A3B5BDA02F831F270036C6EC /* ProjectionUtils.swift in Sources */, CAA947792CDE700A000C6918 /* AuthService.swift in Sources */, - A364B5DD2F259AFE00325E5C /* WorldPoints.metal in Sources */, - A3B5BDA52F8329A80036C6EC /* WorldPointsGridExtension.swift in Sources */, A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */, - A37E72142ED95C0C00CFE4EF /* MeshHelpers.swift in Sources */, A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */, A3C1D7352F84A78E00833411 /* SurfaceNormalsProcessor.swift in Sources */, A37E72182ED95D0600CFE4EF /* CapturedMeshDefinitions.swift in Sources */, @@ -1655,7 +1531,6 @@ 55659C102BB7863F0094DF01 /* SetupView.swift in Sources */, A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in 
Sources */, A32D66FD2F7EF10F00DC4173 /* DamageDetectionPipeline.swift in Sources */, - A34509DA2FA1A782003157B0 /* MetalBufferUtils.swift in Sources */, A3EE6E482F580D0D00F515E6 /* TestListView.swift in Sources */, A30D05842E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage in Sources */, A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */, @@ -1670,11 +1545,9 @@ A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */, CAA947762CDE6FBD000C6918 /* LoginView.swift in Sources */, 3222F91A2B622DFD0019A079 /* IOSAccessAssessmentApp.swift in Sources */, - A3B5BDA32F8329740036C6EC /* ProjectedWorldPointsExtension.swift in Sources */, A3EE6F002F6A29F500F515E6 /* LocationDetails.swift in Sources */, A32943572EE81BF700C4C1BC /* OSWLineString.swift in Sources */, A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */, - A37C3C1A2F3144F7001F4248 /* PlaneAttributeProcessor.swift in Sources */, A32943532EE814A700C4C1BC /* OSWElement.swift in Sources */, A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */, A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */, @@ -1697,9 +1570,7 @@ A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */, A37E3E9B2EFB8F7500B07B77 /* HeadingCoder.swift in Sources */, A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */, - A3DA4DA82EB94D84005BB812 /* MeshGPUSnapshot.swift in Sources */, A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */, - A37E72162ED95CB400CFE4EF /* MeshGPUDefinitions.swift in Sources */, A30F59CE2F7EFAC000EE7804 /* WidthExtension.swift in Sources */, DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */, A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsCoder.swift in Sources */, @@ -1713,23 +1584,20 @@ A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */, A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */, - A364B5DF2F26DB5700325E5C /* 
WorldPointsProcessor.swift in Sources */, A374B4AC2F8EF654003E030D /* CurrentMappingData.swift in Sources */, A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */, A3FE16672E18C81800DAE5BE /* LocationCoder.swift in Sources */, A3FFAA832DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage in Sources */, A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */, A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */, - A37C3C182F3141FF001F4248 /* Plane.metal in Sources */, A32D66F72F7EE88300DC4173 /* v8n_175_16_960.mlpackage in Sources */, - A364B5D92F259AD700325E5C /* PlaneProcessor.swift in Sources */, A3FFAA782DE01637002B99BD /* ARCameraUtils.swift in Sources */, A3C1D7492F886DDE00833411 /* SurfaceIntegrityFromMeshExtension.swift in Sources */, A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */, A30BED3A2ED162F1004A5B51 /* ConnectedComponents.swift in Sources */, A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */, A35547CA2EC2045F00F43AFD /* CapturedMeshSnapshot.swift in Sources */, - DAA7F8B52CA38C11003666D8 /* Constants.swift in Sources */, + DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */, A30F59D22F7EFACD00EE7804 /* CrossSlopeExtension.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/IOSAccessAssessment/ARCamera/ARCameraManager.swift b/IOSAccessAssessment/ARCamera/ARCameraManager.swift index ac8660b0..610a55c4 100644 --- a/IOSAccessAssessment/ARCamera/ARCameraManager.swift +++ b/IOSAccessAssessment/ARCamera/ARCameraManager.swift @@ -248,7 +248,7 @@ final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessi self.metalContext = metalContext self.isEnhancedAnalysisEnabled = isEnhancedAnalysisEnabled self.meshGPUSnapshotGenerator = MeshGPUSnapshotGenerator(device: metalContext.device) - try setUpPreAllocatedPixelBufferPools(size: Constants.SelectedAccessibilityFeatureConfig.inputSize) + try setUpPreAllocatedPixelBufferPools(size: 
SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize) self.cameraOutputImageCallback = cameraOutputImageCallback self.isConfigured = true @@ -382,7 +382,7 @@ extension ARCameraManager { } /// Pre-process the image: orient, center-crop, and back to pixel buffer let originalSize: CGSize = image.extent.size - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation ) diff --git a/IOSAccessAssessment/ARCamera/ARCameraViewController.swift b/IOSAccessAssessment/ARCamera/ARCameraViewController.swift index 3cb6c1df..9ae80aa9 100644 --- a/IOSAccessAssessment/ARCamera/ARCameraViewController.swift +++ b/IOSAccessAssessment/ARCamera/ARCameraViewController.swift @@ -359,7 +359,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO ) { var totalVertexCount = 0 for accessibilityFeatureClass in accessibilityFeatureClasses { - guard Constants.SelectedAccessibilityFeatureConfig.classes.contains(accessibilityFeatureClass) else { + guard SharedAppConstants.SelectedAccessibilityFeatureConfig.classes.contains(accessibilityFeatureClass) else { print("Invalid segmentation class: \(accessibilityFeatureClass)") continue } diff --git a/IOSAccessAssessment/ARCamera/TestCameraManager.swift b/IOSAccessAssessment/ARCamera/TestCameraManager.swift index af4ce958..ed1b1fcb 100644 --- a/IOSAccessAssessment/ARCamera/TestCameraManager.swift +++ b/IOSAccessAssessment/ARCamera/TestCameraManager.swift @@ -78,7 +78,7 @@ final class TestCameraManager: NSObject, ObservableObject, TestCameraProcessingD self.metalContext = metalContext self.isEnhancedAnalysisEnabled = isEnhancedAnalysisEnabled self.meshGPUSnapshotGenerator = MeshGPUSnapshotGenerator(device: metalContext.device) -// try 
setUpPreAllocatedPixelBufferPools(size: Constants.SelectedAccessibilityFeatureConfig.inputSize) +// try setUpPreAllocatedPixelBufferPools(size: SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize) self.cameraOutputImageCallback = cameraOutputImageCallback self.isConfigured = true @@ -207,7 +207,7 @@ extension TestCameraManager { guard let segmentationPipeline = segmentationPipeline else { throw ARCameraManagerError.segmentationNotConfigured } - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation ) diff --git a/IOSAccessAssessment/ARCamera/TestCameraViewController.swift b/IOSAccessAssessment/ARCamera/TestCameraViewController.swift index a03b8cc9..d1aeda05 100644 --- a/IOSAccessAssessment/ARCamera/TestCameraViewController.swift +++ b/IOSAccessAssessment/ARCamera/TestCameraViewController.swift @@ -331,7 +331,7 @@ final class TestCameraViewController: UIViewController, TestCameraProcessingOutp ) { var totalVertexCount = 0 for accessibilityFeatureClass in accessibilityFeatureClasses { - guard Constants.SelectedAccessibilityFeatureConfig.classes.contains(accessibilityFeatureClass) else { + guard SharedAppConstants.SelectedAccessibilityFeatureConfig.classes.contains(accessibilityFeatureClass) else { print("Invalid segmentation class: \(accessibilityFeatureClass)") continue } diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift index d75ceb87..65f0cb14 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift 
@@ -216,7 +216,7 @@ class AttributeEstimationPipeline: ObservableObject { accessibilityFeature: EditableAccessibilityFeature ) { /// Threshold needs to be in Map Units - let distanceThreshold = Constants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) + let distanceThreshold = SharedAppConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) guard let LocationDetails = accessibilityFeature.locationDetails else { accessibilityFeature.setIsExisting(false) return diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift index 25911ed2..e0399d9c 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift @@ -104,7 +104,7 @@ extension AttributeEstimationPipeline { } /// Run damage detection let cameraImage = captureImageData.cameraImage - let croppedSize = Constants.DamageDetectionConstants.inputSize + let croppedSize = SharedAppConstants.DamageDetectionConstants.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: captureImageData.interfaceOrientation ) diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift index bb40d9e1..3d21bc06 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift @@ -14,7 +14,7 @@ struct 
AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, let name: String /** - Segmentation-related constants + Segmentation-related constants */ /// Grayscale value output for the accessibility feature class, by the relevant segmentation model let grayscaleValue: Float @@ -24,13 +24,13 @@ struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, let color: CIColor /** - Constants related to mesh + Constants related to mesh */ /// Optional mesh classification for the segmentation class let meshClassification: Set /** - Post-Processing related constants. + Post-Processing related constants. */ /// Optional bounds for the segmentation class. Is kept optional to prevent unnecessary dimension based masking. let bounds: CGRect? @@ -44,7 +44,7 @@ struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, let experimentalAttributes: Set /** - Mapping-related constants + Mapping-related constants */ let oswPolicy: OSWPolicy diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/DetectedFeature.swift b/IOSAccessAssessment/AccessibilityFeature/Definitions/DetectedFeature.swift index 0265cedc..f99e17a2 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/DetectedFeature.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Definitions/DetectedFeature.swift @@ -4,6 +4,7 @@ // // Created by Himanshu on 11/9/25.
// +import PointNMapShared struct ContourDetails: Sendable, Codable, Equatable, Hashable { let centroid: CGPoint diff --git a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift b/IOSAccessAssessment/Annotation/AnnotationImageManager.swift index 3f383f73..238df3e4 100644 --- a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift +++ b/IOSAccessAssessment/Annotation/AnnotationImageManager.swift @@ -303,7 +303,7 @@ extension AnnotationImageManager { let cameraImage = captureImageData.cameraImage let interfaceOrientation = captureImageData.interfaceOrientation // let originalSize = captureImageData.originalSize - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -401,7 +401,7 @@ extension AnnotationImageManager { ) let interfaceOrientation = captureImageData.interfaceOrientation - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -512,7 +512,7 @@ extension AnnotationImageManager { interfaceOrientation: UIInterfaceOrientation ) throws -> CIImage { let raterizedFeaturesCIImage = CIImage(cgImage: raterizedFeaturesImage) - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -550,7 +550,7 @@ extension AnnotationImageManager { let rasterizedMeshCIImage = CIImage(cgImage: 
rasterizedMeshImage) let interfaceOrientation = captureMeshData.interfaceOrientation - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -625,7 +625,7 @@ extension AnnotationImageManager { ) else { return nil } let rasterizedPlaneCIImage = CIImage(cgImage: rasterizedPlaneCGImage) let interfaceOrientation = captureImageData.interfaceOrientation - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation ) @@ -651,7 +651,7 @@ extension AnnotationImageManager { ) else { return nil } let rasterizedDamageDetectionCIImage = CIImage(cgImage: rasterizedDamageDetectionCGImage) let interfaceOrientation = captureImageData.interfaceOrientation - let croppedSize = Constants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation ) diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift index 73d8672e..1ae4b61a 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift +++ b/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift @@ -6,6 +6,7 @@ // import Vision import CoreImage +import PointNMapShared public enum ContourRequestProcessorError: Error, LocalizedError { case 
contourProcessingFailed diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift b/IOSAccessAssessment/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift deleted file mode 100644 index 93ea7378..00000000 --- a/IOSAccessAssessment/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift +++ /dev/null @@ -1,26 +0,0 @@ -// -// MeshGPUDefinitions.swift -// IOSAccessAssessment -// -// Created by Himanshu on 11/27/25. -// -import Foundation - -struct MeshGPUAnchor { - var vertexBuffer: MTLBuffer - var indexBuffer: MTLBuffer - var classificationBuffer: MTLBuffer? = nil - var anchorTransform: simd_float4x4 - var vertexCount: Int = 0 - var indexCount: Int = 0 - var faceCount: Int = 0 - var generation: Int = 0 -} - -struct MeshGPUSnapshot { - let vertexStride: Int - let vertexOffset: Int - let indexStride: Int - let classificationStride: Int - let anchors: [UUID: MeshGPUAnchor] -} diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift b/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift index 10e97150..d5b310e6 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift +++ b/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift @@ -20,8 +20,8 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - angularDeviationThreshold: Float = Constants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, - deviantPointProportionThreshold: Float = Constants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold + angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, + 
deviantPointProportionThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold ) throws -> IntegrityStatusDetails { guard let commandBuffer = self.commandQueue.makeCommandBuffer() else { throw SurfaceIntegrityProcessorError.metalPipelineCreationError @@ -109,7 +109,7 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - boundingBoxAngularStdThreshold: Float = Constants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold + boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 @@ -245,8 +245,8 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - angularDeviationThreshold: Float = Constants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, - deviantPointProportionThreshold: Float = Constants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold + angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, + deviantPointProportionThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold ) throws -> IntegrityStatusDetails { let width = surfaceNormalsForPointsGrid.width let height = surfaceNormalsForPointsGrid.height @@ -273,7 +273,7 @@ extension SurfaceIntegrityProcessor { plane: Plane, surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, bounds: BoundsParams, - angularDeviationThreshold: Float = Constants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold + angularDeviationThreshold: Float = 
SharedAppConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold ) -> (deviantPointCount: Int, totalPointCount: Int) { let planeNormal = plane.normalVector var totalDeviantPoints = 0 @@ -306,7 +306,7 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - boundingBoxAreaThreshold: Float = Constants.SurfaceIntegrityConstants.imageBoundingBoxAreaThreshold, + boundingBoxAreaThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageBoundingBoxAreaThreshold, boundingBoxWorldPointRetrievalRadius: Float = 3.0 ) throws -> IntegrityStatusDetails { let width = surfaceNormalsForPointsGrid.width @@ -387,7 +387,7 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - boundingBoxAngularStdThreshold: Float = Constants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold + boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift b/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift index e433e68a..411cc02c 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift +++ b/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift @@ -16,8 +16,8 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any 
CaptureMeshDataProtocol), - angularDeviationThreshold: Float = Constants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - deviantPointProportionThreshold: Float = Constants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold + angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + deviantPointProportionThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold ) throws -> IntegrityStatusDetails { guard let commandBuffer = self.commandQueue.makeCommandBuffer() else { throw SurfaceIntegrityProcessorError.metalPipelineCreationError @@ -91,7 +91,7 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - boundingBoxAreaThreshold: Float = Constants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold + boundingBoxAreaThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 @@ -121,8 +121,8 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - angularDeviationThreshold: Float = Constants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - boundingBoxAngularStdThreshold: Float = Constants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold + angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 @@ -324,8 +324,8 @@ extension SurfaceIntegrityProcessor { plane: Plane, 
damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - angularDeviationThreshold: Float = Constants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - deviantPolygonProportion: Float = Constants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold + angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + deviantPolygonProportion: Float = SharedAppConstants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold ) throws -> IntegrityStatusDetails { let planeNormal = plane.normalVector var totalDeviantPolygons = 0 @@ -351,7 +351,7 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - boundingBoxAreaThreshold: Float = Constants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold + boundingBoxAreaThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold ) throws -> IntegrityStatusDetails { let viewMatrix = captureData.cameraTransform.inverse let totalBoundingBoxes = damageDetectionResults.count @@ -404,8 +404,8 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - angularDeviationThreshold: Float = Constants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - boundingBoxAngularStdThreshold: Float = Constants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold + angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let viewMatrix = captureData.cameraTransform.inverse let planeNormal = plane.normalVector diff --git 
a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift index 1c2a1ff4..50affd19 100644 --- a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift @@ -19,7 +19,7 @@ struct AccessibilityFeatureClassSnapshot: Codable, Identifiable, Sendable { /// Get AccessibilityFeatureClass from snapshot func getAccessibilityFeatureClass() -> AccessibilityFeatureClass? { - let matchedClass = Constants.SelectedAccessibilityFeatureConfig.classes.first { $0.id == self.id } + let matchedClass = SharedAppConstants.SelectedAccessibilityFeatureConfig.classes.first { $0.id == self.id } return matchedClass } } diff --git a/IOSAccessAssessment/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift b/IOSAccessAssessment/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift index edde4270..f717b128 100644 --- a/IOSAccessAssessment/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift +++ b/IOSAccessAssessment/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift @@ -70,7 +70,7 @@ struct DamageDetectionModelRequestProcessor { var visionModel: VNCoreMLModel init() throws { - guard let modelURL = Constants.DamageDetectionConstants.damageDetectionModelURL else { + guard let modelURL = SharedAppConstants.DamageDetectionConstants.damageDetectionModelURL else { throw DamageDetectionModelError.modelLoadingError } let configuration: MLModelConfiguration = MLModelConfiguration() diff --git a/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift b/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift index c2d195a9..f61f77cd 100644 --- 
a/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift +++ b/IOSAccessAssessment/MachineLearning/DepthEstimation/DepthModel.swift @@ -54,8 +54,8 @@ class DepthModel: ObservableObject { var buffer: CVPixelBuffer! let status = CVPixelBufferCreate( kCFAllocatorDefault, - Int(Constants.DepthConstants.inputSize.width), - Int(Constants.DepthConstants.inputSize.height), + Int(SharedAppConstants.DepthConstants.inputSize.width), + Int(SharedAppConstants.DepthConstants.inputSize.height), kCVPixelFormatType_32ARGB, nil, &buffer @@ -88,7 +88,7 @@ class DepthModel: ObservableObject { return CIImage(cvPixelBuffer: CVPixelBufferUtils.createBlankDepthPixelBuffer(targetSize: originalSize)!) } - let inputImage = ciImage.resized(to: Constants.DepthConstants.inputSize) + let inputImage = ciImage.resized(to: SharedAppConstants.DepthConstants.inputSize) context.render(inputImage, to: inputPixelBuffer) guard let result = try? visionModel.prediction(image: inputPixelBuffer) else { return CIImage(cvPixelBuffer: CVPixelBufferUtils.createBlankDepthPixelBuffer(targetSize: originalSize)!) 
diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift b/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift index a9fa1deb..785f9ca5 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift +++ b/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift @@ -33,7 +33,7 @@ struct SegmentationModelRequestProcessor { var selectedClasses: [AccessibilityFeatureClass] = [] init(selectedClasses: [AccessibilityFeatureClass]) throws { - guard let modelURL = Constants.SelectedAccessibilityFeatureConfig.modelURL else { + guard let modelURL = SharedAppConstants.SelectedAccessibilityFeatureConfig.modelURL else { throw SegmentationModelError.modelLoadingError } let configuration: MLModelConfiguration = MLModelConfiguration() @@ -71,7 +71,7 @@ struct SegmentationModelRequestProcessor { let uniqueGrayScaleValues = CVPixelBufferUtils.extractUniqueGrayscaleValues(from: segmentationBuffer) - let grayscaleValuesToClassMap = Constants.SelectedAccessibilityFeatureConfig.labelToClassMap + let grayscaleValuesToClassMap = SharedAppConstants.SelectedAccessibilityFeatureConfig.labelToClassMap var segmentedClasses = uniqueGrayScaleValues.compactMap { grayscaleValuesToClassMap[$0] } let segmentedClassSet = Set(segmentedClasses) segmentedClasses = self.selectedClasses.filter{ segmentedClassSet.contains($0) } diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift index c0202924..b6a5c361 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift +++ b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift @@ -190,8 +190,8 @@ final class SegmentationARPipeline: ObservableObject { var depthFilteredSegmentationImage: CIImage? 
= nil if let depthImage, let depthFilter = self.depthFilter { // Apply depth filtering to the segmentation image - let depthMinThresholdValue = Constants.DepthConstants.depthMinThreshold - let depthMaxThresholdValue = Constants.DepthConstants.depthMaxThreshold + let depthMinThresholdValue = SharedAppConstants.DepthConstants.depthMinThreshold + let depthMaxThresholdValue = SharedAppConstants.DepthConstants.depthMaxThreshold depthFilteredSegmentationImage = try depthFilter.apply( to: segmentationImage, depthImage: depthImage, depthMinThreshold: depthMinThresholdValue, depthMaxThreshold: depthMaxThresholdValue diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift index 0928b815..1fb0c0ee 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift +++ b/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift @@ -10,6 +10,7 @@ import Vision import OrderedCollections import simd +import PointNMapShared enum SegmentationAnnotationPipelineError: Error, LocalizedError { case isProcessingTrue diff --git a/IOSAccessAssessment/Shared/Constants.swift b/IOSAccessAssessment/Shared/SharedAppConstants.swift similarity index 88% rename from IOSAccessAssessment/Shared/Constants.swift rename to IOSAccessAssessment/Shared/SharedAppConstants.swift index 5522d0c1..f684a011 100644 --- a/IOSAccessAssessment/Shared/Constants.swift +++ b/IOSAccessAssessment/Shared/SharedAppConstants.swift @@ -1,5 +1,5 @@ // -// Constants.swift +// SharedAppConstants.swift // IOSAccessAssessment // // Created by TCAT on 9/24/24. @@ -8,35 +8,35 @@ import SwiftUI /** - Global Constants used across the app. + Global constants used across the app. 
*/ -struct Constants { +struct SharedAppConstants { // Supported Classes static let SelectedAccessibilityFeatureConfig: AccessibilityFeatureClassConfig = AccessibilityFeatureConfig.mapillaryCustom11Config struct DepthConstants { - /// Model-specific constants + /// Model-specific constants static let inputSize: CGSize = CGSize(width: 518, height: 392) - /// General constants + /// General constants static let depthMinThreshold: Float = 0.0 static let depthMaxThreshold: Float = 5.0 } struct DamageDetectionConstants { - /// Model-specific constants + /// Model-specific constants static let damageDetectionModelURL: URL? = Bundle.main.url(forResource: "v8n_175_16_960", withExtension: "mlmodelc") static let inputSize: CGSize = CGSize(width: 640, height: 640) } struct SurfaceIntegrityConstants { - /// Image (world point) based constants + /// Image (world point) based constants static let imagePlaneAngularDeviationThreshold: Float = 15.0 // Unit: degrees static let imageDeviantPointProportionThreshold: Float = 0.1 // Unit: percentage (0 to 1) static let imageBoundingBoxAreaThreshold: Float = 0.1 // Unit: m2 static let imageBoundingBoxAngularStdThreshold: Float = 10.0 // Unit: degrees - /// Mesh based constants + /// Mesh based constants static let meshPlaneAngularDeviationThreshold: Float = 7.5 // Unit: degrees static let meshDeviantPolygonProportionThreshold: Float = 0.1 // Unit: percentage (0 to 1) static let meshBoundingBoxAreaThreshold: Float = 0.1 // Unit: m2 diff --git a/IOSAccessAssessment/TDEI/Auth/EnvironmentService.swift b/IOSAccessAssessment/TDEI/Auth/EnvironmentService.swift index 92d48676..901e62f0 100644 --- a/IOSAccessAssessment/TDEI/Auth/EnvironmentService.swift +++ b/IOSAccessAssessment/TDEI/Auth/EnvironmentService.swift @@ -12,14 +12,14 @@ class EnvironmentService { var environment: APIEnvironment { get { - if let savedValue = UserDefaults.standard.string(forKey: 
Constants.UserDefaultsKeys.selectedEnvironmentKey), + if let savedValue = UserDefaults.standard.string(forKey: SharedAppConstants.UserDefaultsKeys.selectedEnvironmentKey), let savedEnvironment = APIEnvironment(rawValue: savedValue) { return savedEnvironment } return .staging // default value } set { - UserDefaults.standard.set(newValue.rawValue, forKey: Constants.UserDefaultsKeys.selectedEnvironmentKey) + UserDefaults.standard.set(newValue.rawValue, forKey: SharedAppConstants.UserDefaultsKeys.selectedEnvironmentKey) } } } diff --git a/IOSAccessAssessment/TDEI/Config/APIConstants.swift b/IOSAccessAssessment/TDEI/Config/APIConstants.swift index dbaadf08..7dfd5508 100644 --- a/IOSAccessAssessment/TDEI/Config/APIConstants.swift +++ b/IOSAccessAssessment/TDEI/Config/APIConstants.swift @@ -1,5 +1,5 @@ // -// Constants.swift +// APIConstants.swift // IOSAccessAssessment // // Created by Himanshu on 5/18/25. diff --git a/IOSAccessAssessment/View/ARCameraView.swift b/IOSAccessAssessment/View/ARCameraView.swift index 97b73628..19a2779c 100644 --- a/IOSAccessAssessment/View/ARCameraView.swift +++ b/IOSAccessAssessment/View/ARCameraView.swift @@ -361,7 +361,7 @@ struct ARCameraView: View { var shouldUpdateMap = oldLocation == nil && newLocation != nil if let oldLocation, let newLocation { let distance = oldLocation.distance(from: newLocation) - shouldUpdateMap = distance > Constants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters + shouldUpdateMap = distance > SharedAppConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters } if !shouldUpdateMap { return @@ -379,7 +379,7 @@ struct ARCameraView: View { let mapData = try await WorkspaceService.shared.fetchMapData( workspaceId: workspaceId, location: location, - radius: Constants.WorkspaceConstants.fetchRadiusInMeters, + radius: SharedAppConstants.WorkspaceConstants.fetchRadiusInMeters, accessToken: accessToken, environment: userStateViewModel.selectedEnvironment ) diff --git 
a/IOSAccessAssessment/View/AnnotationView.swift b/IOSAccessAssessment/View/AnnotationView.swift index 8811a4da..2ab69c30 100644 --- a/IOSAccessAssessment/View/AnnotationView.swift +++ b/IOSAccessAssessment/View/AnnotationView.swift @@ -643,7 +643,7 @@ struct AnnotationView: View { let mapData = try await WorkspaceService.shared.fetchMapData( workspaceId: workspaceId, location: captureLocation, - radius: Constants.WorkspaceConstants.fetchRadiusInMeters, + radius: SharedAppConstants.WorkspaceConstants.fetchRadiusInMeters, accessToken: accessToken, environment: userStateViewModel.selectedEnvironment ) diff --git a/IOSAccessAssessment/View/SetupView.swift b/IOSAccessAssessment/View/SetupView.swift index 977921cc..f4299c25 100644 --- a/IOSAccessAssessment/View/SetupView.swift +++ b/IOSAccessAssessment/View/SetupView.swift @@ -316,7 +316,7 @@ struct SetupView: View { } List { - ForEach(Constants.SelectedAccessibilityFeatureConfig.classes, id: \.self) { accessibilityFeatureClass in + ForEach(SharedAppConstants.SelectedAccessibilityFeatureConfig.classes, id: \.self) { accessibilityFeatureClass in Button(action: { if self.selectedClasses.contains(accessibilityFeatureClass) { self.selectedClasses.remove(accessibilityFeatureClass) diff --git a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift index eacc2c4a..2a374a4b 100644 --- a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift +++ b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift @@ -13,7 +13,7 @@ import PointNMapShared Sub-view of the `AnnotationView`. */ struct AnnotationFeatureDetailView: View { - enum Constants { + enum SharedAppConstants { enum Texts { /// Alert texts static let statusAlertTitleKey: String = "Error" @@ -119,13 +119,13 @@ struct AnnotationFeatureDetailView: View { Text( locationFormatter.string( from: NSNumber(value: featureLocation.latitude) - ) ?? 
AnnotationFeatureDetailView.Constants.Texts.invalidTextKey + ) ?? AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey ) .padding(.horizontal) Text( locationFormatter.string( from: NSNumber(value: featureLocation.longitude) - ) ?? AnnotationFeatureDetailView.Constants.Texts.invalidTextKey + ) ?? AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey ) .padding(.horizontal) Spacer() @@ -139,7 +139,7 @@ struct AnnotationFeatureDetailView: View { accessibilityFeature.setIsExisting(newValue) } )) { - Text(AnnotationFeatureDetailView.Constants.Texts.isExistingTitle) + Text(AnnotationFeatureDetailView.SharedAppConstants.Texts.isExistingTitle) } .disabled(accessibilityFeature.oswElement == nil) .foregroundStyle(accessibilityFeature.oswElement == nil ? .secondary : .primary) @@ -159,7 +159,7 @@ struct AnnotationFeatureDetailView: View { } } } else { - Text(AnnotationFeatureDetailView.Constants.Texts.invalidTextKey) + Text(AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey) .foregroundStyle(.secondary) } } @@ -291,7 +291,7 @@ struct AnnotationFeatureDetailView: View { HStack { Label( attributeStatus.errorMessage, - systemImage: AnnotationFeatureDetailView.Constants.Images.statusAlertImageNameKey + systemImage: AnnotationFeatureDetailView.SharedAppConstants.Images.statusAlertImageNameKey ) .foregroundStyle(.red) .font(.caption) @@ -335,7 +335,7 @@ struct AnnotationFeatureDetailView: View { guard let attributeValue = accessibilityFeature.experimentalAttributeValues[attribute], let attributeValue, let attributeBindableValue = attributeValue.toDouble() else { - return AnnotationFeatureDetailView.Constants.Texts.invalidTextKey + return AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey } return String(attributeBindableValue) }() @@ -345,7 +345,7 @@ struct AnnotationFeatureDetailView: View { HStack { Label( attributeStatus.errorMessage, - systemImage: AnnotationFeatureDetailView.Constants.Images.statusAlertImageNameKey 
+ systemImage: AnnotationFeatureDetailView.SharedAppConstants.Images.statusAlertImageNameKey ) .foregroundStyle(.red) .font(.caption) diff --git a/IOSAccessAssessment/View/TestMode/TestCameraView.swift b/IOSAccessAssessment/View/TestMode/TestCameraView.swift index 21f12e3d..81b93325 100644 --- a/IOSAccessAssessment/View/TestMode/TestCameraView.swift +++ b/IOSAccessAssessment/View/TestMode/TestCameraView.swift @@ -10,7 +10,7 @@ import CoreLocation import PointNMapShared /** - Additional constants unique to TestCameraView (not used in ARCameraView) + Additional SharedAppConstants unique to TestCameraView (not used in ARCameraView) */ enum TestCameraViewConstants { enum Texts { @@ -429,7 +429,7 @@ struct TestCameraView: View { var shouldUpdateMap = oldLocation == nil && newLocation != nil if let oldLocation, let newLocation { let distance = oldLocation.distance(from: newLocation) - shouldUpdateMap = distance > Constants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters + shouldUpdateMap = distance > SharedAppConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters } if !shouldUpdateMap { return @@ -447,7 +447,7 @@ struct TestCameraView: View { let mapData = try await WorkspaceService.shared.fetchMapData( workspaceId: workspaceId, location: location, - radius: Constants.WorkspaceConstants.fetchRadiusInMeters, + radius: SharedAppConstants.WorkspaceConstants.fetchRadiusInMeters, accessToken: accessToken, environment: userStateViewModel.selectedEnvironment ) diff --git a/IOSAccessAssessment/View/ViewModel/UserStateViewModel.swift b/IOSAccessAssessment/View/ViewModel/UserStateViewModel.swift index e14fef7c..c76aae3d 100644 --- a/IOSAccessAssessment/View/ViewModel/UserStateViewModel.swift +++ b/IOSAccessAssessment/View/ViewModel/UserStateViewModel.swift @@ -30,7 +30,7 @@ class UserStateViewModel: ObservableObject { */ @Published var isEnhancedAnalysisEnabled: Bool { didSet { - UserDefaults.standard.set(isEnhancedAnalysisEnabled, forKey: 
Constants.UserDefaultsKeys.isEnhancedAnalysisEnabledKey) + UserDefaults.standard.set(isEnhancedAnalysisEnabled, forKey: SharedAppConstants.UserDefaultsKeys.isEnhancedAnalysisEnabledKey) } } @@ -43,15 +43,15 @@ class UserStateViewModel: ObservableObject { */ @Published var appMode: AppMode { didSet { - UserDefaults.standard.set(appMode.rawValue, forKey: Constants.UserDefaultsKeys.appModeKey) + UserDefaults.standard.set(appMode.rawValue, forKey: SharedAppConstants.UserDefaultsKeys.appModeKey) } } init() { self.selectedEnvironment = environmentService.environment self.isAuthenticated = authService.checkTokenValid() - self.isEnhancedAnalysisEnabled = UserDefaults.standard.bool(forKey: Constants.UserDefaultsKeys.isEnhancedAnalysisEnabledKey) - if let savedAppModeRawValue = UserDefaults.standard.string(forKey: Constants.UserDefaultsKeys.appModeKey), + self.isEnhancedAnalysisEnabled = UserDefaults.standard.bool(forKey: SharedAppConstants.UserDefaultsKeys.isEnhancedAnalysisEnabledKey) + if let savedAppModeRawValue = UserDefaults.standard.string(forKey: SharedAppConstants.UserDefaultsKeys.appModeKey), let savedAppMode = AppMode(rawValue: savedAppModeRawValue) { self.appMode = savedAppMode } else { diff --git a/IOSAccessAssessment/View/ViewModel/WorkspaceViewModel.swift b/IOSAccessAssessment/View/ViewModel/WorkspaceViewModel.swift index 5abe33ca..3912df7d 100644 --- a/IOSAccessAssessment/View/ViewModel/WorkspaceViewModel.swift +++ b/IOSAccessAssessment/View/ViewModel/WorkspaceViewModel.swift @@ -24,8 +24,8 @@ class WorkspaceViewModel: ObservableObject { var changesetId: String? 
= nil init() { - if let savedWorkspaceId = UserDefaults.standard.string(forKey: Constants.UserDefaultsKeys.selectedWorkspaceIdKey), - let savedWorkspaceTitle = UserDefaults.standard.string(forKey: Constants.UserDefaultsKeys.selectedWorkspaceTitleKey) { + if let savedWorkspaceId = UserDefaults.standard.string(forKey: SharedAppConstants.UserDefaultsKeys.selectedWorkspaceIdKey), + let savedWorkspaceTitle = UserDefaults.standard.string(forKey: SharedAppConstants.UserDefaultsKeys.selectedWorkspaceTitleKey) { self.workspaceId = savedWorkspaceId self.workspaceTitle = savedWorkspaceTitle self.isWorkspaceSelected = true @@ -40,8 +40,8 @@ class WorkspaceViewModel: ObservableObject { self.workspaceId = id self.workspaceTitle = title self.isWorkspaceSelected = true - UserDefaults.standard.set(workspaceId, forKey: Constants.UserDefaultsKeys.selectedWorkspaceIdKey) - UserDefaults.standard.set(title, forKey: Constants.UserDefaultsKeys.selectedWorkspaceTitleKey) + UserDefaults.standard.set(workspaceId, forKey: SharedAppConstants.UserDefaultsKeys.selectedWorkspaceIdKey) + UserDefaults.standard.set(title, forKey: SharedAppConstants.UserDefaultsKeys.selectedWorkspaceTitleKey) } func updateChangeset(id: String) { @@ -52,7 +52,7 @@ class WorkspaceViewModel: ObservableObject { self.workspaceId = nil self.changesetId = nil self.isWorkspaceSelected = false - UserDefaults.standard.removeObject(forKey: Constants.UserDefaultsKeys.selectedWorkspaceIdKey) - UserDefaults.standard.removeObject(forKey: Constants.UserDefaultsKeys.selectedWorkspaceTitleKey) + UserDefaults.standard.removeObject(forKey: SharedAppConstants.UserDefaultsKeys.selectedWorkspaceIdKey) + UserDefaults.standard.removeObject(forKey: SharedAppConstants.UserDefaultsKeys.selectedWorkspaceTitleKey) } } diff --git a/IOSAccessAssessment/View/WorkspaceSelectionView.swift b/IOSAccessAssessment/View/WorkspaceSelectionView.swift index ef6fc8ea..b4f739a5 100644 --- a/IOSAccessAssessment/View/WorkspaceSelectionView.swift +++ 
b/IOSAccessAssessment/View/WorkspaceSelectionView.swift @@ -241,7 +241,7 @@ struct WorkspaceSelectionView: View { ) // MARK: Eventually, we should ensure that even primary workspaces have externalAppAccess enabled let primaryWorkspaces = workspaces.filter { workspace in - return Constants.WorkspaceConstants.primaryWorkspaceIds.contains("\(workspace.id)") + return SharedAppConstants.WorkspaceConstants.primaryWorkspaceIds.contains("\(workspace.id)") } workspaces = workspaces.filter { workspace in return workspace.externalAppAccess == 1 diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift index 96bded64..67a010eb 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyRequestProcessor.swift @@ -20,6 +20,8 @@ public enum HomographyRequestProcessorError: Error, LocalizedError { } public struct HomographyRequestProcessor { + public init() {} + /// Computes the homography transform for the reference image and the floating image. 
public func getHomographyTransform( referenceImage: CIImage, floatingImage: CIImage, orientation: CGImagePropertyOrientation = .up diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift index 3e3206fe..b44b5310 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Homography/HomographyTransformFilter.swift @@ -56,7 +56,8 @@ public struct HomographyTransformFilter { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "homographyWarpKernel"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let kernelFunction = library.makeFunction(name: "homographyWarpKernel"), let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { throw HomographyTransformFilterError.metalInitializationFailed } diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift index 3a3a7406..bb1c0f45 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/BinaryMaskFilter.swift @@ -54,7 +54,8 @@ public struct BinaryMaskFilter { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "binaryMaskingKernel"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let kernelFunction = library.makeFunction(name: "binaryMaskingKernel"), let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { throw BinaryMaskFilterError.metalInitializationFailed } diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift index fea08363..311f5733 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/DimensionBasedMaskFilter.swift @@ -10,6 +10,7 @@ import UIKit import Metal import CoreImage import MetalKit +import PointNMapShaderTypes public enum DimensionBasedMaskFilterError: Error, LocalizedError { case metalInitializationFailed @@ -60,7 +61,8 @@ public struct DimensionBasedMaskFilter { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "dimensionBasedMaskingKernel"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let kernelFunction = library.makeFunction(name: "dimensionBasedMaskingKernel"), let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { throw DimensionBasedMaskFilterError.metalInitializationFailed } diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift index 15572a3d..81931e1a 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/GrayscaleToColorFilter.swift @@ -54,7 +54,8 @@ public struct GrayscaleToColorFilter { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "colorMatchingKernelLUT"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let kernelFunction = library.makeFunction(name: "colorMatchingKernelLUT"), let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { throw GrayscaleToColorFilterError.metalInitializationFailed } diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift index 87035809..96954425 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/ImageProcessing/IntersectionFilter.swift @@ -54,7 +54,8 @@ public struct IntersectionFilter { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "intersectionTextureKernel"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let kernelFunction = library.makeFunction(name: "intersectionTextureKernel"), let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { throw IntersectionFilterError.metalInitializationFailed } diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift index 5c959ad9..7c4a7629 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/UnionOfMasks/UnionOfMasksProcessor.swift @@ -8,7 +8,6 @@ import UIKit import Metal import CoreImage import MetalKit -import PointNMapShared public enum UnionOfMasksProcessorError: Error, LocalizedError { case metalInitializationFailed @@ -69,7 +68,8 @@ public class UnionOfMasksProcessor { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "unionOfMasksKernel"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let kernelFunction = library.makeFunction(name: "unionOfMasksKernel"), let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { throw UnionOfMasksProcessorError.metalInitializationFailed } diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift index 8e3e0184..7101ff2f 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CVPixelBufferUtils.swift @@ -53,9 +53,7 @@ public struct CVPixelBufferUtils { } /** - This function extracts unique grayscale values from a pixel buffer, - gets the indices of these values from Constants.SelectedAccessibilityFeatureConfig.grayscaleValues, - and returns both the unique values and their corresponding indices. + This function extracts unique grayscale values from a pixel buffer */ public static func extractUniqueGrayscaleValues(from pixelBuffer: CVPixelBuffer) -> Set { CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift index aab35392..04ecfa83 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Utils/CenterCropTransformUtils.swift @@ -89,7 +89,8 @@ public struct CenterCropTransformUtils { guard let destTexture = self.device.makeTexture(descriptor: descriptor) else { throw CenterCropTransformUtilsError.textureCreationFailed } - guard let kernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "revertCenterCropAspectFitKernel"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let kernelFunction = library.makeFunction(name: "revertCenterCropAspectFitKernel"), let pipeline = try? 
device.makeComputePipelineState(function: kernelFunction) else { throw CenterCropTransformUtilsError.metalInitializationFailed } diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Clustering/MeshClusteringUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Clustering/MeshClusteringUtils.swift similarity index 71% rename from IOSAccessAssessment/ComputerVision/Mesh/Clustering/MeshClusteringUtils.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Clustering/MeshClusteringUtils.swift index 4caec790..127bff04 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/Clustering/MeshClusteringUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Clustering/MeshClusteringUtils.swift @@ -8,12 +8,12 @@ import Foundation import simd -struct MeshClusteringUtils { - static func distanceFunction(polygonA: MeshPolygon, polygonB: MeshPolygon) -> Float { +public struct MeshClusteringUtils { + public static func distanceFunction(polygonA: MeshPolygon, polygonB: MeshPolygon) -> Float { return simd_distance(polygonA.centroid, polygonB.centroid) } - static func adjacencyFunction(polygonA: MeshPolygon, polygonB: MeshPolygon, threshold: Float) -> Bool { + public static func adjacencyFunction(polygonA: MeshPolygon, polygonB: MeshPolygon, threshold: Float) -> Bool { for vertexA in polygonA.vertices { for vertexB in polygonB.vertices { /// Check if the vertex is the same. 
We use a small epsilon to account for floating point errors diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Definitions/MeshDefinitions.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshDefinitions.swift similarity index 76% rename from IOSAccessAssessment/ComputerVision/Mesh/Definitions/MeshDefinitions.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshDefinitions.swift index dcacdc88..2e159309 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/Definitions/MeshDefinitions.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshDefinitions.swift @@ -7,17 +7,18 @@ import Foundation import simd +import PointNMapShaderTypes -struct MeshContents: Sendable { - var positions: [packed_float3] - var indices: [UInt32] - var classifications: [UInt8]? = nil - var colorR8: Int - var colorG8: Int - var colorB8: Int +public struct MeshContents: Sendable { + public var positions: [packed_float3] + public var indices: [UInt32] + public var classifications: [UInt8]? = nil + public var colorR8: Int + public var colorG8: Int + public var colorB8: Int /// - Warning: Ideally, this property should be avoided for performance reasons. - var polygons: [MeshPolygon] { + public var polygons: [MeshPolygon] { var result: [MeshPolygon] = [] for i in stride(from: 0, to: indices.count, by: 3) { let i0 = Int(indices[i]) @@ -41,7 +42,7 @@ struct MeshContents: Sendable { } /// TODO: The efficiency of this can be improved through GPU acceleration if needed. - var triangles: [MeshTriangle] { + public var triangles: [MeshTriangle] { var result: [MeshTriangle] = [] for i in stride(from: 0, to: indices.count, by: 3) { let i0 = Int(indices[i]) @@ -57,31 +58,31 @@ struct MeshContents: Sendable { } /// - Warning: Ideally, this struct should be avoided for performance reasons. It is recommended to use the `MeshContents` properties directly for efficient processing. 
-struct MeshPolygon: Sendable { - let v0: simd_float3 - let v1: simd_float3 - let v2: simd_float3 +public struct MeshPolygon: Sendable { + public let v0: simd_float3 + public let v1: simd_float3 + public let v2: simd_float3 - let index0: Int - let index1: Int - let index2: Int + public let index0: Int + public let index1: Int + public let index2: Int - var centroid: simd_float3 { + public var centroid: simd_float3 { return (v0 + v1 + v2) / 3.0 } - var vertices: [simd_float3] { + public var vertices: [simd_float3] { return [v0, v1, v2] } - var area: Float { + public var area: Float { let edge1 = v1 - v0 let edge2 = v2 - v0 let crossProduct = simd_cross(edge1, edge2) return simd_length(crossProduct) / 2.0 } - var normal: simd_float3 { + public var normal: simd_float3 { let edge1 = v1 - v0 let edge2 = v2 - v0 return simd_normalize(simd_cross(edge1, edge2)) @@ -91,7 +92,7 @@ struct MeshPolygon: Sendable { /** Enum representing the dimensions of a mesh. */ -enum MeshDimension: CaseIterable, Codable, Sendable { +public enum MeshDimension: CaseIterable, Codable, Sendable { /// The X dimension. Horizontal axis. Matches the latitude direction as measured by Location services. case x /// The Y dimension. Vertical axis. @@ -102,7 +103,7 @@ enum MeshDimension: CaseIterable, Codable, Sendable { /** Provides the index corresponding to the dimension. */ - var index: Int { + public var index: Int { switch self { case .x: return 0 diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift new file mode 100644 index 00000000..30aae898 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshGPUDefinitions.swift @@ -0,0 +1,27 @@ +// +// MeshGPUDefinitions.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/27/25. 
+// +import Foundation +import PointNMapShaderTypes + +public struct MeshGPUAnchor { + public var vertexBuffer: MTLBuffer + public var indexBuffer: MTLBuffer + public var classificationBuffer: MTLBuffer? = nil + public var anchorTransform: simd_float4x4 + public var vertexCount: Int = 0 + public var indexCount: Int = 0 + public var faceCount: Int = 0 + public var generation: Int = 0 +} + +public struct MeshGPUSnapshot { + public let vertexStride: Int + public let vertexOffset: Int + public let indexStride: Int + public let classificationStride: Int + public let anchors: [UUID: MeshGPUAnchor] +} diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Helpers/MeshHelpers.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Helpers/MeshHelpers.swift similarity index 94% rename from IOSAccessAssessment/ComputerVision/Mesh/Helpers/MeshHelpers.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Helpers/MeshHelpers.swift index a8ac1fec..b17ea8e2 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/Helpers/MeshHelpers.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Helpers/MeshHelpers.swift @@ -6,12 +6,13 @@ // import simd +import CoreGraphics /** Helper functions for processing mesh polygons on the CPU. 
*/ -struct MeshHelpers { - static func getPolygonsCoordinates( +public struct MeshHelpers { + public static func getPolygonsCoordinates( meshPolygons: [MeshPolygon], viewMatrix: simd_float4x4, cameraIntrinsics: simd_float3x3, @@ -41,7 +42,7 @@ struct MeshHelpers { return trianglePoints } - static func projectWorldToPixel( + public static func projectWorldToPixel( _ world: simd_float3, viewMatrix: simd_float4x4, // (world->camera) intrinsics K: simd_float3x3, diff --git a/IOSAccessAssessment/ComputerVision/Mesh/MeshGPUSnapshot.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshGPUSnapshot.swift similarity index 95% rename from IOSAccessAssessment/ComputerVision/Mesh/MeshGPUSnapshot.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshGPUSnapshot.swift index cdb72c66..e339b9c1 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/MeshGPUSnapshot.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshGPUSnapshot.swift @@ -10,9 +10,9 @@ import RealityKit /** Functionality to capture ARMeshAnchor data as a GPU-friendly snapshot */ -final class MeshGPUSnapshotGenerator: NSObject { - // MARK: These constants can be made configurable later - // But make sure that the snapshot from MeshContents extension continues to use the original constants. +public final class MeshGPUSnapshotGenerator: NSObject { + // MARK: These constants can be made configurable later + // But make sure that the snapshot from MeshContents extension continues to use the original constants. private let defaultBufferSize: Int = 1024 private let vertexElemSize: Int = MemoryLayout.stride * 3 private let vertexOffset: Int = 0 @@ -30,21 +30,21 @@ final class MeshGPUSnapshotGenerator: NSObject { private let anchorLifetimeThreshold: Int = 10 private let device: MTLDevice - var currentSnapshot: MeshGPUSnapshot? + public var currentSnapshot: MeshGPUSnapshot?
- init(device: MTLDevice) { + public init(device: MTLDevice) { self.device = device } - func reset() { + public func reset() { currentSnapshot = nil } - func buffers(for anchorId: UUID) -> MeshGPUAnchor? { + public func buffers(for anchorId: UUID) -> MeshGPUAnchor? { return currentSnapshot?.anchors[anchorId] } - func snapshotAnchors(_ anchors: [ARAnchor]) throws { + public func snapshotAnchors(_ anchors: [ARAnchor]) throws { let meshAnchors = anchors.compactMap { $0 as? ARMeshAnchor } let meshAnchorIds: Set = Set(meshAnchors.map { $0.identifier }) var meshGPUAnchors: [UUID: MeshGPUAnchor] = [:] @@ -79,7 +79,7 @@ final class MeshGPUSnapshotGenerator: NSObject { ) } - func removeAnchors(_ anchors: [ARAnchor]) { + public func removeAnchors(_ anchors: [ARAnchor]) { let meshAnchors = anchors.compactMap { $0 as? ARMeshAnchor } var meshGPUAnchors = currentSnapshot?.anchors ?? [:] for (_, meshAnchor) in meshAnchors.enumerated() { @@ -98,7 +98,7 @@ final class MeshGPUSnapshotGenerator: NSObject { TODO: Check possibility of blitting directly to MTLBuffer using a blit command encoder for better performance */ - func createSnapshot(meshAnchor: ARMeshAnchor) throws -> MeshGPUAnchor { + public func createSnapshot(meshAnchor: ARMeshAnchor) throws -> MeshGPUAnchor { let geometry = meshAnchor.geometry let vertices = geometry.vertices // ARGeometrySource (format .float3) let faces = geometry.faces // ARGeometryElement @@ -177,7 +177,7 @@ final class MeshGPUSnapshotGenerator: NSObject { /** Extension to generate snapshot from MeshContents. */ -extension MeshGPUSnapshotGenerator { +public extension MeshGPUSnapshotGenerator { /** Uses MeshContents to create a snapshot that can be used for GPU processing. This is useful for testing and visualization purposes where we want to bypass ARKit and directly feed in mesh data. 
diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift similarity index 93% rename from IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift index 2ab97802..dda5a497 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/Utils/MeshRasterizer.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift @@ -12,8 +12,8 @@ import PointNMapShared /** Functions to rasterize mesh triangles into an image. */ -struct MeshRasterizer { - static func createPath(points: [SIMD2], size: CGSize) -> UIBezierPath { +public struct MeshRasterizer { + public static func createPath(points: [SIMD2], size: CGSize) -> UIBezierPath { let path = UIBezierPath() guard let firstPoint = points.first else { return path } @@ -37,7 +37,7 @@ struct MeshRasterizer { - size: The size of the output image. - boundsConfig: Configuration for drawing triangle bounds, including color and line width. */ - static func rasterizeMesh( + public static func rasterizeMesh( polygonsNormalizedCoordinates: [(SIMD2, SIMD2, SIMD2)], size: CGSize, boundsConfig: RasterizeConfig = RasterizeConfig(color: .white, width: 2.0) ) -> CGImage? 
{ diff --git a/IOSAccessAssessment/ComputerVision/Projection/Plane/Plane.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/Plane.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Projection/Plane/Plane.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/Plane.metal diff --git a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift similarity index 95% rename from IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift index a843bbca..29c97a5d 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift @@ -10,14 +10,15 @@ import RealityKit import MetalKit import Accelerate import simd +import PointNMapShaderTypes -enum PlaneAttributeProcessorError: Error, LocalizedError { +public enum PlaneAttributeProcessorError: Error, LocalizedError { case metalInitializationFailed case metalPipelineCreationError case metalPipelineBlitEncoderError case endpointsComputationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." 
@@ -31,24 +32,24 @@ enum PlaneAttributeProcessorError: Error, LocalizedError { } } -struct ProjectedPointBin: Sendable { - let binValueCount: Int - let binValues: [Float] - let sRange: (Float, Float) +public struct ProjectedPointBin: Sendable { + public let binValueCount: Int + public let binValues: [Float] + public let sRange: (Float, Float) } -struct ProjectedPointBins: Sendable { - let binCount: Int - let binSize: Float - let bins: [ProjectedPointBin] +public struct ProjectedPointBins: Sendable { + public let binCount: Int + public let binSize: Float + public let bins: [ProjectedPointBin] } -struct BinWidth: Sendable { - let width: Float - let count: Int +public struct BinWidth: Sendable { + public let width: Float + public let count: Int } -struct PlaneAttributeProcessor { +public struct PlaneAttributeProcessor { private let device: MTLDevice private let commandQueue: MTLCommandQueue @@ -58,10 +59,10 @@ struct PlaneAttributeProcessor { private let ciContext: CIContext - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { - throw WorldPointsProcessorError.metalInitializationFailed + throw PlaneAttributeProcessorError.metalInitializationFailed } self.device = device self.commandQueue = commandQueue @@ -71,12 +72,12 @@ struct PlaneAttributeProcessor { guard let binPointKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "binProjectedPoints"), let binPointPipeline = try? device.makeComputePipelineState(function: binPointKernelFunction) else { - throw WorldPointsProcessorError.metalInitializationFailed + throw PlaneAttributeProcessorError.metalInitializationFailed } self.binPointPipeline = binPointPipeline guard let binTriangleKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "binMeshTriangles"), let binTrianglePipeline = try? 
device.makeComputePipelineState(function: binTriangleKernelFunction) else { - throw WorldPointsProcessorError.metalInitializationFailed + throw PlaneAttributeProcessorError.metalInitializationFailed } self.binTrianglePipeline = binTrianglePipeline } @@ -90,7 +91,7 @@ struct PlaneAttributeProcessor { - Returns: A ProjectedPointBinValues object containing the binned values. */ - func binProjectedPoints( + public func binProjectedPoints( projectedPoints: [ProjectedPoint], binSize: Float = 0.25 ) throws -> ProjectedPointBins { @@ -211,7 +212,7 @@ struct PlaneAttributeProcessor { - Returns: An array of BinWidth representing the computed widths for each bin. */ - func computeWidthByBin( + public func computeWidthByBin( projectedPointBins: ProjectedPointBins, minCount: Int = 100, trimLow: Float = 0.05, trimHigh: Float = 0.95 @@ -241,7 +242,7 @@ struct PlaneAttributeProcessor { /** Get the endpoints of the sidewalk along the 's' axis by analyzing the projected points. */ - func getEndpointsFromBins( + public func getEndpointsFromBins( projectedPointBins: ProjectedPointBins, trimLow: Float = 0.05, trimHigh: Float = 0.95 ) throws -> (ProjectedPoint, ProjectedPoint) { @@ -267,7 +268,7 @@ struct PlaneAttributeProcessor { } } -extension PlaneAttributeProcessor { +public extension PlaneAttributeProcessor { /** Bin projected points for mesh triangles using a reference projected point binning along the 's' axis. 
*/ diff --git a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneProcessor.swift similarity index 93% rename from IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneProcessor.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneProcessor.swift index bbe8c17a..4b055e20 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneProcessor.swift @@ -7,14 +7,16 @@ import Accelerate import CoreImage +import simd +import PointNMapShaderTypes -enum PlaneProcessorError: Error, LocalizedError { +public enum PlaneProcessorError: Error, LocalizedError { case initializationError(message: String) case invalidPointData case invalidPlaneData case invalidProjectionData - var errorDescription: String? { + public var errorDescription: String? { switch self { case .initializationError(let message): return "PlaneFit Initialization Error: \(message)" @@ -28,15 +30,15 @@ enum PlaneProcessorError: Error, LocalizedError { } } -struct Plane: Sendable, CustomStringConvertible { - var firstVector: simd_float3 - var secondVector: simd_float3 - var normalVector: simd_float3 // Normal vector - var d: Float // Offset from origin +public struct Plane: Sendable, CustomStringConvertible { + public var firstVector: simd_float3 + public var secondVector: simd_float3 + public var normalVector: simd_float3 // Normal vector + public var d: Float // Offset from origin - var origin: simd_float3 + public var origin: simd_float3 - var description: String { + public var description: String { return "Plane(firstVector: \(firstVector), \nsecondVector: \(secondVector), \nnormalVector: \(normalVector), \nd: \(d), \norigin: \(origin))" } } @@ -45,28 +47,28 @@ struct Plane: Sendable, CustomStringConvertible { ProjectedPlane represents the 2D projection of a 3D plane onto the 
image plane, containing the projected origin and vectors. Each vector is a tuple of two points in 2D pixel coordinates, representing the start and end of the projected vector. */ -struct ProjectedPlane: Sendable, CustomStringConvertible { - var origin: SIMD2 - var firstVector: (SIMD2, SIMD2) - var secondVector: (SIMD2, SIMD2) - var normalVector: (SIMD2, SIMD2) +public struct ProjectedPlane: Sendable, CustomStringConvertible { + public var origin: SIMD2 + public var firstVector: (SIMD2, SIMD2) + public var secondVector: (SIMD2, SIMD2) + public var normalVector: (SIMD2, SIMD2) /// Can contain reference vectors for debugging or visualization - var additionalVectors: [(SIMD2, SIMD2)] + public var additionalVectors: [(SIMD2, SIMD2)] - var description: String { + public var description: String { return "ProjectedPlane(firstVector: \(firstVector), \nsecondVector: \(secondVector), \nnormalVector: \(normalVector), additionalVectorsCount: \(additionalVectors.count), \norigin: \(origin))" } } -struct PlaneProcessor { +public struct PlaneProcessor { private let worldPointsProcessor: WorldPointsProcessor - init(worldPointsProcessor: WorldPointsProcessor) { + public init(worldPointsProcessor: WorldPointsProcessor) { self.worldPointsProcessor = worldPointsProcessor } - func fitPlanePCA(worldPoints: [WorldPoint]) throws -> Plane { + public func fitPlanePCA(worldPoints: [WorldPoint]) throws -> Plane { guard worldPoints.count>=3 else { throw PlaneProcessorError.invalidPointData } @@ -124,7 +126,7 @@ struct PlaneProcessor { return plane } - func fitPlanePCA(points: [WorldPoint], weights: [Float]? = nil) throws -> Plane { + public func fitPlanePCA(points: [WorldPoint], weights: [Float]? = nil) throws -> Plane { guard points.count>=3 else { throw PlaneProcessorError.invalidPointData } @@ -193,7 +195,7 @@ struct PlaneProcessor { /** Extension for aligning planes based on camera view direction. 
*/ -extension PlaneProcessor { +public extension PlaneProcessor { /** Function to align the plane's vectors based on camera view direction. @@ -205,9 +207,9 @@ extension PlaneProcessor { plane: Plane, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3, - imageSize: CGSize + imageSize: CGSize, + alignmentThreshold: Float = PointNMapConstants.OtherConstants.directionAlignmentDotProductThreshold ) throws -> Plane { - let alignmentThreshold = Constants.OtherConstants.directionAlignmentDotProductThreshold let viewVector = simd_normalize(simd_float3( cameraTransform.columns.2.x, cameraTransform.columns.2.y, @@ -278,7 +280,7 @@ extension PlaneProcessor { /** Extension for projecting planes to 2D pixel coordinates. */ -extension PlaneProcessor { +public extension PlaneProcessor { /** Function to project a 3D plane to 2D pixel coordinates. Can be used for visualization or debugging purposes. diff --git a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneRasterizer.swift similarity index 91% rename from IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneRasterizer.swift index e8909eb1..06ae1da1 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/Plane/PlaneRasterizer.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneRasterizer.swift @@ -7,13 +7,12 @@ import CoreImage import UIKit -import PointNMapShared -struct PlaneRasterizer { +public struct PlaneRasterizer { /** The path points for ProjectedPlane are unnormalized. 
*/ - static func createPath(points: [SIMD2], size: CGSize) -> UIBezierPath { + public static func createPath(points: [SIMD2], size: CGSize) -> UIBezierPath { let path = UIBezierPath() guard let firstPoint = points.first else { return path } @@ -29,14 +28,14 @@ struct PlaneRasterizer { return path } - static func createCircle(point: SIMD2, size: CGSize, radius: CGFloat) -> UIBezierPath { + public static func createCircle(point: SIMD2, size: CGSize, radius: CGFloat) -> UIBezierPath { let path = UIBezierPath() let pixelPoint = CGPoint(x: CGFloat(point.x), y: (size.height - CGFloat(point.y))) path.addArc(withCenter: pixelPoint, radius: radius, startAngle: 0, endAngle: CGFloat(2 * Double.pi), clockwise: true) return path } - static func rasterizePlane( + public static func rasterizePlane( projectedPlane: ProjectedPlane, size: CGSize, linesConfig: RasterizeConfig = RasterizeConfig(color: .white, width: 4.0) diff --git a/IOSAccessAssessment/ComputerVision/Projection/ProjectionUtils.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/ProjectionUtils.swift similarity index 96% rename from IOSAccessAssessment/ComputerVision/Projection/ProjectionUtils.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/ProjectionUtils.swift index 156e799d..f13f0834 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/ProjectionUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/ProjectionUtils.swift @@ -12,7 +12,7 @@ import simd /** Struct that contains utility functions related to projection and coordinate transformations, such as projecting world points to screen space or calculating view directions. This can be used across the application wherever such transformations are needed, especially in the context of accessibility feature estimation and visualization. 
*/ -struct ProjectionUtils { +public struct ProjectionUtils { /** Projects a 2D pixel point with depth information to a 3D world point using the camera's transform and intrinsics. @@ -25,7 +25,7 @@ struct ProjectionUtils { - NOTE: Because of the inefficiency of matrix inversion, it's recommended to precompute the inverse of the camera intrinsics matrix if projecting multiple points, and use the version of this function that accepts the inverse directly. */ - static func projectPixelToWorld( + public static func projectPixelToWorld( pixelPoint: CGPoint, depth: Float, cameraTransform: simd_float4x4, @@ -47,7 +47,7 @@ struct ProjectionUtils { - cameraTransform: The 4x4 transformation matrix representing the camera's position and orientation in world space. - invCameraIntrinsics: The precomputed inverse of the camera's intrinsic parameters matrix. */ - static func projectPixelToWorld( + public static func projectPixelToWorld( pixelPoint: CGPoint, depth: Float, cameraTransform: simd_float4x4, @@ -82,7 +82,7 @@ struct ProjectionUtils { - NOTE: Because of the inefficiency of matrix inversion, it's recommended to precompute the inverse of the camera transform if projecting multiple points, and use the version of this function that accepts the view matrix directly. */ - static func unprojectWorldToPixel( + public static func unprojectWorldToPixel( worldPoint: simd_float3, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3, @@ -103,7 +103,7 @@ struct ProjectionUtils { - cameraIntrinsics: The 3x3 matrix containing the camera's intrinsic parameters. - imageSize: The size of the image in pixels, used to check if the projected point is within the image bounds. 
*/ - static func unprojectWorldToPixel( + public static func unprojectWorldToPixel( worldPoint: simd_float3, viewMatrix: simd_float4x4, cameraIntrinsics: simd_float3x3, diff --git a/IOSAccessAssessment/ComputerVision/Projection/WorldPoints/Extensions/ProjectedWorldPointsExtension.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/Extensions/ProjectedWorldPointsExtension.swift similarity index 98% rename from IOSAccessAssessment/ComputerVision/Projection/WorldPoints/Extensions/ProjectedWorldPointsExtension.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/Extensions/ProjectedWorldPointsExtension.swift index c728f602..dcb91c16 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/WorldPoints/Extensions/ProjectedWorldPointsExtension.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/Extensions/ProjectedWorldPointsExtension.swift @@ -9,11 +9,12 @@ import ARKit import RealityKit import MetalKit import simd +import PointNMapShaderTypes /** Extension for projecting world points to plane and unprojecting them back to world coordinates. 
*/ -extension WorldPointsProcessor { +public extension WorldPointsProcessor { func projectPointsToPlane( worldPoints: [WorldPoint], plane: Plane, diff --git a/IOSAccessAssessment/ComputerVision/Projection/WorldPoints/Extensions/WorldPointsGridExtension.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/Extensions/WorldPointsGridExtension.swift similarity index 96% rename from IOSAccessAssessment/ComputerVision/Projection/WorldPoints/Extensions/WorldPointsGridExtension.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/Extensions/WorldPointsGridExtension.swift index 90cf5c61..cee137d4 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/WorldPoints/Extensions/WorldPointsGridExtension.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/Extensions/WorldPointsGridExtension.swift @@ -9,16 +9,17 @@ import ARKit import RealityKit import MetalKit import simd +import PointNMapShaderTypes /** A grid of world points structured for efficient spatial queries based on their projected pixel coordinates. */ -struct WorldPointsGrid: Sendable { - let width: Int - let height: Int - var data: [WorldPointsGridCell] +public struct WorldPointsGrid: Sendable { + public let width: Int + public let height: Int + public var data: [WorldPointsGridCell] - subscript(x: Int, y: Int) -> WorldPointsGridCell { + public subscript(x: Int, y: Int) -> WorldPointsGridCell { get { return data[y * width + x] } set { data[y * width + x] = newValue } } @@ -27,7 +28,7 @@ struct WorldPointsGrid: Sendable { /** Extension for restructuring world points array into more efficient data structures for improved post-processing. */ -extension WorldPointsProcessor { +public extension WorldPointsProcessor { /** Restructure world points into a 2D grid based on their projected pixel coordinates, for more efficient spatial queries. 
This method uses the GPU for parallel processing of world points, which can significantly speed up the operation for large point clouds. */ diff --git a/IOSAccessAssessment/ComputerVision/Projection/WorldPoints/WorldPoints.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPoints.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Projection/WorldPoints/WorldPoints.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPoints.metal diff --git a/IOSAccessAssessment/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift similarity index 94% rename from IOSAccessAssessment/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift index 4cb8a97a..b073b776 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift @@ -9,8 +9,9 @@ import ARKit import RealityKit import MetalKit import simd +import PointNMapShaderTypes -enum WorldPointsProcessorError: Error, LocalizedError { +public enum WorldPointsProcessorError: Error, LocalizedError { case metalInitializationFailed case invalidInputImage case textureCreationFailed @@ -20,7 +21,7 @@ enum WorldPointsProcessorError: Error, LocalizedError { case unableToProcessBufferData case noWorldPointsToProcess - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." @@ -45,7 +46,7 @@ enum WorldPointsProcessorError: Error, LocalizedError { /** Extacting 3D world points. 
*/ -struct WorldPointsProcessor { +public struct WorldPointsProcessor { let device: MTLDevice let commandQueue: MTLCommandQueue @@ -56,7 +57,7 @@ struct WorldPointsProcessor { let ciContext: CIContext - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw WorldPointsProcessorError.metalInitializationFailed @@ -87,12 +88,14 @@ struct WorldPointsProcessor { /** Extract world points from segmentation and depth images (GPU version). */ - func getWorldPoints( + public func getWorldPoints( segmentationLabelImage: CIImage, depthImage: CIImage, targetValue: UInt8, cameraTransform: simd_float4x4, - cameraIntrinsics: simd_float3x3 + cameraIntrinsics: simd_float3x3, + depthMinThreshold: Float = PointNMapConstants.DepthConstants.depthMinThreshold, + depthMaxThreshold: Float = PointNMapConstants.DepthConstants.depthMaxThreshold ) throws -> [WorldPoint] { guard let commandBuffer = self.commandQueue.makeCommandBuffer() else { throw WorldPointsProcessorError.metalPipelineCreationError @@ -117,8 +120,8 @@ struct WorldPointsProcessor { var targetValueVar = targetValue var params = WorldPointsParams( imageSize: imageSize, - minDepthThreshold: Constants.DepthConstants.depthMinThreshold, - maxDepthThreshold: Constants.DepthConstants.depthMaxThreshold, + minDepthThreshold: depthMinThreshold, + maxDepthThreshold: depthMaxThreshold, cameraTransform: cameraTransform, invIntrinsics: invIntrinsics ) @@ -191,15 +194,15 @@ struct WorldPointsProcessor { /** Extract world points from segmentation and depth images (CPU version). 
*/ - func getWorldPointsCPU( + public func getWorldPointsCPU( segmentationLabelImage: CIImage, depthImage: CIImage, targetValue: UInt8, cameraTransform: simd_float4x4, - cameraIntrinsics: simd_float3x3 + cameraIntrinsics: simd_float3x3, + minDepthThreshold: Float = PointNMapConstants.DepthConstants.depthMinThreshold, + maxDepthThreshold: Float = PointNMapConstants.DepthConstants.depthMaxThreshold ) throws -> [WorldPoint] { - let minDepthThreshold = Constants.DepthConstants.depthMinThreshold - let maxDepthThreshold = Constants.DepthConstants.depthMaxThreshold let invIntrinsics = simd_inverse(cameraIntrinsics) /// Get CVPixelBuffer from segmentation image @@ -288,7 +291,7 @@ struct WorldPointsProcessor { /** Extension for debugging world points statistics. */ -extension WorldPointsProcessor { +public extension WorldPointsProcessor { private func debugWorldPoints(_ worldPoints: [WorldPoint]) { debugAxis(worldPoints, axisIndex: 0, axisLabel: "X") debugAxis(worldPoints, axisIndex: 1, axisLabel: "Y") diff --git a/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift b/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift new file mode 100644 index 00000000..9a147d74 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift @@ -0,0 +1,69 @@ +// +// PointNMapConstants.swift +// IOSAccessAssessment +// +// Created by Himanshu on 4/30/26. 
+// +import SwiftUI + +public struct PointNMapConstants { + // Supported Classes + static let SelectedAccessibilityFeatureConfig: AccessibilityFeatureClassConfig = AccessibilityFeatureConfig.mapillaryCustom11Config + + public struct DepthConstants { + /// Model-specific SharedAppConstants + public static let inputSize: CGSize = CGSize(width: 518, height: 392) + + /// General SharedAppConstants + public static let depthMinThreshold: Float = 0.0 + public static let depthMaxThreshold: Float = 5.0 + } + + public struct DamageDetectionConstants { + /// Model-specific SharedAppConstants + public static let damageDetectionModelURL: URL? = Bundle.main.url(forResource: "v8n_175_16_960", withExtension: "mlmodelc") + public static let inputSize: CGSize = CGSize(width: 640, height: 640) + } + + public struct SurfaceIntegrityConstants { + /// Image (world point) based SharedAppConstants + public static let imagePlaneAngularDeviationThreshold: Float = 15.0 // Unit: degrees + public static let imageDeviantPointProportionThreshold: Float = 0.1 // Unit: percentage (0 to 1) + public static let imageBoundingBoxAreaThreshold: Float = 0.1 // Unit: m2 + public static let imageBoundingBoxAngularStdThreshold: Float = 10.0 // Unit: degrees + + /// Mesh based SharedAppConstants + public static let meshPlaneAngularDeviationThreshold: Float = 7.5 // Unit: degrees + public static let meshDeviantPolygonProportionThreshold: Float = 0.1 // Unit: percentage (0 to 1) + public static let meshBoundingBoxAreaThreshold: Float = 0.1 // Unit: m2 + public static let meshBoundingBoxAngularStdThreshold: Float = 5.0 // Unit: degrees + } + + public struct WorkspaceConstants { + public static let primaryWorkspaceIds: [String] = ["1830"] +// ["1463"] +// ["288", "349", "1411"] +// "252", "322", "368", "374", "378", "381", "384", "323", "369", "156", "375", "379"] + + public static let fetchRadiusInMeters: Double = 100.0 + public static let fetchUpdateRadiusThresholdInMeters: Double = 50.0 + public static let 
updateElementDistanceThresholdInMeters: Double = 20.0 + } + + public struct OtherConstants { + public static let directionAlignmentDotProductThreshold: Float = 0.866 // cos(30 degrees) + } + + public struct UserDefaultsKeys { + /// Environment selection + public static let selectedEnvironmentKey = "selectedEnvironment" + + /// Workspace selection + public static let selectedWorkspaceIdKey = "selectedWorkspaceId" + public static let selectedWorkspaceTitleKey = "selectedWorkspaceTitle" + + /// User settings + public static let isEnhancedAnalysisEnabledKey = "isEnhancedAnalysisEnabled" + public static let appModeKey = "appMode" + } +} diff --git a/IOSAccessAssessment/Shared/Utils/MetalBufferUtils.swift b/PointNMapShared/Sources/PointNMap/Shared/Utils/MetalBufferUtils.swift similarity index 100% rename from IOSAccessAssessment/Shared/Utils/MetalBufferUtils.swift rename to PointNMapShared/Sources/PointNMap/Shared/Utils/MetalBufferUtils.swift From 27aa3fe04d008fe987a0624bb30ca33522096ca9 Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 15:16:44 -0700 Subject: [PATCH 08/14] Add more files for ComputerVision and MachineLearning into the framework --- IOSAccessAssessment.xcodeproj/project.pbxproj | 142 +----------------- .../Plane/PlaneAttributeProcessor.swift | 5 +- .../SurfaceIntegrityFromImageExtension.swift | 19 +-- .../SurfaceIntegrityFromMeshExtension.swift | 23 +-- .../SurfaceIntegrity/SurfaceIntegrity.metal | 0 .../SurfaceIntegrityProcessor.swift | 36 ++--- .../SurfaceNormals/SurfaceNormals.metal | 0 .../SurfaceNormalsProcessor.swift | 30 ++-- .../WorldPoints/WorldPointsProcessor.swift | 7 +- .../Clustering/ConnectedComponents.swift | 4 +- .../MachineLearning/Clustering/DBSCAN.swift | 4 +- .../DamageDetectionPipeline.swift | 11 +- .../DamageDetectionRasterizer.swift | 5 +- ...DamageDetectionModelRequestProcessor.swift | 27 ++-- .../SegmentationModelRequestProcessor.swift | 16 +- .../Segmentation/SegmentationARPipeline.swift | 28 ++-- 
.../SegmentationAnnotationPipeline.swift | 26 ++-- .../PointNMap/Shared/PointNMapConstants.swift | 4 +- 18 files changed, 133 insertions(+), 254 deletions(-) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift (97%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift (97%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrity.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift (92%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/SurfaceNormals/SurfaceNormals.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Projection/SurfaceNormals/SurfaceNormalsProcessor.swift (94%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/Clustering/ConnectedComponents.swift (97%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/Clustering/DBSCAN.swift (95%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/DamageDetection/DamageDetectionPipeline.swift (77%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift (93%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift (87%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift (86%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/Segmentation/SegmentationARPipeline.swift (92%) 
rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift (93%) diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 3407adcc..e442425f 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -27,10 +27,8 @@ A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */; }; A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */; }; A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */; }; - A30BED3A2ED162F1004A5B51 /* ConnectedComponents.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30BED392ED162ED004A5B51 /* ConnectedComponents.swift */; }; A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */; }; A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */; }; - A30D05842E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A30D05832E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage */; }; A30F59CE2F7EFAC000EE7804 /* WidthExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59CD2F7EFABC00EE7804 /* WidthExtension.swift */; }; A30F59D02F7EFAC700EE7804 /* RunninSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */; }; A30F59D22F7EFACD00EE7804 /* CrossSlopeExtension.swift in Sources */ = {isa 
= PBXBuildFile; fileRef = A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */; }; @@ -53,9 +51,6 @@ A32943572EE81BF700C4C1BC /* OSWLineString.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943562EE81BF700C4C1BC /* OSWLineString.swift */; }; A32943592EE8204400C4C1BC /* OSWPolygon.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943582EE8204400C4C1BC /* OSWPolygon.swift */; }; A32D66532F7C3F2F00DC4173 /* OSWMultiPolygon.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */; }; - A32D66F72F7EE88300DC4173 /* v8n_175_16_960.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A32D66F62F7EE88300DC4173 /* v8n_175_16_960.mlpackage */; }; - A32D66FB2F7EE9DA00DC4173 /* DamageDetectionModelRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32D66FA2F7EE9D500DC4173 /* DamageDetectionModelRequestProcessor.swift */; }; - A32D66FD2F7EF10F00DC4173 /* DamageDetectionPipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32D66FC2F7EF10A00DC4173 /* DamageDetectionPipeline.swift */; }; A33EB5AB2F76080E008ABFB7 /* APIEndpoint.swift in Sources */ = {isa = PBXBuildFile; fileRef = A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */; }; A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */; }; A3431E022F26FA2C00B96610 /* LocationExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E012F26FA2700B96610 /* LocationExtension.swift */; }; @@ -89,7 +84,6 @@ A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */; }; A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */; }; A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */ = 
{isa = PBXBuildFile; fileRef = A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */; }; - A37E72102ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E720F2ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift */; }; A37E72182ED95D0600CFE4EF /* CapturedMeshDefinitions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */; }; A37E721D2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */; }; A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338BE2EDA889A00F1A402 /* CustomPicker.swift */; }; @@ -98,19 +92,10 @@ A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */; }; A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */; }; A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413A52ECD86260039298C /* AccessibilityFeature.swift */; }; - A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A413AC2ECF94950039298C /* DBSCAN.swift */; }; A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */; }; - A3B2DDC12DC99F44003416FB /* SegmentationModelRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */; }; - A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B5BD9C2F81CED70036C6EC /* DamageDetectionRasterizer.swift */; }; A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */ = {isa 
= PBXBuildFile; fileRef = A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */; }; A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */; }; A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */; }; - A3C1D7352F84A78E00833411 /* SurfaceNormalsProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7342F84A78800833411 /* SurfaceNormalsProcessor.swift */; }; - A3C1D7392F84A84900833411 /* SurfaceNormals.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7382F84A84900833411 /* SurfaceNormals.metal */; }; - A3C1D7402F882EF600833411 /* SurfaceIntegrityProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D73F2F882EF200833411 /* SurfaceIntegrityProcessor.swift */; }; - A3C1D7442F886D3500833411 /* SurfaceIntegrity.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7432F886D3100833411 /* SurfaceIntegrity.metal */; }; - A3C1D7472F886D9D00833411 /* SurfaceIntegrityFromImageExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7462F886D8E00833411 /* SurfaceIntegrityFromImageExtension.swift */; }; - A3C1D7492F886DDE00833411 /* SurfaceIntegrityFromMeshExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C1D7482F886DD800833411 /* SurfaceIntegrityFromMeshExtension.swift */; }; A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */ = {isa = PBXBuildFile; productRef = A3C22FD72CF2F0C300533BF7 /* DequeModule */; }; A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */; }; A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */; }; @@ -144,7 +129,6 @@ A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in 
Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */; }; A3FFAA782DE01637002B99BD /* ARCameraUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA772DE01634002B99BD /* ARCameraUtils.swift */; }; A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA792DE01A0F002B99BD /* ARCameraView.swift */; }; - A3FFAA7E2DE3E41D002B99BD /* SegmentationARPipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA7D2DE3E41D002B99BD /* SegmentationARPipeline.swift */; }; A3FFAA802DE444C6002B99BD /* AnnotationOption.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA7F2DE444C3002B99BD /* AnnotationOption.swift */; }; A3FFAA832DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA822DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage */; }; CA924A932CEB9AB000FCA928 /* ChangesetService.swift in Sources */ = {isa = PBXBuildFile; fileRef = CA924A922CEB9AB000FCA928 /* ChangesetService.swift */; }; @@ -251,10 +235,8 @@ A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DetectedFeature.swift; sourceTree = ""; }; A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshInstancePolicy.swift; sourceTree = ""; }; A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureAttributeExtension.swift; sourceTree = ""; }; - A30BED392ED162ED004A5B51 /* ConnectedComponents.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConnectedComponents.swift; sourceTree = ""; }; A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditableAccessibilityFeature.swift; sourceTree = ""; 
}; A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MappedAccessibilityFeature.swift; sourceTree = ""; }; - A30D05832E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2_11_640_640.mlpackage; sourceTree = ""; }; A30F59CD2F7EFABC00EE7804 /* WidthExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WidthExtension.swift; sourceTree = ""; }; A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunninSlopeExtension.swift; sourceTree = ""; }; A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CrossSlopeExtension.swift; sourceTree = ""; }; @@ -274,9 +256,6 @@ A32943562EE81BF700C4C1BC /* OSWLineString.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWLineString.swift; sourceTree = ""; }; A32943582EE8204400C4C1BC /* OSWPolygon.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolygon.swift; sourceTree = ""; }; A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWMultiPolygon.swift; sourceTree = ""; }; - A32D66F62F7EE88300DC4173 /* v8n_175_16_960.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = v8n_175_16_960.mlpackage; sourceTree = ""; }; - A32D66FA2F7EE9D500DC4173 /* DamageDetectionModelRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DamageDetectionModelRequestProcessor.swift; sourceTree = ""; }; - A32D66FC2F7EF10A00DC4173 /* DamageDetectionPipeline.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DamageDetectionPipeline.swift; sourceTree = ""; }; 
A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEndpoint.swift; sourceTree = ""; }; A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEnvironment.swift; sourceTree = ""; }; A3431E012F26FA2700B96610 /* LocationExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationExtension.swift; sourceTree = ""; }; @@ -312,7 +291,6 @@ A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureSnapshot.swift; sourceTree = ""; }; A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureClassSnapshot.swift; sourceTree = ""; }; A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppContext.swift; sourceTree = ""; }; - A37E720F2ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationAnnotationPipeline.swift; sourceTree = ""; }; A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CapturedMeshDefinitions.swift; sourceTree = ""; }; A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourFeatureRasterizer.swift; sourceTree = ""; }; A38338BE2EDA889A00F1A402 /* CustomPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = ""; }; @@ -321,19 +299,10 @@ A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIConstants.swift; sourceTree = ""; 
}; A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageManager.swift; sourceTree = ""; }; A3A413A52ECD86260039298C /* AccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeature.swift; sourceTree = ""; }; - A3A413AC2ECF94950039298C /* DBSCAN.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DBSCAN.swift; sourceTree = ""; }; A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomXMLParser.swift; sourceTree = ""; }; - A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationModelRequestProcessor.swift; sourceTree = ""; }; - A3B5BD9C2F81CED70036C6EC /* DamageDetectionRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DamageDetectionRasterizer.swift; sourceTree = ""; }; A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnvironmentService.swift; sourceTree = ""; }; A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMMapDataResponse.swift; sourceTree = ""; }; A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureEncoder.swift; sourceTree = ""; }; - A3C1D7342F84A78800833411 /* SurfaceNormalsProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceNormalsProcessor.swift; sourceTree = ""; }; - A3C1D7382F84A84900833411 /* SurfaceNormals.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = SurfaceNormals.metal; sourceTree = ""; }; - A3C1D73F2F882EF200833411 
/* SurfaceIntegrityProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceIntegrityProcessor.swift; sourceTree = ""; }; - A3C1D7432F886D3100833411 /* SurfaceIntegrity.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = SurfaceIntegrity.metal; sourceTree = ""; }; - A3C1D7462F886D8E00833411 /* SurfaceIntegrityFromImageExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceIntegrityFromImageExtension.swift; sourceTree = ""; }; - A3C1D7482F886DD800833411 /* SurfaceIntegrityFromMeshExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceIntegrityFromMeshExtension.swift; sourceTree = ""; }; A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameRasterizer.swift; sourceTree = ""; }; A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceViewModel.swift; sourceTree = ""; }; A3D78D752E654F14003BFE78 /* ProfileView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileView.swift; sourceTree = ""; }; @@ -368,7 +337,6 @@ A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraManager.swift; sourceTree = ""; }; A3FFAA772DE01634002B99BD /* ARCameraUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraUtils.swift; sourceTree = ""; }; A3FFAA792DE01A0F002B99BD /* ARCameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraView.swift; sourceTree = ""; }; - A3FFAA7D2DE3E41D002B99BD /* SegmentationARPipeline.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationARPipeline.swift; sourceTree = ""; }; A3FFAA7F2DE444C3002B99BD /* AnnotationOption.swift 
*/ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationOption.swift; sourceTree = ""; }; A3FFAA822DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2_53_640_640.mlpackage; sourceTree = ""; }; CA924A922CEB9AB000FCA928 /* ChangesetService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChangesetService.swift; sourceTree = ""; }; @@ -387,6 +355,9 @@ isa = PBXFileSystemSynchronizedBuildFileExceptionSet; membershipExceptions = ( Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, + Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift, + Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift, + Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift, ); platformFiltersByRelativePath = { PointNMapShared.h = ( @@ -410,6 +381,9 @@ isa = PBXFileSystemSynchronizedBuildFileExceptionSet; membershipExceptions = ( Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, + Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift, + Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift, + Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift, ); target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; }; @@ -551,9 +525,7 @@ isa = PBXGroup; children = ( A33EB5AD2F761BFC008ABFB7 /* ModelZoo */, - A3A413AB2ECF948E0039298C /* Clustering */, A3F38C4A2D38A2BF00900547 /* DepthEstimation */, - A32D66F82F7EE9AC00DC4173 /* DamageDetection */, DAA7F8BE2CA683DC003666D8 /* Segmentation */, ); path = MachineLearning; @@ -723,41 +695,6 @@ path = Others; sourceTree = ""; }; - A32D66F52F7EE82A00DC4173 /* DamageDetection */ = { - isa = PBXGroup; - children = ( - A32D66F62F7EE88300DC4173 /* v8n_175_16_960.mlpackage */, - ); - path = DamageDetection; 
- sourceTree = ""; - }; - A32D66F82F7EE9AC00DC4173 /* DamageDetection */ = { - isa = PBXGroup; - children = ( - A32D66F92F7EE9D000DC4173 /* Functions */, - A32D66FC2F7EF10A00DC4173 /* DamageDetectionPipeline.swift */, - A3B5BD9C2F81CED70036C6EC /* DamageDetectionRasterizer.swift */, - ); - path = DamageDetection; - sourceTree = ""; - }; - A32D66F92F7EE9D000DC4173 /* Functions */ = { - isa = PBXGroup; - children = ( - A32D66FA2F7EE9D500DC4173 /* DamageDetectionModelRequestProcessor.swift */, - ); - path = Functions; - sourceTree = ""; - }; - A32D66FF2F7EF83E00DC4173 /* Projection */ = { - isa = PBXGroup; - children = ( - A3C1D73E2F882EE100833411 /* SurfaceIntegrity */, - A3B5BD9E2F82FEA50036C6EC /* SurfaceNormals */, - ); - path = Projection; - sourceTree = ""; - }; A33EB5AC2F761B5D008ABFB7 /* Definitions */ = { isa = PBXGroup; children = ( @@ -769,7 +706,6 @@ A33EB5AD2F761BFC008ABFB7 /* ModelZoo */ = { isa = PBXGroup; children = ( - A32D66F52F7EE82A00DC4173 /* DamageDetection */, A3A413A82ECF947C0039298C /* DepthEstimation */, A3A413A72ECF94690039298C /* SemanticSegmentation */, ); @@ -781,7 +717,6 @@ children = ( DAA7F8C62CA76514003666D8 /* Image */, A3DA4DA62EB9320E005BB812 /* Mesh */, - A32D66FF2F7EF83E00DC4173 /* Projection */, ); path = ComputerVision; sourceTree = ""; @@ -1016,7 +951,6 @@ A3A413A72ECF94690039298C /* SemanticSegmentation */ = { isa = PBXGroup; children = ( - A30D05832E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage */, A36C6E012E134CE600A86004 /* bisenetv2_35_640_640.mlpackage */, A3FFAA822DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage */, A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */, @@ -1033,15 +967,6 @@ path = DepthEstimation; sourceTree = ""; }; - A3A413AB2ECF948E0039298C /* Clustering */ = { - isa = PBXGroup; - children = ( - A30BED392ED162ED004A5B51 /* ConnectedComponents.swift */, - A3A413AC2ECF94950039298C /* DBSCAN.swift */, - ); - path = Clustering; - sourceTree = ""; - }; A3A45F0F2EE7BD9C0029F5AE /* OSW */ = { isa = 
PBXGroup; children = ( @@ -1065,42 +990,6 @@ path = Helpers; sourceTree = ""; }; - A3B2DDBB2DC99212003416FB /* Functions */ = { - isa = PBXGroup; - children = ( - A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */, - ); - path = Functions; - sourceTree = ""; - }; - A3B5BD9E2F82FEA50036C6EC /* SurfaceNormals */ = { - isa = PBXGroup; - children = ( - A3C1D7342F84A78800833411 /* SurfaceNormalsProcessor.swift */, - A3C1D7382F84A84900833411 /* SurfaceNormals.metal */, - ); - path = SurfaceNormals; - sourceTree = ""; - }; - A3C1D73E2F882EE100833411 /* SurfaceIntegrity */ = { - isa = PBXGroup; - children = ( - A3C1D7452F886D8600833411 /* Extensions */, - A3C1D7432F886D3100833411 /* SurfaceIntegrity.metal */, - A3C1D73F2F882EF200833411 /* SurfaceIntegrityProcessor.swift */, - ); - path = SurfaceIntegrity; - sourceTree = ""; - }; - A3C1D7452F886D8600833411 /* Extensions */ = { - isa = PBXGroup; - children = ( - A3C1D7482F886DD800833411 /* SurfaceIntegrityFromMeshExtension.swift */, - A3C1D7462F886D8E00833411 /* SurfaceIntegrityFromImageExtension.swift */, - ); - path = Extensions; - sourceTree = ""; - }; A3DA4DA62EB9320E005BB812 /* Mesh */ = { isa = PBXGroup; children = ( @@ -1186,9 +1075,6 @@ DAA7F8BE2CA683DC003666D8 /* Segmentation */ = { isa = PBXGroup; children = ( - A3B2DDBB2DC99212003416FB /* Functions */, - A3FFAA7D2DE3E41D002B99BD /* SegmentationARPipeline.swift */, - A37E720F2ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift */, ); path = Segmentation; sourceTree = ""; @@ -1466,7 +1352,6 @@ A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */, A3FE16632E18BAEB00DAE5BE /* ConfidenceEncoder.swift in Sources */, A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */, - A3B2DDC12DC99F44003416FB /* SegmentationModelRequestProcessor.swift in Sources */, A3FFAA802DE444C6002B99BD /* AnnotationOption.swift in Sources */, CAF812C42CFA108100D44B84 /* UserStateViewModel.swift in Sources */, A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in 
Sources */, @@ -1486,7 +1371,6 @@ A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */, A3E6D2332F464A2D00DAF88E /* PngDecoder.mm in Sources */, A3431E022F26FA2C00B96610 /* LocationExtension.swift in Sources */, - A3C1D7402F882EF600833411 /* SurfaceIntegrityProcessor.swift in Sources */, A30F59D02F7EFAC700EE7804 /* RunninSlopeExtension.swift in Sources */, A30801602EC09BB700B1BA3A /* VOCClassConfig.swift in Sources */, A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */, @@ -1502,7 +1386,6 @@ CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */, A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */, - A3FFAA7E2DE3E41D002B99BD /* SegmentationARPipeline.swift in Sources */, A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */, A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */, A36C6E022E134CE600A86004 /* bisenetv2_35_640_640.mlpackage in Sources */, @@ -1518,21 +1401,16 @@ A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */, A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */, A35E05182EDEA476003C26CF /* AttributeEstimationPipeline.swift in Sources */, - A32D66FB2F7EE9DA00DC4173 /* DamageDetectionModelRequestProcessor.swift in Sources */, - A3C1D7472F886D9D00833411 /* SurfaceIntegrityFromImageExtension.swift in Sources */, CAA947792CDE700A000C6918 /* AuthService.swift in Sources */, A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */, A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */, - A3C1D7352F84A78E00833411 /* SurfaceNormalsProcessor.swift in Sources */, A37E72182ED95D0600CFE4EF /* CapturedMeshDefinitions.swift in Sources */, A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */, A3F38C4C2D38A2C700900547 /* DepthModel.swift in Sources */, A3EE6EFC2F69285600F515E6 /* LocationFromImageExtension.swift in Sources */, 
55659C102BB7863F0094DF01 /* SetupView.swift in Sources */, A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */, - A32D66FD2F7EF10F00DC4173 /* DamageDetectionPipeline.swift in Sources */, A3EE6E482F580D0D00F515E6 /* TestListView.swift in Sources */, - A30D05842E2B1DB400316FB5 /* bisenetv2_11_640_640.mlpackage in Sources */, A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */, DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources */, A3EE6E4C2F580E2B00F515E6 /* DatasetLister.swift in Sources */, @@ -1551,19 +1429,15 @@ A32943532EE814A700C4C1BC /* OSWElement.swift in Sources */, A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */, A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */, - A3B5BD9D2F81CEDD0036C6EC /* DamageDetectionRasterizer.swift in Sources */, A3EE6E4E2F5A258B00F515E6 /* TestCameraManager.swift in Sources */, - A37E72102ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift in Sources */, A329434C2EE7CFE800C4C1BC /* OSWField.swift in Sources */, A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */, A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */, A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */, - A3C1D7392F84A84900833411 /* SurfaceNormals.metal in Sources */, A3431E042F26FA7200B96610 /* OtherAttributeExtensionLegacy.swift in Sources */, A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */, A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */, A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */, - A3A413AD2ECF94970039298C /* DBSCAN.swift in Sources */, A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */, A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */, A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */, @@ -1580,7 +1454,6 @@ A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, A3EE6E542F67A41100F515E6 /* UtilityExtension.swift in Sources */, 
A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, - A3C1D7442F886D3500833411 /* SurfaceIntegrity.metal in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */, A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */, @@ -1590,11 +1463,8 @@ A3FFAA832DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage in Sources */, A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */, A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */, - A32D66F72F7EE88300DC4173 /* v8n_175_16_960.mlpackage in Sources */, A3FFAA782DE01637002B99BD /* ARCameraUtils.swift in Sources */, - A3C1D7492F886DDE00833411 /* SurfaceIntegrityFromMeshExtension.swift in Sources */, A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */, - A30BED3A2ED162F1004A5B51 /* ConnectedComponents.swift in Sources */, A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */, A35547CA2EC2045F00F43AFD /* CapturedMeshSnapshot.swift in Sources */, DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */, diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift index 29c97a5d..928f4310 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/Plane/PlaneAttributeProcessor.swift @@ -70,12 +70,13 @@ public struct PlaneAttributeProcessor { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let binPointKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "binProjectedPoints"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let binPointKernelFunction = 
library.makeFunction(name: "binProjectedPoints"), let binPointPipeline = try? device.makeComputePipelineState(function: binPointKernelFunction) else { throw PlaneAttributeProcessorError.metalInitializationFailed } self.binPointPipeline = binPointPipeline - guard let binTriangleKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "binMeshTriangles"), + guard let binTriangleKernelFunction = library.makeFunction(name: "binMeshTriangles"), let binTrianglePipeline = try? device.makeComputePipelineState(function: binTriangleKernelFunction) else { throw PlaneAttributeProcessorError.metalInitializationFailed } diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift similarity index 97% rename from IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift index d5b310e6..33a4d070 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromImageExtension.swift @@ -9,8 +9,9 @@ import ARKit import RealityKit import MetalKit import simd +import PointNMapShaderTypes -extension SurfaceIntegrityProcessor { +public extension SurfaceIntegrityProcessor { /** This function assesses the integrity of the surface based on the angular deviation of surface normals from the plane normal using GPU acceleration. It calculates the proportion of points that deviate beyond a specified angular threshold and determines the integrity status based on whether this proportion exceeds a defined threshold. 
*/ @@ -20,8 +21,8 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, - deviantPointProportionThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold + angularDeviationThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, + deviantPointProportionThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold ) throws -> IntegrityStatusDetails { guard let commandBuffer = self.commandQueue.makeCommandBuffer() else { throw SurfaceIntegrityProcessorError.metalPipelineCreationError @@ -109,7 +110,7 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold + boundingBoxAngularStdThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 @@ -245,8 +246,8 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, - deviantPointProportionThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold + angularDeviationThreshold: Float = 
PointNMapConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold, + deviantPointProportionThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.imageDeviantPointProportionThreshold ) throws -> IntegrityStatusDetails { let width = surfaceNormalsForPointsGrid.width let height = surfaceNormalsForPointsGrid.height @@ -273,7 +274,7 @@ extension SurfaceIntegrityProcessor { plane: Plane, surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, bounds: BoundsParams, - angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold + angularDeviationThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.imagePlaneAngularDeviationThreshold ) -> (deviantPointCount: Int, totalPointCount: Int) { let planeNormal = plane.normalVector var totalDeviantPoints = 0 @@ -306,7 +307,7 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - boundingBoxAreaThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageBoundingBoxAreaThreshold, + boundingBoxAreaThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.imageBoundingBoxAreaThreshold, boundingBoxWorldPointRetrievalRadius: Float = 3.0 ) throws -> IntegrityStatusDetails { let width = surfaceNormalsForPointsGrid.width @@ -387,7 +388,7 @@ extension SurfaceIntegrityProcessor { surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureImageDataProtocol), - boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold + boundingBoxAngularStdThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.imageBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 diff --git 
a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift similarity index 97% rename from IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift index 411cc02c..b8897455 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/Extensions/SurfaceIntegrityFromMeshExtension.swift @@ -9,15 +9,16 @@ import ARKit import RealityKit import MetalKit import simd +import PointNMapShaderTypes -extension SurfaceIntegrityProcessor { +public extension SurfaceIntegrityProcessor { func getSurfaceNormalIntegrityResultFromMesh( meshTriangles: [MeshTriangle], plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - deviantPointProportionThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold + angularDeviationThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + deviantPointProportionThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold ) throws -> IntegrityStatusDetails { guard let commandBuffer = self.commandQueue.makeCommandBuffer() else { throw SurfaceIntegrityProcessorError.metalPipelineCreationError @@ -91,7 +92,7 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any 
CaptureMeshDataProtocol), - boundingBoxAreaThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold + boundingBoxAreaThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 @@ -121,8 +122,8 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold + angularDeviationThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + boundingBoxAngularStdThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let totalBoundingBoxes = damageDetectionResults.count var deviantBoundingBoxes = 0 @@ -324,8 +325,8 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - deviantPolygonProportion: Float = SharedAppConstants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold + angularDeviationThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + deviantPolygonProportion: Float = PointNMapConstants.SurfaceIntegrityConstants.meshDeviantPolygonProportionThreshold ) throws -> IntegrityStatusDetails { let planeNormal = plane.normalVector var totalDeviantPolygons = 0 @@ -351,7 +352,7 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], 
captureData: (any CaptureMeshDataProtocol), - boundingBoxAreaThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold + boundingBoxAreaThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshBoundingBoxAreaThreshold ) throws -> IntegrityStatusDetails { let viewMatrix = captureData.cameraTransform.inverse let totalBoundingBoxes = damageDetectionResults.count @@ -404,8 +405,8 @@ extension SurfaceIntegrityProcessor { plane: Plane, damageDetectionResults: [DamageDetectionResult], captureData: (any CaptureMeshDataProtocol), - angularDeviationThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, - boundingBoxAngularStdThreshold: Float = SharedAppConstants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold + angularDeviationThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshPlaneAngularDeviationThreshold, + boundingBoxAngularStdThreshold: Float = PointNMapConstants.SurfaceIntegrityConstants.meshBoundingBoxAngularStdThreshold ) throws -> IntegrityStatusDetails { let viewMatrix = captureData.cameraTransform.inverse let planeNormal = plane.normalVector diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrity.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrity.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrity.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrity.metal diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift similarity index 92% rename from IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift rename to 
PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift index ad29cb20..ccf15591 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift @@ -9,9 +9,9 @@ import ARKit import RealityKit import MetalKit import simd -import PointNMapShared +import PointNMapShaderTypes -enum SurfaceIntegrityProcessorError: Error, LocalizedError { +public enum SurfaceIntegrityProcessorError: Error, LocalizedError { case metalInitializationFailed case metalPipelineCreationError case metalPipelineBlitEncoderError @@ -19,7 +19,7 @@ enum SurfaceIntegrityProcessorError: Error, LocalizedError { case unableToProcessBufferData case meshPipelineBlitEncoderError - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." 
@@ -53,23 +53,23 @@ enum SurfaceIntegrityProcessorError: Error, LocalizedError { // } //} -struct IntegrityStatusDetails { - var status: SurfaceIntegrityStatus - var details: String +public struct IntegrityStatusDetails { + public var status: SurfaceIntegrityStatus + public var details: String - init(status: SurfaceIntegrityStatus = .intact, details: String = "") { + public init(status: SurfaceIntegrityStatus = .intact, details: String = "") { self.status = status self.details = details } } -struct IntegrityResults { - var surfaceNormalStatusDetails: IntegrityStatusDetails = IntegrityStatusDetails() - var boundingBoxAreaStatusDetails: IntegrityStatusDetails = IntegrityStatusDetails() - var boundingBoxSurfaceNormalStatusDetails: IntegrityStatusDetails = IntegrityStatusDetails() +public struct IntegrityResults { + public var surfaceNormalStatusDetails: IntegrityStatusDetails = IntegrityStatusDetails() + public var boundingBoxAreaStatusDetails: IntegrityStatusDetails = IntegrityStatusDetails() + public var boundingBoxSurfaceNormalStatusDetails: IntegrityStatusDetails = IntegrityStatusDetails() } -struct SurfaceIntegrityProcessor { +public struct SurfaceIntegrityProcessor { let device: MTLDevice let commandQueue: MTLCommandQueue @@ -82,7 +82,7 @@ struct SurfaceIntegrityProcessor { let ciContext: CIContext - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw SurfaceIntegrityProcessorError.metalInitializationFailed @@ -123,7 +123,7 @@ struct SurfaceIntegrityProcessor { /** Main function to get surface integrity results from mesh data. Calls individual integrity assessment functions and aggregates results. 
*/ - func getIntegrityResultsFromMesh( + public func getIntegrityResultsFromMesh( meshTriangles: [MeshTriangle], plane: Plane, damageDetectionResults: [DamageDetectionResult], @@ -156,7 +156,7 @@ struct SurfaceIntegrityProcessor { return integrityResults } - func getIntegrityResultsFromMeshCPU( + public func getIntegrityResultsFromMeshCPU( meshPolygons: [MeshPolygon], plane: Plane, damageDetectionResults: [DamageDetectionResult], @@ -192,7 +192,7 @@ struct SurfaceIntegrityProcessor { /** Main function to get surface integrity results from image data. Calls individual integrity assessment functions and aggregates results. */ - func getIntegrityResultsFromImage( + public func getIntegrityResultsFromImage( worldPointsGrid: WorldPointsGrid, plane: Plane, surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, @@ -232,7 +232,7 @@ struct SurfaceIntegrityProcessor { /** CPU implementation for surface integrity assessment from image. Used for benchmarking and fallback when Metal processing is not available. */ - func getIntegrityResultsFromImageCPU( + public func getIntegrityResultsFromImageCPU( worldPointsGrid: WorldPointsGrid, plane: Plane, surfaceNormalsForPointsGrid: SurfaceNormalsForPointsGrid, @@ -279,7 +279,7 @@ struct SurfaceIntegrityProcessor { /** Get angular deviation between normalized vectors v1 and v2 in degrees. 
*/ - func getAngularDeviation(_ nv1: simd_float3, _ nv2: simd_float3) -> Float { + public func getAngularDeviation(_ nv1: simd_float3, _ nv2: simd_float3) -> Float { let dotProduct = simd_dot(nv1, nv2) let angleInRadians = acos(dotProduct) let angleInDegrees = angleInRadians * (180.0 / .pi) diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceNormals/SurfaceNormals.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceNormals/SurfaceNormals.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Projection/SurfaceNormals/SurfaceNormals.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceNormals/SurfaceNormals.metal diff --git a/IOSAccessAssessment/ComputerVision/Projection/SurfaceNormals/SurfaceNormalsProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceNormals/SurfaceNormalsProcessor.swift similarity index 94% rename from IOSAccessAssessment/ComputerVision/Projection/SurfaceNormals/SurfaceNormalsProcessor.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceNormals/SurfaceNormalsProcessor.swift index e22bd5a8..b8d95cd6 100644 --- a/IOSAccessAssessment/ComputerVision/Projection/SurfaceNormals/SurfaceNormalsProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceNormals/SurfaceNormalsProcessor.swift @@ -9,27 +9,28 @@ import ARKit import RealityKit import MetalKit import simd +import PointNMapShaderTypes -struct SurfaceNormalsForPointsGrid: Sendable { - let width: Int - let height: Int - var data: [SurfaceNormalsForPointsGridCell] +public struct SurfaceNormalsForPointsGrid: Sendable { + public let width: Int + public let height: Int + public var data: [SurfaceNormalsForPointsGridCell] /// TODO: Handle out-of-bounds access more robustly, possibly with a custom error or by returning an optional. 
- subscript(x: Int, y: Int) -> SurfaceNormalsForPointsGridCell { + public subscript(x: Int, y: Int) -> SurfaceNormalsForPointsGridCell { get { return data[y * width + x] } set { data[y * width + x] = newValue } } } -enum SurfaceNormalsProcessorError: Error, LocalizedError { +public enum SurfaceNormalsProcessorError: Error, LocalizedError { case metalInitializationFailed case metalPipelineCreationError case metalPipelineBlitEncoderError case invalidProjectedPlaneVectors case unableToProcessBufferData - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalInitializationFailed: return "Failed to initialize Metal resources." @@ -45,7 +46,7 @@ enum SurfaceNormalsProcessorError: Error, LocalizedError { } } -struct SurfaceNormalsProcessor { +public struct SurfaceNormalsProcessor { let device: MTLDevice let commandQueue: MTLCommandQueue @@ -55,7 +56,7 @@ struct SurfaceNormalsProcessor { let ciContext: CIContext - init() throws { + public init() throws { guard let device = MTLCreateSystemDefaultDevice(), let commandQueue = device.makeCommandQueue() else { throw SurfaceNormalsProcessorError.metalInitializationFailed @@ -66,11 +67,12 @@ struct SurfaceNormalsProcessor { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let computeKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "computeSurfaceNormals"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let computeKernelFunction = library.makeFunction(name: "computeSurfaceNormals"), let computePipeline = try? 
device.makeComputePipelineState(function: computeKernelFunction) else { throw SurfaceNormalsProcessorError.metalInitializationFailed } - guard let boundsKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "getSurfaceNormalsWithinBounds"), + guard let boundsKernelFunction = library.makeFunction(name: "getSurfaceNormalsWithinBounds"), let boundsPipeline = try? device.makeComputePipelineState(function: boundsKernelFunction) else { throw SurfaceNormalsProcessorError.metalInitializationFailed } @@ -96,7 +98,7 @@ struct SurfaceNormalsProcessor { - Note: The sampling is done in DDA-style (Digital Differential Analyzer) to ensure that the neighbors are equidistant and opposite to the point in the grid. */ - func getSurfaceNormalsFromWorldPoints( + public func getSurfaceNormalsFromWorldPoints( worldPointsGrid: WorldPointsGrid, plane: Plane, projectedPlane: ProjectedPlane, @@ -201,7 +203,7 @@ struct SurfaceNormalsProcessor { return surfaceNormalsGrid } - func getSurfaceNormalsFromWorldPointsCPU( + public func getSurfaceNormalsFromWorldPointsCPU( worldPointsGrid: WorldPointsGrid, plane: Plane, projectedPlane: ProjectedPlane, @@ -306,7 +308,7 @@ struct SurfaceNormalsProcessor { return dir / maxComp } - func debugSurfaceNormalsFromWorldPoints(surfaceNormalsGrid: SurfaceNormalsForPointsGrid) { + public func debugSurfaceNormalsFromWorldPoints(surfaceNormalsGrid: SurfaceNormalsForPointsGrid) { var validPointCount = 0 var validSurfaceNormalCount = 0 let upVector = simd_float3(0, 1, 0) diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift index b073b776..b5710e5c 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/WorldPoints/WorldPointsProcessor.swift @@ -68,17 +68,18 @@ public struct 
WorldPointsProcessor { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let worldPointskernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "computeWorldPoints"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let worldPointskernelFunction = library.makeFunction(name: "computeWorldPoints"), let worldPointsPipeline = try? device.makeComputePipelineState(function: worldPointskernelFunction) else { throw WorldPointsProcessorError.metalInitializationFailed } self.worldPointsPipeline = worldPointsPipeline - guard let projectionKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "projectPointsToPlane"), + guard let projectionKernelFunction = library.makeFunction(name: "projectPointsToPlane"), let projectionPipeline = try? device.makeComputePipelineState(function: projectionKernelFunction) else { throw WorldPointsProcessorError.metalInitializationFailed } self.projectionPipeline = projectionPipeline - guard let gridKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "restructureWorldPointsToGrid"), + guard let gridKernelFunction = library.makeFunction(name: "restructureWorldPointsToGrid"), let gridPipeline = try? 
device.makeComputePipelineState(function: gridKernelFunction) else { throw WorldPointsProcessorError.metalInitializationFailed } diff --git a/IOSAccessAssessment/MachineLearning/Clustering/ConnectedComponents.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Clustering/ConnectedComponents.swift similarity index 97% rename from IOSAccessAssessment/MachineLearning/Clustering/ConnectedComponents.swift rename to PointNMapShared/Sources/PointNMap/MachineLearning/Clustering/ConnectedComponents.swift index ff81a9e8..dbb81b9e 100644 --- a/IOSAccessAssessment/MachineLearning/Clustering/ConnectedComponents.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Clustering/ConnectedComponents.swift @@ -10,7 +10,7 @@ TODO: Move the logic to Metal for performance improvements. */ -struct ConnectedComponents { +public struct ConnectedComponents { private class Point: Equatable { typealias Label = Int @@ -38,7 +38,7 @@ struct ConnectedComponents { - adjacencyFunction: A function that checks if two values are adjacent (connected). If they are not, checks if their distance is within a threshold. */ - init( + public init( minimumNumberOfPoints: Int, adjacencyFunction: @escaping (Value, Value, Float) throws -> Bool, adjacencyThreshold: Float = 0.0 ) { diff --git a/IOSAccessAssessment/MachineLearning/Clustering/DBSCAN.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Clustering/DBSCAN.swift similarity index 95% rename from IOSAccessAssessment/MachineLearning/Clustering/DBSCAN.swift rename to PointNMapShared/Sources/PointNMap/MachineLearning/Clustering/DBSCAN.swift index d7860697..397278d1 100644 --- a/IOSAccessAssessment/MachineLearning/Clustering/DBSCAN.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Clustering/DBSCAN.swift @@ -15,7 +15,7 @@ TODO: Move the logic to Metal for performance improvements. 
*/ -struct DBSCAN { +public struct DBSCAN { private class Point: Equatable { typealias Label = Int @@ -46,7 +46,7 @@ struct DBSCAN { - distanceFunction: A function that computes the distance between two values. */ - init(epsilon: Double, minimumNumberOfPoints: Int, distanceFunction: @escaping (Value, Value) throws -> Double) { + public init(epsilon: Double, minimumNumberOfPoints: Int, distanceFunction: @escaping (Value, Value) throws -> Double) { self.epsilon = epsilon self.minimumNumberOfPoints = minimumNumberOfPoints self.distanceFunction = distanceFunction diff --git a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionPipeline.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/DamageDetectionPipeline.swift similarity index 77% rename from IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionPipeline.swift rename to PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/DamageDetectionPipeline.swift index 19cec93a..b46cf104 100644 --- a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/DamageDetectionPipeline.swift @@ -6,13 +6,14 @@ // import SwiftUI +import Combine import Vision import CoreML -enum DamageDetectionPipelineError: Error, LocalizedError { +public enum DamageDetectionPipelineError: Error, LocalizedError { case detectionResourcesNotConfigured - var errorDescription: String? { + public var errorDescription: String? { switch self { case .detectionResourcesNotConfigured: return "The Detection Image Pipeline resources are not configured" @@ -25,14 +26,14 @@ enum DamageDetectionPipelineError: Error, LocalizedError { This class serves as the main interface for processing damage detection requests. In the future, it can include logic for asynchronous processing, request queuing, and more. 
*/ -final class DamageDetectionPipeline: ObservableObject { +public final class DamageDetectionPipeline: ObservableObject { private var damageDetectionModelRequestProcessor: DamageDetectionModelRequestProcessor? - func configure() throws { + public func configure() throws { self.damageDetectionModelRequestProcessor = try DamageDetectionModelRequestProcessor() } - func processRequest(with cIImage: CIImage) throws -> [DamageDetectionResult] { + public func processRequest(with cIImage: CIImage) throws -> [DamageDetectionResult] { guard let damageDetectionModelRequestProcessor = self.damageDetectionModelRequestProcessor else { throw DamageDetectionPipelineError.detectionResourcesNotConfigured } diff --git a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift similarity index 93% rename from IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift rename to PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift index 2e183dfd..d299f1cf 100644 --- a/IOSAccessAssessment/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/DamageDetectionRasterizer.swift @@ -7,10 +7,9 @@ import CoreImage import UIKit -import PointNMapShared -struct DamageDetectionRasterizer { - static func rasterizeDamageDetection( +public struct DamageDetectionRasterizer { + public static func rasterizeDamageDetection( damageDetectionResults: [DamageDetectionResult], size: CGSize, boundsConfig: RasterizeConfig = RasterizeConfig(color: .red, width: 8.0) diff --git a/IOSAccessAssessment/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift similarity index 87% rename from 
IOSAccessAssessment/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift rename to PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift index f717b128..f3074c19 100644 --- a/IOSAccessAssessment/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/DamageDetection/Functions/DamageDetectionModelRequestProcessor.swift @@ -8,12 +8,13 @@ import CoreML import Vision import CoreImage +import PointNMapShaderTypes -enum DamageDetectionModelError: Error, LocalizedError { +public enum DamageDetectionModelError: Error, LocalizedError { case modelLoadingError case detectionProcessingError - var errorDescription: String? { + public var errorDescription: String? { switch self { case .modelLoadingError: return "Failed to load the damage detection model." @@ -30,14 +31,14 @@ enum DamageDetectionModelError: Error, LocalizedError { - confidence: A VNConfidence value (between 0 and 1) indicating the confidence level of the detection. - label: A String representing the label or category of the detected damage. */ -struct DamageDetectionResult: Sendable, Hashable { - var boundingBox: CGRect - var confidence: VNConfidence - var label: String +public struct DamageDetectionResult: Sendable, Hashable { + public var boundingBox: CGRect + public var confidence: VNConfidence + public var label: String /// The bounding box returned by the model is in normalized coordinates (0 to 1) with the origin at the bottom-left corner. /// To convert it to pixel coordinates, we need to adjust the y-coordinate and scale it according to the image size. 
- func getPixelCGRect(for imageSize: CGSize) -> CGRect { + public func getPixelCGRect(for imageSize: CGSize) -> CGRect { return CGRect( x: CGFloat(boundingBox.origin.x) * imageSize.width, y: CGFloat(1.0 - boundingBox.origin.y - boundingBox.size.height) * imageSize.height, @@ -49,7 +50,7 @@ struct DamageDetectionResult: Sendable, Hashable { /** This function calculates the parameters needed to get the bounds of the detected damage in pixel coordinates, given the size of the image. */ - func getBoundsParams(for imageSize: CGSize) -> BoundsParams { + public func getBoundsParams(for imageSize: CGSize) -> BoundsParams { let pixelRect: CGRect = self.getPixelCGRect(for: imageSize) let minX = max(Float(pixelRect.minX), 0) let maxX = min(Float(pixelRect.maxX), Float(imageSize.width) - 1) @@ -66,11 +67,11 @@ struct DamageDetectionResult: Sendable, Hashable { - CGRect: (x, y, width, height) in normalized coordinates (0 to 1), where x and y represent the bottom-left corner of the bounding box relative to the image dimensions. 
- Confidence Score: A value between 0 and 1 indicating the confidence level of the detection */ -struct DamageDetectionModelRequestProcessor { - var visionModel: VNCoreMLModel +public struct DamageDetectionModelRequestProcessor { + public var visionModel: VNCoreMLModel - init() throws { - guard let modelURL = SharedAppConstants.DamageDetectionConstants.damageDetectionModelURL else { + public init() throws { + guard let modelURL = PointNMapConstants.DamageDetectionConstants.damageDetectionModelURL else { throw DamageDetectionModelError.modelLoadingError } let configuration: MLModelConfiguration = MLModelConfiguration() @@ -83,7 +84,7 @@ struct DamageDetectionModelRequestProcessor { request.imageCropAndScaleOption = .scaleFill } - func processDetectionRequest( + public func processDetectionRequest( with cIImage: CIImage, orientation: CGImagePropertyOrientation = .up ) throws -> [DamageDetectionResult] { let detectionRequest = VNCoreMLRequest(model: self.visionModel) diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift similarity index 86% rename from IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift rename to PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift index 785f9ca5..a45ec913 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift @@ -9,11 +9,11 @@ import Vision import CoreImage import PointNMapShared -enum SegmentationModelError: Error, LocalizedError { +public enum SegmentationModelError: Error, LocalizedError { case modelLoadingError case segmentationProcessingError - var errorDescription: String? 
{ + public var errorDescription: String? { switch self { case .modelLoadingError: return "Failed to load the segmentation model." @@ -27,12 +27,12 @@ enum SegmentationModelError: Error, LocalizedError { A struct to handle the segmentation model request processing. Processes the segmentation model request and returns the segmentation mask as well as the segmented indices. */ -struct SegmentationModelRequestProcessor { - var visionModel: VNCoreMLModel +public struct SegmentationModelRequestProcessor { + public var visionModel: VNCoreMLModel - var selectedClasses: [AccessibilityFeatureClass] = [] + public var selectedClasses: [AccessibilityFeatureClass] = [] - init(selectedClasses: [AccessibilityFeatureClass]) throws { + public init(selectedClasses: [AccessibilityFeatureClass]) throws { guard let modelURL = SharedAppConstants.SelectedAccessibilityFeatureConfig.modelURL else { throw SegmentationModelError.modelLoadingError } @@ -42,7 +42,7 @@ struct SegmentationModelRequestProcessor { self.selectedClasses = selectedClasses } - mutating func setSelectedClasses(_ classes: [AccessibilityFeatureClass]) { + public mutating func setSelectedClasses(_ classes: [AccessibilityFeatureClass]) { self.selectedClasses = classes } @@ -51,7 +51,7 @@ struct SegmentationModelRequestProcessor { request.imageCropAndScaleOption = .scaleFill } - func processSegmentationRequest( + public func processSegmentationRequest( with cIImage: CIImage, orientation: CGImagePropertyOrientation = .up ) throws -> (segmentationImage: CIImage, segmentedClasses: [AccessibilityFeatureClass]) { let segmentationRequest = VNCoreMLRequest(model: self.visionModel) diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift similarity index 92% rename from IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift rename to 
PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift index b6a5c361..90315373 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationARPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift @@ -13,7 +13,7 @@ import OrderedCollections import simd import PointNMapShared -enum SegmentationARPipelineError: Error, LocalizedError { +public enum SegmentationARPipelineError: Error, LocalizedError { case isProcessingTrue case emptySegmentation case segmentationResourcesNotConfigured @@ -22,7 +22,7 @@ enum SegmentationARPipelineError: Error, LocalizedError { case invalidTransform case unexpectedError - var errorDescription: String? { + public var errorDescription: String? { switch self { case .isProcessingTrue: return "The Segmentation Image Pipeline is already processing a request." @@ -42,14 +42,14 @@ enum SegmentationARPipelineError: Error, LocalizedError { } } -struct SegmentationARPipelineResults { - var segmentationImage: CIImage - var originalSegmentationImage: CIImage - var segmentationColorImage: CIImage - var segmentedClasses: [AccessibilityFeatureClass] - var detectedFeatureMap: [UUID: DetectedAccessibilityFeature] +public struct SegmentationARPipelineResults { + public var segmentationImage: CIImage + public var originalSegmentationImage: CIImage + public var segmentationColorImage: CIImage + public var segmentedClasses: [AccessibilityFeatureClass] + public var detectedFeatureMap: [UUID: DetectedAccessibilityFeature] - init(segmentationImage: CIImage, segmentationColorImage: CIImage, + public init(segmentationImage: CIImage, segmentationColorImage: CIImage, segmentedClasses: [AccessibilityFeatureClass], detectedFeatureMap: [UUID: DetectedAccessibilityFeature], originalSegmentationImage: CIImage ) { @@ -66,7 +66,7 @@ struct SegmentationARPipelineResults { TODO: Rename this to `SegmentationImagePipeline` since AR is not a necessary component here. 
*/ -final class SegmentationARPipeline: ObservableObject { +public final class SegmentationARPipeline: ObservableObject { private var isProcessing = false private var currentTask: Task? private var timeoutInSeconds: Double = 1.0 @@ -87,7 +87,7 @@ final class SegmentationARPipeline: ObservableObject { private var segmentationModelRequestProcessor: SegmentationModelRequestProcessor? private var contourRequestProcessor: ContourRequestProcessor? - func configure() throws { + public func configure() throws { self.segmentationModelRequestProcessor = try SegmentationModelRequestProcessor( selectedClasses: self.selectedClasses) self.contourRequestProcessor = try ContourRequestProcessor( @@ -98,12 +98,12 @@ final class SegmentationARPipeline: ObservableObject { self.depthFilter = try DepthFilter() } - func reset() { + public func reset() { self.isProcessing = false self.setSelectedClasses([]) } - func setSelectedClasses(_ selectedClasses: [AccessibilityFeatureClass]) { + public func setSelectedClasses(_ selectedClasses: [AccessibilityFeatureClass]) { self.selectedClasses = selectedClasses self.selectedClassLabels = selectedClasses.map { $0.labelValue } self.selectedClassGrayscaleValues = selectedClasses.map { $0.grayscaleValue } @@ -116,7 +116,7 @@ final class SegmentationARPipeline: ObservableObject { /** Function to process the segmentation request with the given CIImage. */ - func processRequest( + public func processRequest( with cIImage: CIImage, depthImage: CIImage? 
= nil, highPriority: Bool = false ) async throws -> SegmentationARPipelineResults { diff --git a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift similarity index 93% rename from IOSAccessAssessment/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift rename to PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift index 1fb0c0ee..1ef43670 100644 --- a/IOSAccessAssessment/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift @@ -12,7 +12,7 @@ import OrderedCollections import simd import PointNMapShared -enum SegmentationAnnotationPipelineError: Error, LocalizedError { +public enum SegmentationAnnotationPipelineError: Error, LocalizedError { case isProcessingTrue case homographyTransformFilterNil case imageHistoryEmpty @@ -21,7 +21,7 @@ enum SegmentationAnnotationPipelineError: Error, LocalizedError { case homographyRequestProcessorNil case invalidUnionImageResult - var errorDescription: String? { + public var errorDescription: String? { switch self { case .isProcessingTrue: return "The SegmentationAnnotationPipeline is already processing a request." @@ -50,9 +50,9 @@ enum SegmentationAnnotationPipelineError: Error, LocalizedError { MARK: Also, instead of runnin the whole pipeline at once, this class needs to run individual steps of the pipeline separately, as they occur at different steps in the app flow. Hence, it gives full control to the caller to run the steps as needed. */ -final class SegmentationAnnotationPipeline: ObservableObject { +public final class SegmentationAnnotationPipeline: ObservableObject { /// This will be useful only when we are using the pipeline in asynchronous mode. 
- var isProcessing = false + public var isProcessing = false // private var currentTask: Task? // private var timeoutInSeconds: Double = 3.0 @@ -62,10 +62,10 @@ final class SegmentationAnnotationPipeline: ObservableObject { private var selectedClassColors: [CIColor] = [] // TODO: Check what would be the appropriate value for this - var contourEpsilon: Float = 0.01 + public var contourEpsilon: Float = 0.01 // TODO: Check what would be the appropriate value for this // For normalized points - var perimeterThreshold: Float = 0.2 + public var perimeterThreshold: Float = 0.2 private var contourRequestProcessor: ContourRequestProcessor? private var homographyRequestProcessor: HomographyRequestProcessor? @@ -75,7 +75,7 @@ final class SegmentationAnnotationPipeline: ObservableObject { /// TODO: Replace with the global Metal context private let context = CIContext() - func configure() throws { + public func configure() throws { self.contourRequestProcessor = try ContourRequestProcessor( contourEpsilon: self.contourEpsilon, perimeterThreshold: self.perimeterThreshold, @@ -86,12 +86,12 @@ final class SegmentationAnnotationPipeline: ObservableObject { self.unionOfMasksProcessor = try UnionOfMasksProcessor() } - func reset() { + public func reset() { self.isProcessing = false self.setSelectedClasses([]) } - func setSelectedClasses(_ selectedClasses: [AccessibilityFeatureClass]) { + public func setSelectedClasses(_ selectedClasses: [AccessibilityFeatureClass]) { self.selectedClasses = selectedClasses self.selectedClassLabels = selectedClasses.map { $0.labelValue } self.selectedClassGrayscaleValues = selectedClasses.map { $0.grayscaleValue } @@ -100,7 +100,7 @@ final class SegmentationAnnotationPipeline: ObservableObject { self.contourRequestProcessor?.setSelectedClasses(self.selectedClasses) } - func processAlignImageDataRequest( + public func processAlignImageDataRequest( currentCaptureData: CaptureImageData, captureDataHistory: [CaptureImageData] ) throws -> [CIImage] { if 
self.isProcessing { @@ -168,14 +168,14 @@ final class SegmentationAnnotationPipeline: ObservableObject { return homographyTransform } - func setupUnionOfMasksRequest(alignedSegmentationLabelImages: [CIImage]) throws { + public func setupUnionOfMasksRequest(alignedSegmentationLabelImages: [CIImage]) throws { guard let unionOfMasksProcessor = self.unionOfMasksProcessor else { throw SegmentationAnnotationPipelineError.unionOfMasksProcessorNil } try unionOfMasksProcessor.setArrayTexture(images: alignedSegmentationLabelImages) } - func processUnionOfMasksRequest( + public func processUnionOfMasksRequest( accessibilityFeatureClass: AccessibilityFeatureClass, orientation: CGImagePropertyOrientation = .up ) throws -> CIImage { @@ -207,7 +207,7 @@ final class SegmentationAnnotationPipeline: ObservableObject { return unionImage } - func processContourRequest( + public func processContourRequest( segmentationLabelImage: CIImage, accessibilityFeatureClass: AccessibilityFeatureClass, orientation: CGImagePropertyOrientation = .up ) throws -> [DetectedAccessibilityFeature] { diff --git a/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift b/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift index 9a147d74..f35d4c59 100644 --- a/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift +++ b/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift @@ -21,7 +21,9 @@ public struct PointNMapConstants { public struct DamageDetectionConstants { /// Model-specific SharedAppConstants - public static let damageDetectionModelURL: URL? = Bundle.main.url(forResource: "v8n_175_16_960", withExtension: "mlmodelc") + public static let damageDetectionModelURL: URL? 
= PointNMapSharedResources.bundle.url( + forResource: "v8n_175_16_960", withExtension: "mlmodelc" + ) public static let inputSize: CGSize = CGSize(width: 640, height: 640) } From ab15a85481e51020cb8984cf310bf27aff48dd13 Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 15:27:40 -0700 Subject: [PATCH 09/14] Add attribute estimation related code to frameworks --- IOSAccessAssessment.xcodeproj/project.pbxproj | 76 ----------------- .../AttributeEstimationPipeline.swift | 82 +++++++++---------- .../OtherAttributeExtensionLegacy.swift | 3 +- .../Extensions/Location/LocationDetails.swift | 22 ++--- .../Location/LocationExtension.swift | 6 +- .../Location/LocationFromImageExtension.swift | 2 +- .../Location/LocationFromMeshExtension.swift | 2 +- .../OtherAttributes/CrossSlopeExtension.swift | 3 +- .../RunninSlopeExtension.swift | 3 +- .../SurfaceIntegrityExtension.swift | 3 +- .../OtherAttributes/WidthExtension.swift | 3 +- .../Extensions/UtilityExtension.swift | 5 +- 12 files changed, 65 insertions(+), 145 deletions(-) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift (85%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift (99%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift (87%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift (98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift (99%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift 
(98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift (98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift (98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift (99%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift (98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift (98%) diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index e442425f..3f58c8d8 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -29,10 +29,6 @@ A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */; }; A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */; }; A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */; }; - A30F59CE2F7EFAC000EE7804 /* WidthExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59CD2F7EFABC00EE7804 /* WidthExtension.swift */; }; - A30F59D02F7EFAC700EE7804 /* RunninSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */; }; - A30F59D22F7EFACD00EE7804 /* 
CrossSlopeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */; }; - A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */; }; A312FD862FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; A312FD902FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; A312FD912FA3391C0044808E /* PointNMapShared.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; @@ -53,8 +49,6 @@ A32D66532F7C3F2F00DC4173 /* OSWMultiPolygon.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */; }; A33EB5AB2F76080E008ABFB7 /* APIEndpoint.swift in Sources */ = {isa = PBXBuildFile; fileRef = A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */; }; A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */; }; - A3431E022F26FA2C00B96610 /* LocationExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E012F26FA2700B96610 /* LocationExtension.swift */; }; - A3431E042F26FA7200B96610 /* OtherAttributeExtensionLegacy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */; }; A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D72FA1A6FA003157B0 /* SafeDeque.swift */; }; A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509E02FA31DCC003157B0 /* 
LocationHelpersExtension.swift */; }; A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */; }; @@ -70,7 +64,6 @@ A35BB2862DC30386009A3FE0 /* CameraOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35BB2852DC30383009A3FE0 /* CameraOrientation.swift */; }; A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */; }; A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */; }; - A35E05182EDEA476003C26CF /* AttributeEstimationPipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05172EDEA470003C26CF /* AttributeEstimationPipeline.swift */; }; A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05192EDFB015003C26CF /* OSMPayload.swift */; }; A35E051C2EDFB094003C26CF /* OSMNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E051B2EDFB093003C26CF /* OSMNode.swift */; }; A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E051D2EDFB099003C26CF /* OSMWay.swift */; }; @@ -113,10 +106,6 @@ A3EE6E4E2F5A258B00F515E6 /* TestCameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E4D2F5A258700F515E6 /* TestCameraManager.swift */; }; A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E4F2F5A3EF100F515E6 /* TestCameraViewController.swift */; }; A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E512F5F9F1100F515E6 /* APITransmissionHelpers.swift */; }; - A3EE6E542F67A41100F515E6 /* UtilityExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E532F67A40B00F515E6 /* UtilityExtension.swift */; }; - 
A3EE6EFC2F69285600F515E6 /* LocationFromImageExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6EFB2F69285100F515E6 /* LocationFromImageExtension.swift */; }; - A3EE6EFE2F69287F00F515E6 /* LocationFromMeshExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6EFD2F69287A00F515E6 /* LocationFromMeshExtension.swift */; }; - A3EE6F002F6A29F500F515E6 /* LocationDetails.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6EFF2F6A29F300F515E6 /* LocationDetails.swift */; }; A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3F27DB22D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage */; }; A3F38C4C2D38A2C700900547 /* DepthModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3F38C4B2D38A2C500900547 /* DepthModel.swift */; }; A3FCC2FB2DA4E1880037AB43 /* OrderedCollections in Frameworks */ = {isa = PBXBuildFile; productRef = A3FCC2FA2DA4E1880037AB43 /* OrderedCollections */; }; @@ -237,10 +226,6 @@ A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureAttributeExtension.swift; sourceTree = ""; }; A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditableAccessibilityFeature.swift; sourceTree = ""; }; A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MappedAccessibilityFeature.swift; sourceTree = ""; }; - A30F59CD2F7EFABC00EE7804 /* WidthExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WidthExtension.swift; sourceTree = ""; }; - A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunninSlopeExtension.swift; sourceTree = ""; }; - A30F59D12F7EFACA00EE7804 /* 
CrossSlopeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CrossSlopeExtension.swift; sourceTree = ""; }; - A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SurfaceIntegrityExtension.swift; sourceTree = ""; }; A312FD7B2FA3391B0044808E /* PointNMapShared.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShared.framework; sourceTree = BUILT_PRODUCTS_DIR; }; A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = PointNMapSharedTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; A312FDCC2FA3DBD10044808E /* DepthMapProcessorExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthMapProcessorExtension.swift; sourceTree = ""; }; @@ -258,8 +243,6 @@ A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWMultiPolygon.swift; sourceTree = ""; }; A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEndpoint.swift; sourceTree = ""; }; A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEnvironment.swift; sourceTree = ""; }; - A3431E012F26FA2700B96610 /* LocationExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationExtension.swift; sourceTree = ""; }; - A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OtherAttributeExtensionLegacy.swift; sourceTree = ""; }; A34509D72FA1A6FA003157B0 /* SafeDeque.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SafeDeque.swift; sourceTree = ""; }; 
A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationHelpersExtension.swift; sourceTree = ""; }; A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2.mlpackage; sourceTree = ""; }; @@ -275,7 +258,6 @@ A35BB2852DC30383009A3FE0 /* CameraOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraOrientation.swift; sourceTree = ""; }; A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InvalidContentView.swift; sourceTree = ""; }; A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIChangesetUploadController.swift; sourceTree = ""; }; - A35E05172EDEA470003C26CF /* AttributeEstimationPipeline.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttributeEstimationPipeline.swift; sourceTree = ""; }; A35E05192EDFB015003C26CF /* OSMPayload.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMPayload.swift; sourceTree = ""; }; A35E051B2EDFB093003C26CF /* OSMNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMNode.swift; sourceTree = ""; }; A35E051D2EDFB099003C26CF /* OSMWay.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMWay.swift; sourceTree = ""; }; @@ -321,10 +303,6 @@ A3EE6E4D2F5A258700F515E6 /* TestCameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestCameraManager.swift; sourceTree = ""; }; A3EE6E4F2F5A3EF100F515E6 /* TestCameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestCameraViewController.swift; sourceTree = ""; }; A3EE6E512F5F9F1100F515E6 /* APITransmissionHelpers.swift 
*/ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APITransmissionHelpers.swift; sourceTree = ""; }; - A3EE6E532F67A40B00F515E6 /* UtilityExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UtilityExtension.swift; sourceTree = ""; }; - A3EE6EFB2F69285100F515E6 /* LocationFromImageExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationFromImageExtension.swift; sourceTree = ""; }; - A3EE6EFD2F69287A00F515E6 /* LocationFromMeshExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationFromMeshExtension.swift; sourceTree = ""; }; - A3EE6EFF2F6A29F300F515E6 /* LocationDetails.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationDetails.swift; sourceTree = ""; }; A3F27DB22D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = DepthAnythingV2SmallF16.mlpackage; sourceTree = ""; }; A3F38C4B2D38A2C500900547 /* DepthModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthModel.swift; sourceTree = ""; }; A3FE16602E18BA5600DAE5BE /* RGBCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RGBCoder.swift; sourceTree = ""; }; @@ -526,7 +504,6 @@ children = ( A33EB5AD2F761BFC008ABFB7 /* ModelZoo */, A3F38C4A2D38A2BF00900547 /* DepthEstimation */, - DAA7F8BE2CA683DC003666D8 /* Segmentation */, ); path = MachineLearning; sourceTree = ""; @@ -636,28 +613,6 @@ path = Definitions; sourceTree = ""; }; - A30F59CA2F7EFA8A00EE7804 /* OtherAttributes */ = { - isa = PBXGroup; - children = ( - A30F59CD2F7EFABC00EE7804 /* WidthExtension.swift */, - A30F59CF2F7EFAC400EE7804 /* RunninSlopeExtension.swift */, - A30F59D12F7EFACA00EE7804 /* CrossSlopeExtension.swift */, - A30F59D32F7EFAD100EE7804 /* SurfaceIntegrityExtension.swift */, - ); - path = OtherAttributes; - sourceTree 
= ""; - }; - A30F59CB2F7EFA9500EE7804 /* Location */ = { - isa = PBXGroup; - children = ( - A3431E012F26FA2700B96610 /* LocationExtension.swift */, - A3EE6EFB2F69285100F515E6 /* LocationFromImageExtension.swift */, - A3EE6EFD2F69287A00F515E6 /* LocationFromMeshExtension.swift */, - A3EE6EFF2F6A29F300F515E6 /* LocationDetails.swift */, - ); - path = Location; - sourceTree = ""; - }; A312FE1F2FA3EC710044808E /* Frameworks */ = { isa = PBXGroup; children = ( @@ -675,14 +630,6 @@ path = UI; sourceTree = ""; }; - A3247B2C2F31325600F915A9 /* Archive */ = { - isa = PBXGroup; - children = ( - A3431E032F26FA6B00B96610 /* OtherAttributeExtensionLegacy.swift */, - ); - path = Archive; - sourceTree = ""; - }; A329433D2EE7BF0200C4C1BC /* Others */ = { isa = PBXGroup; children = ( @@ -724,10 +671,6 @@ A3431E002F26FA0C00B96610 /* Extensions */ = { isa = PBXGroup; children = ( - A3247B2C2F31325600F915A9 /* Archive */, - A30F59CB2F7EFA9500EE7804 /* Location */, - A30F59CA2F7EFA8A00EE7804 /* OtherAttributes */, - A3EE6E532F67A40B00F515E6 /* UtilityExtension.swift */, ); path = Extensions; sourceTree = ""; @@ -765,7 +708,6 @@ isa = PBXGroup; children = ( A3431E002F26FA0C00B96610 /* Extensions */, - A35E05172EDEA470003C26CF /* AttributeEstimationPipeline.swift */, ); path = AttributeEstimation; sourceTree = ""; @@ -1072,13 +1014,6 @@ path = ViewModel; sourceTree = ""; }; - DAA7F8BE2CA683DC003666D8 /* Segmentation */ = { - isa = PBXGroup; - children = ( - ); - path = Segmentation; - sourceTree = ""; - }; DAA7F8C62CA76514003666D8 /* Image */ = { isa = PBXGroup; children = ( @@ -1370,13 +1305,10 @@ A308015F2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift in Sources */, A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */, A3E6D2332F464A2D00DAF88E /* PngDecoder.mm in Sources */, - A3431E022F26FA2C00B96610 /* LocationExtension.swift in Sources */, - A30F59D02F7EFAC700EE7804 /* RunninSlopeExtension.swift in Sources */, A30801602EC09BB700B1BA3A /* VOCClassConfig.swift in Sources 
*/, A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */, A30801612EC09BB700B1BA3A /* CocoCustom53ClassConfig.swift in Sources */, A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */, - A30F59D42F7EFAD400EE7804 /* SurfaceIntegrityExtension.swift in Sources */, A3EE6E4A2F580D6200F515E6 /* TestCameraView.swift in Sources */, A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */, A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */, @@ -1400,21 +1332,18 @@ A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */, A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */, A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */, - A35E05182EDEA476003C26CF /* AttributeEstimationPipeline.swift in Sources */, CAA947792CDE700A000C6918 /* AuthService.swift in Sources */, A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */, A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */, A37E72182ED95D0600CFE4EF /* CapturedMeshDefinitions.swift in Sources */, A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */, A3F38C4C2D38A2C700900547 /* DepthModel.swift in Sources */, - A3EE6EFC2F69285600F515E6 /* LocationFromImageExtension.swift in Sources */, 55659C102BB7863F0094DF01 /* SetupView.swift in Sources */, A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */, A3EE6E482F580D0D00F515E6 /* TestListView.swift in Sources */, A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */, DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources */, A3EE6E4C2F580E2B00F515E6 /* DatasetLister.swift in Sources */, - A3EE6EFE2F69287F00F515E6 /* LocationFromMeshExtension.swift in Sources */, A3FE16652E18C54000DAE5BE /* CameraTransformCoder.swift in Sources */, A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */, CAA9477B2CDE70D9000C6918 /* KeychainService.swift in Sources */, @@ -1423,7 +1352,6 @@ 
A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */, CAA947762CDE6FBD000C6918 /* LoginView.swift in Sources */, 3222F91A2B622DFD0019A079 /* IOSAccessAssessmentApp.swift in Sources */, - A3EE6F002F6A29F500F515E6 /* LocationDetails.swift in Sources */, A32943572EE81BF700C4C1BC /* OSWLineString.swift in Sources */, A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */, A32943532EE814A700C4C1BC /* OSWElement.swift in Sources */, @@ -1434,7 +1362,6 @@ A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */, A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */, A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */, - A3431E042F26FA7200B96610 /* OtherAttributeExtensionLegacy.swift in Sources */, A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */, A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */, A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */, @@ -1445,14 +1372,12 @@ A37E3E9B2EFB8F7500B07B77 /* HeadingCoder.swift in Sources */, A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */, A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */, - A30F59CE2F7EFAC000EE7804 /* WidthExtension.swift in Sources */, DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */, A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsCoder.swift in Sources */, A35547152EC198A600F43AFD /* ContourRequestProcessor.swift in Sources */, A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */, A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */, A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, - A3EE6E542F67A41100F515E6 /* UtilityExtension.swift in Sources */, A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */, @@ -1468,7 +1393,6 @@ 
A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */, A35547CA2EC2045F00F43AFD /* CapturedMeshSnapshot.swift in Sources */, DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */, - A30F59D22F7EFACD00EE7804 /* CrossSlopeExtension.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift similarity index 85% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift index 65f0cb14..cd52d03b 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift @@ -10,7 +10,7 @@ import CoreLocation import MapKit import PointNMapShared -enum AttributeEstimationPipelineError: Error, LocalizedError { +public enum AttributeEstimationPipelineError: Error, LocalizedError { case configurationError(String) case missingCaptureData case missingDepthImage @@ -18,7 +18,7 @@ enum AttributeEstimationPipelineError: Error, LocalizedError { case invalidAttributeData case attributeAssignmentError - var errorDescription: String? { + public var errorDescription: String? { switch self { case .configurationError(let missingDetail): return NSLocalizedString("Error occurred during pipeline configuration. 
Details: \(missingDetail)", comment: "") @@ -36,54 +36,54 @@ enum AttributeEstimationPipelineError: Error, LocalizedError { } } -struct LocationRequestResult: Sendable { - let locationDetails: LocationDetails - let locationDelta: SIMD2 - let lidarDepth: Float +public struct LocationRequestResult: Sendable { + public let locationDetails: LocationDetails + public let locationDelta: SIMD2 + public let lidarDepth: Float } -enum AttributeEstimationPipelineConstants { - enum Texts { - static let depthMapProcessorKey = "Depth Map Processor" - static let localizationProcessorKey = "Localization Processor" - static let planeProcessorKey = "Plane Processor" - static let planeAttributeProcessorKey = "Plane Attribute Processor" - static let worldPointsProcessorKey = "World Points Processor" +public enum AttributeEstimationPipelineConstants { + public enum Texts { + public static let depthMapProcessorKey = "Depth Map Processor" + public static let localizationProcessorKey = "Localization Processor" + public static let planeProcessorKey = "Plane Processor" + public static let planeAttributeProcessorKey = "Plane Attribute Processor" + public static let worldPointsProcessorKey = "World Points Processor" } } /** An attribute estimation pipeline that processes editable accessibility features to estimate their attributes. */ -class AttributeEstimationPipeline: ObservableObject { - struct PrerequisiteCache: Sendable { - var worldPoints: [WorldPoint]? = nil - var worldPointsGrid: WorldPointsGrid? = nil - var pointAlignedPlane: Plane? = nil - var pointProjectedPlane: ProjectedPlane? = nil - var meshContents: MeshContents? = nil - var meshPolygons: [MeshPolygon]? = nil - var meshTriangles: [MeshTriangle]? = nil - var meshAlignedPlane: Plane? = nil - var meshProjectedPlane: ProjectedPlane? = nil +public class AttributeEstimationPipeline: ObservableObject { + public struct PrerequisiteCache: Sendable { + public var worldPoints: [WorldPoint]? 
= nil + public var worldPointsGrid: WorldPointsGrid? = nil + public var pointAlignedPlane: Plane? = nil + public var pointProjectedPlane: ProjectedPlane? = nil + public var meshContents: MeshContents? = nil + public var meshPolygons: [MeshPolygon]? = nil + public var meshTriangles: [MeshTriangle]? = nil + public var meshAlignedPlane: Plane? = nil + public var meshProjectedPlane: ProjectedPlane? = nil } - var captureImageData: (any CaptureImageDataProtocol)? - var captureMeshData: (any CaptureMeshDataProtocol)? + public var captureImageData: (any CaptureImageDataProtocol)? + public var captureMeshData: (any CaptureMeshDataProtocol)? - var depthMapProcessor: DepthMapProcessor? - var localizationProcessor: LocalizationProcessor? - var worldPointsProcessor: WorldPointsProcessor? - var planeProcessor: PlaneProcessor? - var planeAttributeProcessor: PlaneAttributeProcessor? - var damageDetectionPipeline: DamageDetectionPipeline? - var surfaceNormalsProcessor: SurfaceNormalsProcessor? - var surfaceIntegrityProcessor: SurfaceIntegrityProcessor? + public var depthMapProcessor: DepthMapProcessor? + public var localizationProcessor: LocalizationProcessor? + public var worldPointsProcessor: WorldPointsProcessor? + public var planeProcessor: PlaneProcessor? + public var planeAttributeProcessor: PlaneAttributeProcessor? + public var damageDetectionPipeline: DamageDetectionPipeline? + public var surfaceNormalsProcessor: SurfaceNormalsProcessor? + public var surfaceIntegrityProcessor: SurfaceIntegrityProcessor? - var prerequisiteCache = PrerequisiteCache() + public var prerequisiteCache = PrerequisiteCache() /// TODO: MESH PROCESSING: Add mesh data processing components when needed. - func configure( + public func configure( captureImageData: (any CaptureImageDataProtocol), captureMeshData: (any CaptureMeshDataProtocol)? 
) throws { @@ -106,7 +106,7 @@ class AttributeEstimationPipeline: ObservableObject { self.damageDetectionPipeline = damageDetectionPipeline } - func setPrerequisites( + public func setPrerequisites( accessibilityFeature: EditableAccessibilityFeature ) throws { let oswElementClass = accessibilityFeature.accessibilityFeatureClass.oswPolicy.oswElementClass @@ -159,7 +159,7 @@ class AttributeEstimationPipeline: ObservableObject { self.prerequisiteCache.meshProjectedPlane = meshProjectedPlane } - func clearPrerequisites() { + public func clearPrerequisites() { self.prerequisiteCache.worldPoints = nil self.prerequisiteCache.pointAlignedPlane = nil self.prerequisiteCache.meshContents = nil @@ -168,7 +168,7 @@ class AttributeEstimationPipeline: ObservableObject { self.prerequisiteCache.meshAlignedPlane = nil } - func processLocationRequest( + public func processLocationRequest( deviceLocation: CLLocationCoordinate2D, accessibilityFeature: EditableAccessibilityFeature ) throws { @@ -210,7 +210,7 @@ class AttributeEstimationPipeline: ObservableObject { } - func processIsExistingRequest( + public func processIsExistingRequest( deviceLocation: CLLocationCoordinate2D, mappingData: CurrentMappingData, accessibilityFeature: EditableAccessibilityFeature @@ -235,7 +235,7 @@ class AttributeEstimationPipeline: ObservableObject { accessibilityFeature.setOSWElement(oswElement: matchedElement) } - func processAttributeRequest( + public func processAttributeRequest( accessibilityFeature: EditableAccessibilityFeature ) throws { var attributeAssignmentFlagError = false diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift similarity index 99% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift rename to 
PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift index 44e3979e..a7690e0f 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift @@ -6,7 +6,6 @@ // import SwiftUI import CoreLocation -import PointNMapShared /** Extension for attribute calculation with rudimentary methods. @@ -14,7 +13,7 @@ import PointNMapShared NOTE: Legacy Implementation. Needs to be improved. */ -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateWidthLegacy( accessibilityFeature: EditableAccessibilityFeature ) throws -> AccessibilityFeatureAttribute.Value { diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift similarity index 87% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift index 99941b6f..8d00a2e1 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationDetails.swift @@ -8,26 +8,26 @@ import Foundation import CoreLocation -struct LocationElement: Codable, Sendable { - var coordinates: [CLLocationCoordinate2D] +public struct LocationElement: Codable, Sendable { + public var coordinates: [CLLocationCoordinate2D] /// TODO: We can add an optional `members` property to LocationElement that can hold child elements, and update 
the encoding/decoding logic to handle this new property appropriately. This way, we can represent the hierarchical nature of OSM data while still maintaining a clear structure for each element type. // var members: [LocationElement]? - var isWay: Bool - var isClosed: Bool + public var isWay: Bool + public var isClosed: Bool - init(coordinates: [CLLocationCoordinate2D], isWay: Bool, isClosed: Bool) { + public init(coordinates: [CLLocationCoordinate2D], isWay: Bool, isClosed: Bool) { self.coordinates = coordinates self.isWay = isWay self.isClosed = isClosed } - enum CodingKeys: String, CodingKey { + public enum CodingKeys: String, CodingKey { case coordinates case isWay case isClosed } - func encode(to encoder: Encoder) throws { + public func encode(to encoder: Encoder) throws { var container = encoder.container(keyedBy: CodingKeys.self) let encodedCoordinates = coordinates.map { coordinate in [coordinate.latitude, coordinate.longitude] @@ -35,7 +35,7 @@ struct LocationElement: Codable, Sendable { try container.encode(encodedCoordinates, forKey: .coordinates) } - init(from decoder: any Decoder) throws { + public init(from decoder: any Decoder) throws { let container = try decoder.container(keyedBy: CodingKeys.self) let decodedCoordinates = try container.decode([[Double]].self, forKey: .coordinates) self.coordinates = try decodedCoordinates.map { coordinateArray in @@ -65,13 +65,13 @@ struct LocationElement: Codable, Sendable { However, this will need modification to caller code that constructs/uses/modifies LocationDetails, because they will need to account for the possibility of nested members when working with OSM data. 
*/ public struct LocationDetails: Codable, Sendable { - var locations: [LocationElement] + public var locations: [LocationElement] - init(locations: [LocationElement]) { + public init(locations: [LocationElement]) { self.locations = locations } - enum CodingKeys: String, CodingKey { + public enum CodingKeys: String, CodingKey { case locations } diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift similarity index 98% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift index a653e64c..84d598df 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift @@ -8,7 +8,7 @@ import SwiftUI import CoreLocation import PointNMapShared -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateLocation( deviceLocation: CLLocationCoordinate2D, accessibilityFeature: EditableAccessibilityFeature @@ -45,7 +45,7 @@ extension AttributeEstimationPipeline { /** Extension for additional location processing methods. 
*/ -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateLocationFromImageForPoint( deviceLocation: CLLocationCoordinate2D, accessibilityFeature: EditableAccessibilityFeature @@ -143,7 +143,7 @@ extension AttributeEstimationPipeline { } } -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateLocationFromMeshForLineString( deviceLocation: CLLocationCoordinate2D, accessibilityFeature: EditableAccessibilityFeature diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift similarity index 99% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift index 430b6c00..8ce3cf2d 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift @@ -8,7 +8,7 @@ import SwiftUI import CoreLocation import PointNMapShared -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func getLocationFromImageByCentroid( depthMapProcessor: DepthMapProcessor, localizationProcessor: LocalizationProcessor, diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift similarity index 98% rename from 
IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift index efe5390e..93f2a962 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift @@ -9,7 +9,7 @@ import SwiftUI import CoreLocation import PointNMapShared -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func getLocationFromMeshForLineStringByPlane( depthMapProcessor: DepthMapProcessor, localizationProcessor: LocalizationProcessor, diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift similarity index 98% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift index 253b4002..ed8aab81 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift @@ -7,9 +7,8 @@ import SwiftUI import CoreLocation -import PointNMapShared -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateCrossSlope( accessibilityFeature: EditableAccessibilityFeature ) throws -> AccessibilityFeatureAttribute.Value { diff --git 
a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift similarity index 98% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift index 75cd7631..5867294d 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift @@ -7,9 +7,8 @@ import SwiftUI import CoreLocation -import PointNMapShared -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateRunningSlope( accessibilityFeature: EditableAccessibilityFeature ) throws -> AccessibilityFeatureAttribute.Value { diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift similarity index 99% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift index e0399d9c..3eb73a87 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift @@ -7,9 +7,8 @@ 
import SwiftUI import CoreLocation -import PointNMapShared -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateSurfaceIntegrity( accessibilityFeature: EditableAccessibilityFeature ) throws -> AccessibilityFeatureAttribute.Value { diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift similarity index 98% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift index b1b37db2..bc2a4e56 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift @@ -7,9 +7,8 @@ import SwiftUI import CoreLocation -import PointNMapShared -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func calculateWidth( accessibilityFeature: EditableAccessibilityFeature ) throws -> AccessibilityFeatureAttribute.Value { diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift similarity index 98% rename from IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift index 9ba3aa99..0305ecbf 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift +++ 
b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift @@ -6,11 +6,12 @@ // import SwiftUI import CoreLocation +import PointNMapShaderTypes /** Extension for utilities related to world point extraction and plane calculation. */ -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { /** Get world points corresponding to the feature based on the segmentation label image and depth map, using the world points processor. */ @@ -110,7 +111,7 @@ extension AttributeEstimationPipeline { /** Extension for utilities related to mesh polygon extraction and plane calculation. */ -extension AttributeEstimationPipeline { +public extension AttributeEstimationPipeline { func getMeshContents( accessibilityFeature: EditableAccessibilityFeature ) throws -> MeshContents { From 0fab8405ee7bd626be45a9110d87cbcbc596a1bc Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 18:08:23 -0700 Subject: [PATCH 10/14] Modularize the accessibility feature class config by adding feature kind with additional file transfers --- IOSAccessAssessment.xcodeproj/project.pbxproj | 134 +------ .../AccessibilityFeatureKindExtension.swift | 22 ++ .../Depth/DepthMapProcessorExtension.swift | 92 ----- .../Definitions/CapturedMeshDefinitions.swift | 27 -- .../Geospatial/LocationHelpersExtension.swift | 371 ------------------ .../Shared/SharedAppConstants.swift | 1 + .../AttributeEstimationPipeline.swift | 5 +- .../Location/LocationFromImageExtension.swift | 2 +- .../Location/LocationFromMeshExtension.swift | 2 +- .../OtherAttributes/CrossSlopeExtension.swift | 1 + .../RunninSlopeExtension.swift | 1 + .../SurfaceIntegrityExtension.swift | 3 +- .../OtherAttributes/WidthExtension.swift | 1 + .../Components/MeshInstancePolicy.swift | 14 +- .../Config/AccessibilityFeatureConfig.swift | 75 ++-- .../Config/AccessibilityFeatureKind.swift | 22 ++ .../Config/MapillaryCustom11ClassConfig.swift | 29 +- 
.../Definitions/AccessibilityFeature.swift | 1 - .../Definitions/DetectedFeature.swift | 32 +- .../EditableAccessibilityFeature.swift | 45 ++- .../MappedAccessibilityFeature.swift | 33 +- .../Contour/ContourFeatureRasterizer.swift | 7 +- .../Contour/ContourRequestProcessor.swift | 17 +- .../Image/Depth/DepthMapProcessor.swift | 82 ++++ .../Mesh/CapturedMeshSnapshot.swift | 17 +- .../Definitions/CapturedMeshDefinitions.swift | 27 ++ .../ComputerVision/Mesh/MeshPipeline.metal | 0 .../Mesh/SegmentationMeshRecord.swift | 48 +-- .../Geospatial/LocationHelpers.swift | 357 +++++++++++++++++ .../SegmentationModelRequestProcessor.swift | 1 - .../Segmentation/SegmentationARPipeline.swift | 7 +- .../SegmentationAnnotationPipeline.swift | 6 +- .../Shared/Definitions/CaptureData.swift | 85 ++-- .../Shared/Definitions/MetalContext.swift | 20 +- 34 files changed, 744 insertions(+), 843 deletions(-) create mode 100644 IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift delete mode 100644 IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessorExtension.swift delete mode 100644 IOSAccessAssessment/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift delete mode 100644 IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/Components/MeshInstancePolicy.swift (81%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift (58%) create mode 100644 PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift (71%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/Definitions/AccessibilityFeature.swift (98%) rename {IOSAccessAssessment => 
PointNMapShared/Sources/PointNMap}/AccessibilityFeature/Definitions/DetectedFeature.swift (74%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift (73%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift (72%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift (98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Image/Contour/ContourRequestProcessor.swift (93%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/CapturedMeshSnapshot.swift (96%) create mode 100644 PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/MeshPipeline.metal (100%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ComputerVision/Mesh/SegmentationMeshRecord.swift (92%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/Shared/Definitions/CaptureData.swift (70%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/Shared/Definitions/MetalContext.swift (81%) diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 3f58c8d8..3f82dfd3 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -17,25 +17,19 @@ A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A305B05B2E18882500ECCF9B /* DatasetEncoder.swift */; }; A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A305B06B2E18A85D00ECCF9B /* DepthCoder.swift */; }; A306462A2D614D9600B97D1B /* ImageSaver.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30646292D614D9400B97D1B 
/* ImageSaver.swift */; }; - A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801522EC09B1D00B1BA3A /* AccessibilityFeatureConfig.swift */; }; A308015C2EC09BB700B1BA3A /* CityscapesClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801572EC09BB700B1BA3A /* CityscapesClassConfig.swift */; }; A308015D2EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801582EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift */; }; A308015E2EC09BB700B1BA3A /* CocoCustom35ClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308015A2EC09BB700B1BA3A /* CocoCustom35ClassConfig.swift */; }; - A308015F2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308015B2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift */; }; A30801602EC09BB700B1BA3A /* VOCClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801562EC09BB700B1BA3A /* VOCClassConfig.swift */; }; A30801612EC09BB700B1BA3A /* CocoCustom53ClassConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801592EC09BB700B1BA3A /* CocoCustom53ClassConfig.swift */; }; - A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */; }; - A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */; }; A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */; }; - A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */; }; - A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */ 
= {isa = PBXBuildFile; fileRef = A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */; }; A312FD862FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; A312FD902FA3391C0044808E /* PointNMapShared.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; }; A312FD912FA3391C0044808E /* PointNMapShared.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; - A312FDCD2FA3DBD50044808E /* DepthMapProcessorExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FDCC2FA3DBD10044808E /* DepthMapProcessorExtension.swift */; }; A312FE152FA3EBE80044808E /* PointNMapShaderTypes.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; }; A312FE162FA3EBE80044808E /* PointNMapShaderTypes.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; A312FE202FA3EC710044808E /* PointNMapShaderTypes.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; }; + A312FF232FA430510044808E /* AccessibilityFeatureKindExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */; }; A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */; }; A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */; }; A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in 
Sources */ = {isa = PBXBuildFile; fileRef = A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */; }; @@ -50,13 +44,9 @@ A33EB5AB2F76080E008ABFB7 /* APIEndpoint.swift in Sources */ = {isa = PBXBuildFile; fileRef = A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */; }; A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */; }; A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D72FA1A6FA003157B0 /* SafeDeque.swift */; }; - A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */; }; A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */; }; - A35547152EC198A600F43AFD /* ContourRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */; }; A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A355471D2EC1A47200F43AFD /* SharedAppData.swift */; }; - A35547C42EC1AF5700F43AFD /* CaptureData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C32EC1AF5500F43AFD /* CaptureData.swift */; }; A35547C82EC1B0DB00F43AFD /* CurrentMappedFeaturesData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */; }; - A35547CA2EC2045F00F43AFD /* CapturedMeshSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C92EC2045F00F43AFD /* CapturedMeshSnapshot.swift */; }; A35547CC2EC3018E00F43AFD /* AnnotationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547CB2EC3018C00F43AFD /* AnnotationView.swift */; }; A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547CD2EC3048200F43AFD /* 
AnnotationImageViewController.swift */; }; A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */; }; @@ -77,14 +67,11 @@ A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */; }; A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */; }; A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */; }; - A37E72182ED95D0600CFE4EF /* CapturedMeshDefinitions.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */; }; - A37E721D2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */; }; A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338BE2EDA889A00F1A402 /* CustomPicker.swift */; }; A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */; }; A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3A2DD9B03000455E45 /* OSMElement.swift */; }; A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */; }; A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */; }; - A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
A3A413A52ECD86260039298C /* AccessibilityFeature.swift */; }; A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */; }; A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */; }; A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */; }; @@ -93,9 +80,6 @@ A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */; }; A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */; }; A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D752E654F14003BFE78 /* ProfileView.swift */; }; - A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */; }; - A3DA4DBC2EBCB881005BB812 /* SegmentationMeshRecord.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */; }; - A3DA4DBE2EBCB9F9005BB812 /* MetalContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */; }; A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3E162772F3AFC63002D4D08 /* MeshCoder.swift */; }; A3E6D2332F464A2D00DAF88E /* PngDecoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = A3E6D2322F464A2700DAF88E /* PngDecoder.mm */; }; A3EE6E432F57A98A00F515E6 /* DatasetDecoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3EE6E422F57A98A00F515E6 /* DatasetDecoder.swift */; }; @@ -214,22 +198,16 @@ A305B05B2E18882500ECCF9B /* DatasetEncoder.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = DatasetEncoder.swift; sourceTree = ""; }; A305B06B2E18A85D00ECCF9B /* DepthCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthCoder.swift; sourceTree = ""; }; A30646292D614D9400B97D1B /* ImageSaver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageSaver.swift; sourceTree = ""; }; - A30801522EC09B1D00B1BA3A /* AccessibilityFeatureConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureConfig.swift; sourceTree = ""; }; A30801562EC09BB700B1BA3A /* VOCClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VOCClassConfig.swift; sourceTree = ""; }; A30801572EC09BB700B1BA3A /* CityscapesClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CityscapesClassConfig.swift; sourceTree = ""; }; A30801582EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CityscapesSubsetClassConfig.swift; sourceTree = ""; }; A30801592EC09BB700B1BA3A /* CocoCustom53ClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CocoCustom53ClassConfig.swift; sourceTree = ""; }; A308015A2EC09BB700B1BA3A /* CocoCustom35ClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CocoCustom35ClassConfig.swift; sourceTree = ""; }; - A308015B2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MapillaryCustom11ClassConfig.swift; sourceTree = ""; }; - A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DetectedFeature.swift; sourceTree = ""; }; - A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = MeshInstancePolicy.swift; sourceTree = ""; }; A308016B2EC15CBA00B1BA3A /* AccessibilityFeatureAttributeExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureAttributeExtension.swift; sourceTree = ""; }; - A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditableAccessibilityFeature.swift; sourceTree = ""; }; - A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MappedAccessibilityFeature.swift; sourceTree = ""; }; A312FD7B2FA3391B0044808E /* PointNMapShared.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShared.framework; sourceTree = BUILT_PRODUCTS_DIR; }; A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = PointNMapSharedTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; - A312FDCC2FA3DBD10044808E /* DepthMapProcessorExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DepthMapProcessorExtension.swift; sourceTree = ""; }; A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShaderTypes.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureKindExtension.swift; sourceTree = ""; }; A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraViewController.swift; sourceTree = ""; }; A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = OSWPolicy.swift; sourceTree = ""; }; A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWGeometry.swift; sourceTree = ""; }; @@ -244,13 +222,9 @@ A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEndpoint.swift; sourceTree = ""; }; A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEnvironment.swift; sourceTree = ""; }; A34509D72FA1A6FA003157B0 /* SafeDeque.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SafeDeque.swift; sourceTree = ""; }; - A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationHelpersExtension.swift; sourceTree = ""; }; A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2.mlpackage; sourceTree = ""; }; - A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourRequestProcessor.swift; sourceTree = ""; }; A355471D2EC1A47200F43AFD /* SharedAppData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppData.swift; sourceTree = ""; }; - A35547C32EC1AF5500F43AFD /* CaptureData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureData.swift; sourceTree = ""; }; A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentMappedFeaturesData.swift; sourceTree = ""; }; - A35547C92EC2045F00F43AFD /* CapturedMeshSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CapturedMeshSnapshot.swift; sourceTree = ""; }; A35547CB2EC3018C00F43AFD /* 
AnnotationView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationView.swift; sourceTree = ""; }; A35547CD2EC3048200F43AFD /* AnnotationImageViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageViewController.swift; sourceTree = ""; }; A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceSelectionView.swift; sourceTree = ""; }; @@ -273,14 +247,11 @@ A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureSnapshot.swift; sourceTree = ""; }; A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureClassSnapshot.swift; sourceTree = ""; }; A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppContext.swift; sourceTree = ""; }; - A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CapturedMeshDefinitions.swift; sourceTree = ""; }; - A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContourFeatureRasterizer.swift; sourceTree = ""; }; A38338BE2EDA889A00F1A402 /* CustomPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = ""; }; A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationFeatureDetailView.swift; sourceTree = ""; }; A39C9F3A2DD9B03000455E45 /* OSMElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMElement.swift; sourceTree = ""; }; 
A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIConstants.swift; sourceTree = ""; }; A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageManager.swift; sourceTree = ""; }; - A3A413A52ECD86260039298C /* AccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeature.swift; sourceTree = ""; }; A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomXMLParser.swift; sourceTree = ""; }; A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnvironmentService.swift; sourceTree = ""; }; A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMMapDataResponse.swift; sourceTree = ""; }; @@ -288,10 +259,7 @@ A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameRasterizer.swift; sourceTree = ""; }; A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceViewModel.swift; sourceTree = ""; }; A3D78D752E654F14003BFE78 /* ProfileView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileView.swift; sourceTree = ""; }; - A3DA4DAD2EB98D70005BB812 /* MeshPipeline.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = MeshPipeline.metal; sourceTree = ""; }; A3DA4DB42EBAE101005BB812 /* IOSAccessAssessment-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "IOSAccessAssessment-Bridging-Header.h"; sourceTree = ""; }; - A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationMeshRecord.swift; sourceTree = ""; }; - A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalContext.swift; sourceTree = ""; }; A3E162772F3AFC63002D4D08 /* MeshCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshCoder.swift; sourceTree = ""; }; A3E6D2312F4649AD00DAF88E /* PngDecoder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PngDecoder.h; sourceTree = ""; }; A3E6D2322F464A2700DAF88E /* PngDecoder.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PngDecoder.mm; sourceTree = ""; }; @@ -333,9 +301,6 @@ isa = PBXFileSystemSynchronizedBuildFileExceptionSet; membershipExceptions = ( Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, - Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift, - Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift, - Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift, ); platformFiltersByRelativePath = { PointNMapShared.h = ( @@ -359,9 +324,6 @@ isa = PBXFileSystemSynchronizedBuildFileExceptionSet; membershipExceptions = ( Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, - Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift, - Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift, - Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift, ); target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; }; @@ -460,7 +422,6 @@ A30801512EC0984F00B1BA3A /* AccessibilityFeature */, A305B05A2E1887AE00ECCF9B /* LocalDataset */, A34B70CC2DDFE638007B191F /* ARCamera */, - A34509E22FA31DCC003157B0 /* Geospatial */, A3E84ECE2DDAC7980096A645 /* Annotation */, A33EB5AE2F761C83008ABFB7 
/* ComputerVision */, 55659C092BB785EA0094DF01 /* MachineLearning */, @@ -554,20 +515,10 @@ path = LocalDataset; sourceTree = ""; }; - A308014D2EC091E400B1BA3A /* Contour */ = { - isa = PBXGroup; - children = ( - A37E721C2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift */, - A35547142EC198A600F43AFD /* ContourRequestProcessor.swift */, - ); - path = Contour; - sourceTree = ""; - }; A30801512EC0984F00B1BA3A /* AccessibilityFeature */ = { isa = PBXGroup; children = ( A38338C32EDAF25400F1A402 /* Attributes */, - A35E050B2EDE359C003C26CF /* AttributeEstimation */, A30801662EC0AE6B00B1BA3A /* Components */, A30801622EC0A89E00B1BA3A /* Definitions */, A30801552EC09BA200B1BA3A /* Config */, @@ -578,8 +529,7 @@ A30801552EC09BA200B1BA3A /* Config */ = { isa = PBXGroup; children = ( - A30801522EC09B1D00B1BA3A /* AccessibilityFeatureConfig.swift */, - A308015B2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift */, + A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */, A329433D2EE7BF0200C4C1BC /* Others */, ); path = Config; @@ -588,10 +538,6 @@ A30801622EC0A89E00B1BA3A /* Definitions */ = { isa = PBXGroup; children = ( - A30801632EC0A8A600B1BA3A /* DetectedFeature.swift */, - A3A413A52ECD86260039298C /* AccessibilityFeature.swift */, - A30C67E52EE2732D006E4321 /* EditableAccessibilityFeature.swift */, - A30C67E72EE27336006E4321 /* MappedAccessibilityFeature.swift */, ); path = Definitions; sourceTree = ""; @@ -599,20 +545,11 @@ A30801662EC0AE6B00B1BA3A /* Components */ = { isa = PBXGroup; children = ( - A30801672EC0AE7200B1BA3A /* MeshInstancePolicy.swift */, A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */, ); path = Components; sourceTree = ""; }; - A30BED3D2ED2F614004A5B51 /* Definitions */ = { - isa = PBXGroup; - children = ( - A37E72172ED95D0100CFE4EF /* CapturedMeshDefinitions.swift */, - ); - path = Definitions; - sourceTree = ""; - }; A312FE1F2FA3EC710044808E /* Frameworks */ = { isa = PBXGroup; children = ( @@ -663,18 +600,10 @@ isa = PBXGroup; 
children = ( DAA7F8C62CA76514003666D8 /* Image */, - A3DA4DA62EB9320E005BB812 /* Mesh */, ); path = ComputerVision; sourceTree = ""; }; - A3431E002F26FA0C00B96610 /* Extensions */ = { - isa = PBXGroup; - children = ( - ); - path = Extensions; - sourceTree = ""; - }; A34509DB2FA1A7A7003157B0 /* Utils */ = { isa = PBXGroup; children = ( @@ -683,14 +612,6 @@ path = Utils; sourceTree = ""; }; - A34509E22FA31DCC003157B0 /* Geospatial */ = { - isa = PBXGroup; - children = ( - A34509E02FA31DCC003157B0 /* LocationHelpersExtension.swift */, - ); - path = Geospatial; - sourceTree = ""; - }; A34B70CC2DDFE638007B191F /* ARCamera */ = { isa = PBXGroup; children = ( @@ -704,14 +625,6 @@ path = ARCamera; sourceTree = ""; }; - A35E050B2EDE359C003C26CF /* AttributeEstimation */ = { - isa = PBXGroup; - children = ( - A3431E002F26FA0C00B96610 /* Extensions */, - ); - path = AttributeEstimation; - sourceTree = ""; - }; A35E05112EDE7435003C26CF /* Config */ = { isa = PBXGroup; children = ( @@ -842,10 +755,8 @@ isa = PBXGroup; children = ( A3EE6E452F57FE6200F515E6 /* AppMode.swift */, - A35547C32EC1AF5500F43AFD /* CaptureData.swift */, A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */, A374B4AB2F8EF654003E030D /* CurrentMappingData.swift */, - A3DA4DBD2EBCB9F9005BB812 /* MetalContext.swift */, ); path = Definitions; sourceTree = ""; @@ -867,14 +778,6 @@ path = Attributes; sourceTree = ""; }; - A38338C42EDAF3DC00F1A402 /* Depth */ = { - isa = PBXGroup; - children = ( - A312FDCC2FA3DBD10044808E /* DepthMapProcessorExtension.swift */, - ); - path = Depth; - sourceTree = ""; - }; A39C9F392DD9B01200455E45 /* TDEI */ = { isa = PBXGroup; children = ( @@ -932,17 +835,6 @@ path = Helpers; sourceTree = ""; }; - A3DA4DA62EB9320E005BB812 /* Mesh */ = { - isa = PBXGroup; - children = ( - A30BED3D2ED2F614004A5B51 /* Definitions */, - A35547C92EC2045F00F43AFD /* CapturedMeshSnapshot.swift */, - A3DA4DBB2EBCB87E005BB812 /* SegmentationMeshRecord.swift */, - A3DA4DAD2EB98D70005BB812 /* 
MeshPipeline.metal */, - ); - path = Mesh; - sourceTree = ""; - }; A3DA4DC12EBE87B6005BB812 /* Utils */ = { isa = PBXGroup; children = ( @@ -1017,8 +909,6 @@ DAA7F8C62CA76514003666D8 /* Image */ = { isa = PBXGroup; children = ( - A38338C42EDAF3DC00F1A402 /* Depth */, - A308014D2EC091E400B1BA3A /* Contour */, A362AEC72DB59577002D7598 /* Helpers */, ); path = Image; @@ -1284,7 +1174,6 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - A3DA4DAE2EB98D70005BB812 /* MeshPipeline.metal in Sources */, A3FE16632E18BAEB00DAE5BE /* ConfidenceEncoder.swift in Sources */, A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */, A3FFAA802DE444C6002B99BD /* AnnotationOption.swift in Sources */, @@ -1299,10 +1188,8 @@ A308015D2EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift in Sources */, A35547C82EC1B0DB00F43AFD /* CurrentMappedFeaturesData.swift in Sources */, A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */, - A35547C42EC1AF5700F43AFD /* CaptureData.swift in Sources */, A3EE6E462F57FE6400F515E6 /* AppMode.swift in Sources */, A308015E2EC09BB700B1BA3A /* CocoCustom35ClassConfig.swift in Sources */, - A308015F2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift in Sources */, A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */, A3E6D2332F464A2D00DAF88E /* PngDecoder.mm in Sources */, A30801602EC09BB700B1BA3A /* VOCClassConfig.swift in Sources */, @@ -1312,12 +1199,10 @@ A3EE6E4A2F580D6200F515E6 /* TestCameraView.swift in Sources */, A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */, A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */, - A312FDCD2FA3DBD50044808E /* DepthMapProcessorExtension.swift in Sources */, A32943592EE8204400C4C1BC /* OSWPolygon.swift in Sources */, A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */, CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */, - A3DA4DBC2EBCB881005BB812 /* 
SegmentationMeshRecord.swift in Sources */, A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */, A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */, A36C6E022E134CE600A86004 /* bisenetv2_35_640_640.mlpackage in Sources */, @@ -1325,30 +1210,24 @@ A35547CC2EC3018E00F43AFD /* AnnotationView.swift in Sources */, A3FE16612E18BA5900DAE5BE /* RGBCoder.swift in Sources */, CA924A932CEB9AB000FCA928 /* ChangesetService.swift in Sources */, - A3DA4DBE2EBCB9F9005BB812 /* MetalContext.swift in Sources */, A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */, A35E051C2EDFB094003C26CF /* OSMNode.swift in Sources */, A32D66532F7C3F2F00DC4173 /* OSWMultiPolygon.swift in Sources */, A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */, - A30801532EC09B2600B1BA3A /* AccessibilityFeatureConfig.swift in Sources */, A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */, CAA947792CDE700A000C6918 /* AuthService.swift in Sources */, - A34509E32FA31DCC003157B0 /* LocationHelpersExtension.swift in Sources */, - A30C67E62EE27331006E4321 /* EditableAccessibilityFeature.swift in Sources */, - A37E72182ED95D0600CFE4EF /* CapturedMeshDefinitions.swift in Sources */, A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */, A3F38C4C2D38A2C700900547 /* DepthModel.swift in Sources */, 55659C102BB7863F0094DF01 /* SetupView.swift in Sources */, A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */, A3EE6E482F580D0D00F515E6 /* TestListView.swift in Sources */, - A30801682EC0AE7700B1BA3A /* MeshInstancePolicy.swift in Sources */, + A312FF232FA430510044808E /* AccessibilityFeatureKindExtension.swift in Sources */, DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources */, A3EE6E4C2F580E2B00F515E6 /* DatasetLister.swift in Sources */, A3FE16652E18C54000DAE5BE /* CameraTransformCoder.swift in Sources */, A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */, 
CAA9477B2CDE70D9000C6918 /* KeychainService.swift in Sources */, A32943552EE8186E00C4C1BC /* OSWPoint.swift in Sources */, - A37E721D2ED99C0A00CFE4EF /* ContourFeatureRasterizer.swift in Sources */, A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */, CAA947762CDE6FBD000C6918 /* LoginView.swift in Sources */, 3222F91A2B622DFD0019A079 /* IOSAccessAssessmentApp.swift in Sources */, @@ -1362,9 +1241,7 @@ A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */, A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */, A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */, - A3A413A62ECD862B0039298C /* AccessibilityFeature.swift in Sources */, A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */, - A30C67E82EE2733B006E4321 /* MappedAccessibilityFeature.swift in Sources */, A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */, A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */, A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */, @@ -1374,8 +1251,6 @@ A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */, DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */, A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsCoder.swift in Sources */, - A35547152EC198A600F43AFD /* ContourRequestProcessor.swift in Sources */, - A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */, A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */, A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, @@ -1391,7 +1266,6 @@ A3FFAA782DE01637002B99BD /* ARCameraUtils.swift in Sources */, A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */, A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */, - A35547CA2EC2045F00F43AFD /* CapturedMeshSnapshot.swift in Sources */, DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */, ); 
runOnlyForDeploymentPostprocessing = 0; diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift new file mode 100644 index 00000000..cdffea23 --- /dev/null +++ b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift @@ -0,0 +1,22 @@ +// +// AccessibilityFeatureKindExtension.swift +// IOSAccessAssessment +// +// Created by Himanshu on 4/30/26. +// + +import PointNMapShared + +extension AccessibilityFeatureKind { + var oswPolicy: OSWPolicy { + switch self { + case .sidewalk: return OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true) + case .building: return OSWPolicy(oswElementClass: .Building, isExistingFirst: false) + case .pole: return OSWPolicy(oswElementClass: .Pole, isExistingFirst: false) + case .trafficLight: return OSWPolicy(oswElementClass: .TrafficLight, isExistingFirst: false) + case .trafficSign: return OSWPolicy(oswElementClass: .TrafficSign, isExistingFirst: false) + case .vegetation: return OSWPolicy(oswElementClass: .Vegetation, isExistingFirst: false) + default: return OSWPolicy.default + } + } +} diff --git a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessorExtension.swift b/IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessorExtension.swift deleted file mode 100644 index 33a4241e..00000000 --- a/IOSAccessAssessment/ComputerVision/Image/Depth/DepthMapProcessorExtension.swift +++ /dev/null @@ -1,92 +0,0 @@ -// -// DepthMapProcessorExtension.swift -// IOSAccessAssessment -// -// Created by Himanshu on 4/30/26. -// - -import PointNMapShared - -extension DepthMapProcessor { - /** - Retrieves the depth value at the centroid of the given accessibility feature. - - - Parameters: - - accessibilityFeature: The AccessibilityFeature object containing the detected feature. - - - Returns: The depth value at the centroid of the feature. 
- - - Throws: DepthMapProcessorError.unableToAccessDepthData if depth data cannot be accessed. - DepthMapProcessorError.invalidDepth if the retrieved depth value is invalid. - - - Note: The centroid coordinates are normalized (0 to 1) and need to be converted to pixel coordinates. - */ - func getFeatureDepthAtCentroid(detectedFeature: any DetectedFeatureProtocol) throws -> Float { - let featureContourDetails = detectedFeature.contourDetails - let featureCentroid = featureContourDetails.centroid - - let featureCentroidPoint: CGPoint = CGPoint( - x: featureCentroid.x * CGFloat(depthWidth), - y: (1 - featureCentroid.y) * CGFloat(depthHeight) - ) - return try getDepthAtPoint(point: featureCentroidPoint) - } - - /** - Retrieves the average depth value within a specified radius around the centroid of the given accessibility feature. - - - Parameters: - - accessibilityFeature: The AccessibilityFeature object containing the detected feature. - - radius: The radius (in pixels) around the centroid to consider for averaging depth values. Default is 5 pixels. - - - Returns: The average depth value within the specified radius around the feature's centroid. - - - Throws: DepthMapProcessorError.unableToAccessDepthData if depth data cannot be accessed. - DepthMapProcessorError.invalidDepth if no valid depth values are found within the radius. - - - Note: The centroid coordinates are normalized (0 to 1) and need to be converted to pixel coordinates. 
- */ - func getFeatureDepthAtCentroidInRadius(detectedFeature: any DetectedFeatureProtocol, radius: CGFloat = 5) throws -> Float { - let featureContourDetails = detectedFeature.contourDetails - let featureCentroid = featureContourDetails.centroid - - var pointDeltas: [CGPoint] = [] - for xDelta in stride(from: -radius, through: radius, by: 1) { - for yDelta in stride(from: -radius, through: radius, by: 1) { - let distance = sqrt(xDelta * xDelta + yDelta * yDelta) - if distance <= radius { - pointDeltas.append(CGPoint(x: xDelta, y: yDelta)) - } - } - } - - let featureCentroidRadiusPoints: [CGPoint] = pointDeltas.map { delta in - CGPoint( - x: featureCentroid.x * CGFloat(depthWidth) + delta.x, - /// Symmetry in circle ensures that we do not worry about the sign of delta.y here - y: (1 - featureCentroid.y) * CGFloat(depthHeight) + delta.y - ) - } - let depths = try getDepthsAtPoints(points: featureCentroidRadiusPoints) - let validDepths = depths.filter { $0.isFinite && $0 > 0 } - guard !validDepths.isEmpty else { - throw DepthMapProcessorError.invalidDepth - } - let averageDepth = validDepths.reduce(0, +) / Float(validDepths.count) - return averageDepth - } - - func getFeatureDepthsAtBounds(detectedFeature: any DetectedFeatureProtocol) throws -> [Float] { - let featureContourDetails = detectedFeature.contourDetails - let normalizedPoints: [SIMD2] = featureContourDetails.normalizedPoints - - let featureBoundPoints: [CGPoint] = normalizedPoints.map { point in - CGPoint( - x: CGFloat(point.x * Float(depthWidth)), - y: CGFloat((1 - point.y) * Float(depthHeight)) - ) - } - let depths = try getDepthsAtPoints(points: featureBoundPoints) - return depths - } -} diff --git a/IOSAccessAssessment/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift b/IOSAccessAssessment/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift deleted file mode 100644 index 074b2dd5..00000000 --- a/IOSAccessAssessment/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift +++ 
/dev/null @@ -1,27 +0,0 @@ -// -// CapturedMeshDefinitions.swift -// IOSAccessAssessment -// -// Created by Himanshu on 11/27/25. -// -import Foundation -import ARKit - -struct CapturedMeshAnchorSnapshot: Sendable { - let vertexData: Data - let indexData: Data - - let vertexCount: Int - let indexCount: Int -} - -struct CapturedMeshSnapshot: Sendable { - let anchors: [AccessibilityFeatureClass: CapturedMeshAnchorSnapshot] - - let vertexStride: Int - let vertexOffset: Int - let indexStride: Int - let classificationStride: Int - - let totalVertexCount: Int -} diff --git a/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift b/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift deleted file mode 100644 index c2acd72b..00000000 --- a/IOSAccessAssessment/Geospatial/LocationHelpersExtension.swift +++ /dev/null @@ -1,371 +0,0 @@ -// -// LocationHelpers.swift -// IOSAccessAssessment -// -// Created by Himanshu on 3/30/26. -// - -import CoreLocation -import UIKit -import MapKit -import PointNMapShared - -public extension LocationHelpers { - /** - Calculates the distance between two locations represented by their location details if they have similar geometry types. - Not commutative, checks distance from source to destination, so the order of the parameters matters. - Unit of distance is determined by MapKit's MKMapPoint. - - - Note: - First, checks the geometry types of the source and destination location details (e.g., point, linestring, polygon) based on the properties of their last location element. Then, based on the geometry types, it calls the appropriate distance calculation method (e.g., distanceBetweenPoints, distanceFromPointToLineString, distanceFromPointToPolygon, distanceBetweenLineStrings, distanceFromLineStringToPolygon, distanceBetweenPolygons) to compute the distance between the two locations. 
- */ - static func distanceBetweenSimilarOSMLocationDetails( - srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails - ) -> Double? { - guard let srcLastLocationElement = srcLocationDetails.locations.last else { - return nil - } -// let isSrcMultipolygon = srcLocationDetails.locations.count > 1 - let isSrcPolygon = srcLastLocationElement.isWay && srcLastLocationElement.isClosed // && (!isSrcMultipolygon) - let isSrcLineString = srcLastLocationElement.isWay && !srcLastLocationElement.isClosed // && (!isSrcMultipolygon) - let isSrcPoint = !srcLastLocationElement.isWay && !srcLastLocationElement.isClosed // && (!isSrcMultipolygon) - - guard let dstLastLocationElement = dstLocationDetails.locations.last else { - return nil - } -// let isDstMultipolygon = dstLocationDetails.locations.count > 1 - let isDstPolygon = dstLastLocationElement.isWay && dstLastLocationElement.isClosed // && (!isDstMultipolygon) - let isDstLineString = dstLastLocationElement.isWay && !dstLastLocationElement.isClosed // && (!isDstMultipolygon) - let isDstPoint = !dstLastLocationElement.isWay && !dstLastLocationElement.isClosed // && (!isDstMultipolygon) - - if isSrcPoint && isDstPoint { - return distanceBetweenPoints(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) - } else if isSrcLineString && isDstLineString { - return distanceBetweenLineStrings(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) - } else if isSrcPolygon && isDstPolygon { - return distanceBetweenPolygons(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) - } else { - return nil - } - } - - /** - Calculates the distance between two points represented by their location details. The distance is returned in meters. - Unit of distance is determined by MapKit's MKMapPoint. - */ - static func distanceBetweenPoints( - srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails - ) -> Double? 
{ - guard let srcLocationElement = srcLocationDetails.locations.last, - srcLocationElement.isWay == false, srcLocationElement.isClosed == false, - let srcLocationCoordinate = srcLocationElement.coordinates.last, - let dstLocationElement = dstLocationDetails.locations.last, - dstLocationElement.isWay == false, dstLocationElement.isClosed == false, - let dstLocationCoordinate = dstLocationElement.coordinates.last else { - return nil - } - let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) - let dstLocation = CLLocation(latitude: dstLocationCoordinate.latitude, longitude: dstLocationCoordinate.longitude) - return MKDistanceHelpers.distanceBetweenPoints(srcPoint: MKMapPoint(srcLocationCoordinate), dstPoint: MKMapPoint(dstLocationCoordinate)) - } - - /** - Calculates the shortest distance from a point to a linestring represented by their location details. - Unit of distance is determined by MapKit's MKMapPoint. - - - Note: - Converts the coordinates of the linestring into map points, then iterates through each line segment of the linestring and calculates the distance from the point to that line segment using the distanceFromPointToLineSegment method. The minimum distance found across all segments is returned as the distance from the point to the linestring. - */ - static func distanceFromPointToLineString( - srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails - ) -> Double? 
{ - guard let srcLocationElement = srcLocationDetails.locations.last, - srcLocationElement.isWay == false, srcLocationElement.isClosed == false, - let srcLocationCoordinate = srcLocationElement.coordinates.last, - let dstLocationElement = dstLocationDetails.locations.last, - dstLocationElement.isWay == true, dstLocationElement.isClosed == false else { - return nil - } - let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) - let srcMapPoint = MKMapPoint(srcLocationCoordinate) - let dstLocationCoordinates = dstLocationElement.coordinates - let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } - var minDistance: Double = Double.infinity - for i in 0..<(dstMapPoints.count - 1) { - let lineStart = dstMapPoints[i] - let lineEnd = dstMapPoints[i + 1] - if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( - srcPoint: srcMapPoint, lineStart: lineStart, lineEnd: lineEnd - ) { - minDistance = min(minDistance, distance) - } - } - return minDistance - } - - /** - Calculates the shortest distance from a point to a polygon (single polygon) represented by their location details. - Unit of distance is determined by MapKit's MKMapPoint. - - - Note: - Converts the coordinates of the polygon into map points, then iterates through each edge of the polygon and calculates the distance from the point to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges is returned as the distance from the point to the polygon. If the point is inside the polygon, the distance returned is 0. - */ - static func distanceFromPointToPolygon( - srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails - ) -> Double? 
{ - guard let srcLocationElement = srcLocationDetails.locations.last, - srcLocationElement.isWay == false, srcLocationElement.isClosed == false, - let srcLocationCoordinate = srcLocationElement.coordinates.last, - let dstLocationElement = dstLocationDetails.locations.last, - dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { - return nil - } - let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) - let srcMapPoint = MKMapPoint(srcLocationCoordinate) - let dstLocationCoordinates = dstLocationElement.coordinates - let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } - return MKDistanceHelpers.distanceFromPointToPolygon(srcPoint: srcMapPoint, polygonPoints: dstMapPoints) - } - -// static func distanceFromPointToMultiPolygon( -// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails -// ) -> Double? { -// var minDistance: Double = Double.infinity -// dstLocationDetails.locations.forEach { locationElement in -// guard locationElement.isWay == true, locationElement.isClosed == true else { -// return -// } -// let singlePolygonLocationDetails = LocationDetails(locations: [locationElement]) -// if let distance = distanceFromPointToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { -// minDistance = min(minDistance, distance) -// } -// } -// return minDistance -// } - - /** - Calculates the shortest distance between two linestrings represented by their location details. - Unit of distance is determined by MapKit's MKMapPoint. - - - Note: - Converts the coordinates of the linestrings into map points, then iterates through each line segment of the dst linestring and calculates the distance from each point in the source linestring to that line segment. 
The minimum distance found across all segments and points is returned as the distance between the two linestrings. - - - Warning: - The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, two linestrings may partially overlap with each other, and the distance should reflect how much of the linestrings are outside of each other rather than just indicating that there is some overlap. - */ - static func distanceBetweenLineStrings( - srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails - ) -> Double? { - guard let srcLocationElement = srcLocationDetails.locations.last, - srcLocationElement.isWay == true, srcLocationElement.isClosed == false, - let dstLocationElement = dstLocationDetails.locations.last, - dstLocationElement.isWay == true, dstLocationElement.isClosed == false else { - return nil - } - let srcLocationCoordinates = srcLocationElement.coordinates - let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } - let dstLocationCoordinates = dstLocationElement.coordinates - let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } - - var minDistance: Double = Double.infinity - for i in 0..<(dstMapPoints.count - 1) { - let lineStart = dstMapPoints[i] - let lineEnd = dstMapPoints[i + 1] - for srcPoint in srcMapPoints { - if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( - srcPoint: srcPoint, lineStart: lineStart, lineEnd: lineEnd - ) { - minDistance = min(minDistance, distance) - } - } - } - return minDistance - } - - /** - Calculates the shortest distance from a linestring to a polygon (single polygon) represented by their location details. 
- Unit of distance is determined by MapKit's MKMapPoint. - - - Note: - Converts the coordinates of the linestring and polygon into map points, then iterates through each edge of the polygon and calculates the distance from each point in the linestring to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance from the linestring to the polygon. If any point of the linestring is inside the polygon, the distance returned is 0. - - - Warning: - The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, a linestring may partially overlap with a polygon, and the distance should reflect how much of the linestring is outside the polygon rather than just indicating that there is some overlap. - */ - static func distanceFromLineStringToPolygon( - srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails - ) -> Double? 
{ - guard let srcLocationElement = srcLocationDetails.locations.last, - srcLocationElement.isWay == true, srcLocationElement.isClosed == false, - let dstLocationElement = dstLocationDetails.locations.last, - dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { - return nil - } - let srcLocationCoordinates = srcLocationElement.coordinates - let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } - let dstLocationCoordinates = dstLocationElement.coordinates - let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } - - var minDistance: Double = Double.infinity - for i in 0..<(dstMapPoints.count - 1) { - let lineStart = dstMapPoints[i] - let lineEnd = dstMapPoints[i + 1] - for srcPoint in srcMapPoints { - if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( - srcPoint: srcPoint, lineStart: lineStart, lineEnd: lineEnd - ) { - minDistance = min(minDistance, distance) - } - } - } - return minDistance - } - -// static func distanceFromLineStringToMultiPolygon( -// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails -// ) -> Double? 
{ -// var minDistance: Double = Double.infinity -// dstLocationDetails.locations.forEach { locationElement in -// guard locationElement.isWay == true, locationElement.isClosed == true else { -// return -// } -// let singlePolygonLocationDetails = LocationDetails(locations: [locationElement]) -// if let distance = distanceFromLineStringToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { -// minDistance = min(minDistance, distance) -// } -// } -// return minDistance -// } - - /** - Calculates the shortest distance between two polygons (single polygons) represented by their location details. - Unit of distance is determined by MapKit's MKMapPoint. - - - Note: - Converts the coordinates of the polygons into map points, then iterates through each edge of the first polygon and calculates the distance from each point in the second polygon to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance between the two polygons. If any point of one polygon is inside the other polygon, the distance returned is 0. - - - Warning: - The logic for overlapping polygons needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, two polygons may partially overlap with each other, and the distance should reflect how much of the polygons are outside of each other rather than just indicating that there is some overlap. - */ - static func distanceBetweenPolygons( - srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails - ) -> Double? 
{ - guard let srcLocationElement = srcLocationDetails.locations.last, - srcLocationElement.isWay == true, srcLocationElement.isClosed == true, - let dstLocationElement = dstLocationDetails.locations.last, - dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { - return nil - } - let srcLocationCoordinates = srcLocationElement.coordinates - let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } - let dstLocationCoordinates = dstLocationElement.coordinates - let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } - let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } - - var minDistance: Double = Double.infinity - for srcPoint in srcMapPoints { - if let distance = MKDistanceHelpers.distanceFromPointToPolygon(srcPoint: srcPoint, polygonPoints: dstMapPoints) { - minDistance = min(minDistance, distance) - } - } - return minDistance - } - - /** - Calculates the shortest distance between two polygons represented by their location details. - Can have negative distance if there is polygon overlap, where the absolute value of negative distances represents the degree of overlap. - Unit of distance is determined by MapKit's MKMapPoint. - - - Note: - Converts the coordinates of the polygons into map points, then iterates through each edge of the source polygon and calculates the distance from each point in the destination polygon to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance between the two polygons. - - - Warning: - The logic for overlapping polygons needs to be updated, so that it captures the degree of overlap instead of just returning 0. 
This is because in some cases, two polygons may partially overlap with each other, and the distance should reflect how much of the polygons are outside of each other rather than just indicating that there is some overlap. - - - Warning: - Currently, this algorithm doesn't actually consider the relation role of each multi-polygon member (e.g. outer vs inner), which can lead to inaccurate distance calculations in some cases. For example, if one of the multi-polygons has an inner member that overlaps with the other multi-polygon, the distance should be negative to reflect the degree of overlap. However, without considering the relation type, the algorithm may simply return a distance of 0 for this case, which does not accurately capture the spatial relationship between the two multi-polygons. - */ -// static func distanceBetweenMultiPolygons( -// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails -// ) -> Double? { -// let srcLocationCoordinateArrays = srcLocationDetails.locations -// let dstLocationCoordinateArrays = dstLocationDetails.locations -// guard srcLocationCoordinateArrays.count > 0, dstLocationCoordinateArrays.count > 0 else { -// return nil -// } -// -// var minDistance: Double = Double.infinity -// for srcLocationCoordinateArray in srcLocationCoordinateArrays { -// for dstLocationCoordinateArray in dstLocationCoordinateArrays { -// let srcOSMLocationDetails = LocationDetails(locations: [srcLocationCoordinateArray]) -// let dstOSMLocationDetails = LocationDetails(locations: [dstLocationCoordinateArray]) -// /// While deciding the geometry, we are not using the .polygon enumeration, since that actually represents a multipolygon in OSW. -// let srcGeometry: OSWGeometry = srcLocationCoordinateArray.isWay ? .linestring : .point -// let isSrcPolygon = srcLocationCoordinateArray.isWay && srcLocationCoordinateArray.isClosed -// let dstGeometry: OSWGeometry = dstLocationCoordinateArray.isWay ? 
.linestring : .point -// let isDstPolygon = dstLocationCoordinateArray.isWay && dstLocationCoordinateArray.isClosed -// -// /// Must ensure the same units (in this case, decided by MKMapPoint) -// if (srcGeometry == .point && dstGeometry == .point) { -// guard let distance = distanceBetweenPoints( -// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails -// ) else { -// continue -// } -// minDistance = min(minDistance, distance) -// } -// else if (srcGeometry == .point && (dstGeometry == .linestring && !isDstPolygon)) { -// guard let distance = distanceFromPointToLineString( -// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails -// ) else { -// continue -// } -// minDistance = min(minDistance, distance) -// } -// else if (srcGeometry == .point && (dstGeometry == .linestring && isDstPolygon)) { -// guard let distance = distanceFromPointToPolygon( -// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails -// ) else { -// continue -// } -// minDistance = min(minDistance, distance) -// } -// else if ((srcGeometry == .linestring && !isSrcPolygon) && (dstGeometry == .linestring && !isDstPolygon)) { -// guard let distance = distanceBetweenLineStrings( -// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails -// ) else { -// continue -// } -// minDistance = min(minDistance, distance) -// } -// else if ((srcGeometry == .linestring && !isSrcPolygon) && (dstGeometry == .linestring && isDstPolygon)) { -// guard let distance = distanceFromLineStringToPolygon( -// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails -// ) else { -// continue -// } -// minDistance = min(minDistance, distance) -// } -// else if ((srcGeometry == .linestring && isSrcPolygon) && (dstGeometry == .linestring && isDstPolygon)) { -// guard let distance = distanceBetweenPolygons( -// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: 
dstOSMLocationDetails -// ) else { -// continue -// } -// } -// else { -// continue -// } -// } -// } -// return minDistance -// } -} - diff --git a/IOSAccessAssessment/Shared/SharedAppConstants.swift b/IOSAccessAssessment/Shared/SharedAppConstants.swift index f684a011..67dcdc39 100644 --- a/IOSAccessAssessment/Shared/SharedAppConstants.swift +++ b/IOSAccessAssessment/Shared/SharedAppConstants.swift @@ -6,6 +6,7 @@ // import SwiftUI +import PointNMapShared /** Global SharedAppConstants used across the app. diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift index cd52d03b..78c801a6 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift @@ -6,9 +6,10 @@ // import SwiftUI +import Combine import CoreLocation import MapKit -import PointNMapShared +import PointNMapShaderTypes public enum AttributeEstimationPipelineError: Error, LocalizedError { case configurationError(String) @@ -216,7 +217,7 @@ public class AttributeEstimationPipeline: ObservableObject { accessibilityFeature: EditableAccessibilityFeature ) { /// Threshold needs to be in Map Units - let distanceThreshold = SharedAppConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) + let distanceThreshold = PointNMapConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) guard let LocationDetails = accessibilityFeature.locationDetails else { accessibilityFeature.setIsExisting(false) return diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift 
b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift index 8ce3cf2d..10e94187 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift @@ -6,7 +6,7 @@ // import SwiftUI import CoreLocation -import PointNMapShared +import PointNMapShaderTypes public extension AttributeEstimationPipeline { func getLocationFromImageByCentroid( diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift index 93f2a962..4041c608 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift @@ -7,7 +7,7 @@ import SwiftUI import CoreLocation -import PointNMapShared +import PointNMapShaderTypes public extension AttributeEstimationPipeline { func getLocationFromMeshForLineStringByPlane( diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift index ed8aab81..92199a25 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import 
CoreLocation +import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateCrossSlope( diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift index 5867294d..5ac20098 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateRunningSlope( diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift index 3eb73a87..dbc8b391 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateSurfaceIntegrity( @@ -103,7 +104,7 @@ public extension AttributeEstimationPipeline { } /// Run damage detection let cameraImage = captureImageData.cameraImage - let croppedSize = SharedAppConstants.DamageDetectionConstants.inputSize + let croppedSize = PointNMapConstants.DamageDetectionConstants.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( 
currentInterfaceOrientation: captureImageData.interfaceOrientation ) diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift index bc2a4e56..d96f220d 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateWidth( diff --git a/IOSAccessAssessment/AccessibilityFeature/Components/MeshInstancePolicy.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/MeshInstancePolicy.swift similarity index 81% rename from IOSAccessAssessment/AccessibilityFeature/Components/MeshInstancePolicy.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/MeshInstancePolicy.swift index 93282dbf..2c468b49 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Components/MeshInstancePolicy.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/MeshInstancePolicy.swift @@ -16,14 +16,14 @@ TODO: Instead of using only the number of polygons for minClusterSize, we can also consider using the total area of the polygons in the cluster. */ -struct MeshInstancePolicy: Sendable, Codable, Equatable, Hashable { - let clusterDistanceThreshold: Float - let minClusterSize: Int - let meshClusteringDimensions: Set +public struct MeshInstancePolicy: Sendable, Codable, Equatable, Hashable { + public let clusterDistanceThreshold: Float + public let minClusterSize: Int + public let meshClusteringDimensions: Set - let maxClustersToConsider: Int? + public let maxClustersToConsider: Int? 
- init( + public init( clusterDistanceThreshold: Float, minClusterSize: Int, meshClusteringDimensions: Set, maxClustersToConsider: Int? = nil ) { @@ -34,7 +34,7 @@ struct MeshInstancePolicy: Sendable, Codable, Equatable, Hashable { } } -extension MeshInstancePolicy { +public extension MeshInstancePolicy { static let `default` = MeshInstancePolicy( clusterDistanceThreshold: 0.05, minClusterSize: 10, diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift similarity index 58% rename from IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift index 3d21bc06..634faced 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift @@ -7,56 +7,59 @@ import CoreImage import ARKit -import PointNMapShared -struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, CustomStringConvertible { - let id: String - let name: String +public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, CustomStringConvertible { + public let id: String + public let name: String + public let kind: AccessibilityFeatureKind? 
/** Segmentation-related SharedAppConstants */ /// Grayscale value output for the accessibility feature class, by the relevant segmentation model - let grayscaleValue: Float + public let grayscaleValue: Float /// Pre-defined label of the accessibility feature class - let labelValue: UInt8 + public let labelValue: UInt8 /// Color to be assigned for visualization of the segmentation class during post-processing - let color: CIColor + public let color: CIColor /** SharedAppConstants related to mesh */ /// Optional mesh classification for the segmentation class - let meshClassification: Set + public let meshClassification: Set /** Post-Processing related SharedAppConstants. */ /// Optional bounds for the segmentation class. Is kept optional to prevent unnecessary dimension based masking. - let bounds: CGRect? + public let bounds: CGRect? /// Properties for union of masks - let unionOfMasksPolicy: UnionOfMasksPolicy + public let unionOfMasksPolicy: UnionOfMasksPolicy /// Properties related to mesh post-processing - let meshInstancePolicy: MeshInstancePolicy + public let meshInstancePolicy: MeshInstancePolicy /// Attributes associated with the accessibility feature class - let attributes: Set + public let attributes: Set /// Experimental attributes associated with the accessibility feature class - let experimentalAttributes: Set + public let experimentalAttributes: Set /** Mapping-related SharedAppConstants */ - let oswPolicy: OSWPolicy +// public let oswPolicy: OSWPolicy - init(id: String, name: String, grayscaleValue: Float, labelValue: UInt8, color: CIColor, - bounds: CGRect? = nil, unionOfMasksPolicy: UnionOfMasksPolicy = .default, - meshClassification: Set = [], meshInstancePolicy: MeshInstancePolicy = .default, - attributes: Set = [], - experimentalAttributes: Set = [], - oswPolicy: OSWPolicy = .default + public init( + id: String, name: String, kind: AccessibilityFeatureKind? = nil, + grayscaleValue: Float, labelValue: UInt8, color: CIColor, + bounds: CGRect? 
= nil, unionOfMasksPolicy: UnionOfMasksPolicy = .default, + meshClassification: Set = [], meshInstancePolicy: MeshInstancePolicy = .default, + attributes: Set = [], + experimentalAttributes: Set = [], +// oswPolicy: OSWPolicy = .default ) { self.id = id self.name = name + self.kind = kind self.grayscaleValue = grayscaleValue self.labelValue = labelValue self.color = color @@ -66,36 +69,36 @@ struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, self.meshInstancePolicy = meshInstancePolicy self.attributes = attributes self.experimentalAttributes = experimentalAttributes - self.oswPolicy = oswPolicy +// self.oswPolicy = oswPolicy } - static func < (lhs: AccessibilityFeatureClass, rhs: AccessibilityFeatureClass) -> Bool { + public static func < (lhs: AccessibilityFeatureClass, rhs: AccessibilityFeatureClass) -> Bool { return lhs.labelValue < rhs.labelValue } - var description: String { + public var description: String { return "AccessibilityFeatureClass(id: \(id), name: \(name), grayscaleValue: \(grayscaleValue), labelValue: \(labelValue), color: \(color))" } } -struct AccessibilityFeatureClassConfig { - let modelURL: URL? - let classes: [AccessibilityFeatureClass] - let inputSize: CGSize +public struct AccessibilityFeatureClassConfig { + public let modelURL: URL? 
+ public let classes: [AccessibilityFeatureClass] + public let inputSize: CGSize - var classNames: [String] { + public var classNames: [String] { return classes.map { $0.name } } - var grayscaleValues: [Float] { + public var grayscaleValues: [Float] { return classes.map { $0.grayscaleValue } } - var labels: [UInt8] { + public var labels: [UInt8] { return classes.map { $0.labelValue } } - var labelToClassMap: [UInt8: AccessibilityFeatureClass] { + public var labelToClassMap: [UInt8: AccessibilityFeatureClass] { var map: [UInt8: AccessibilityFeatureClass] = [:] for cls in classes { map[cls.labelValue] = cls @@ -103,7 +106,7 @@ struct AccessibilityFeatureClassConfig { return map } - var labelToIndexMap: [UInt8: Int] { + public var labelToIndexMap: [UInt8: Int] { var map: [UInt8: Int] = [:] for (index, cls) in classes.enumerated() { map[cls.labelValue] = index @@ -112,7 +115,7 @@ struct AccessibilityFeatureClassConfig { } // Retrieve grayscale-to-class mapping as [UInt8: String] - var labelToClassNameMap: [UInt8: String] { + public var labelToClassNameMap: [UInt8: String] { var map: [UInt8: String] = [:] for cls in classes { map[cls.labelValue] = cls.name @@ -120,11 +123,11 @@ struct AccessibilityFeatureClassConfig { return map } - var colors: [CIColor] { + public var colors: [CIColor] { return classes.map { $0.color } } - var labelToColorMap: [UInt8: CIColor] { + public var labelToColorMap: [UInt8: CIColor] { var map: [UInt8: CIColor] = [:] for cls in classes { map[cls.labelValue] = cls.color @@ -133,6 +136,6 @@ struct AccessibilityFeatureClassConfig { } } -enum AccessibilityFeatureConfig { +public enum AccessibilityFeatureConfig { /// Configurations for the segmentation model. Added in separate files. 
} diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift new file mode 100644 index 00000000..bf492ed7 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift @@ -0,0 +1,22 @@ +// +// AccessibilityFeatureKind.swift +// IOSAccessAssessment +// +// Created by Himanshu on 4/30/26. +// + +/** + AccessibilityFeatureKind refers to the semantic type of the feature that you would find in an environment where the feature is being captured. + */ +public enum AccessibilityFeatureKind: String, Identifiable, Codable, CaseIterable, Equatable, Sendable { + case sidewalk = "sidewalk" + case building = "building" + case pole = "pole" + case trafficLight = "traffic_light" + case trafficSign = "traffic_sign" + case vegetation = "vegetation" + + public var id: String { + return self.rawValue + } +} diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift similarity index 71% rename from IOSAccessAssessment/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift index 70bcf35b..94f2f44c 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift @@ -6,8 +6,9 @@ // import Foundation import CoreImage +import ARKit -extension AccessibilityFeatureConfig { +public extension AccessibilityFeatureConfig { static let mapillaryCustom11Config: AccessibilityFeatureClassConfig = AccessibilityFeatureClassConfig( modelURL: Bundle.main.url(forResource: "bisenetv2_11_640_640", withExtension: "mlmodelc"), classes: [ @@ 
-18,7 +19,8 @@ extension AccessibilityFeatureConfig { // )), AccessibilityFeatureClass( - id: "sidewalk", name: "Sidewalk", grayscaleValue: 1.0 / 255.0, labelValue: 1, + id: "sidewalk", name: "Sidewalk", kind: .sidewalk, + grayscaleValue: 1.0 / 255.0, labelValue: 1, color: CIColor(red: 0.957, green: 0.137, blue: 0.910), meshClassification: [.floor], attributes: [ @@ -26,37 +28,36 @@ extension AccessibilityFeatureConfig { .widthLegacy, .runningSlopeLegacy, .crossSlopeLegacy, .widthFromImage, .runningSlopeFromImage, .crossSlopeFromImage ], - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true) ), AccessibilityFeatureClass( - id: "building", name: "Building", grayscaleValue: 2.0 / 255.0, labelValue: 2, + id: "building", name: "Building", kind: .building, + grayscaleValue: 2.0 / 255.0, labelValue: 2, color: CIColor(red: 0.275, green: 0.275, blue: 0.275), - oswPolicy: OSWPolicy(oswElementClass: .Building, isExistingFirst: false) ), AccessibilityFeatureClass( - id: "pole", name: "Pole", grayscaleValue: 3.0 / 255.0, labelValue: 3, + id: "pole", name: "Pole", kind: .pole, + grayscaleValue: 3.0 / 255.0, labelValue: 3, color: CIColor(red: 0.600, green: 0.600, blue: 0.600), - oswPolicy: OSWPolicy(oswElementClass: .Pole, isExistingFirst: false) ), AccessibilityFeatureClass( - id: "traffic_light", name: "Traffic light", grayscaleValue: 4.0 / 255.0, labelValue: 4, - color: CIColor(red: 0.980, green: 0.667, blue: 0.118), - oswPolicy: OSWPolicy(oswElementClass: .TrafficLight, isExistingFirst: false) + id: "traffic_light", name: "Traffic light", kind: .trafficLight, + grayscaleValue: 4.0 / 255.0, labelValue: 4, + color: CIColor(red: 0.980, green: 0.667, blue: 0.118) ), AccessibilityFeatureClass( - id: "traffic_sign", name: "Traffic sign", grayscaleValue: 5.0 / 255.0, labelValue: 5, + id: "traffic_sign", name: "Traffic sign", kind: .trafficSign, + grayscaleValue: 5.0 / 255.0, labelValue: 5, color: CIColor(red: 0.863, green: 0.863, blue: 0.000), - oswPolicy: 
OSWPolicy(oswElementClass: .TrafficSign, isExistingFirst: false) ), AccessibilityFeatureClass( - id: "vegetation", name: "Vegetation", grayscaleValue: 6.0 / 255.0, labelValue: 6, + id: "vegetation", name: "Vegetation", kind: .vegetation, + grayscaleValue: 6.0 / 255.0, labelValue: 6, color: CIColor(red: 0.420, green: 0.557, blue: 0.137), - oswPolicy: OSWPolicy(oswElementClass: .Vegetation, isExistingFirst: false) ), AccessibilityFeatureClass( diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/AccessibilityFeature.swift similarity index 98% rename from IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/AccessibilityFeature.swift index 87ed0c3f..315d0290 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/AccessibilityFeature.swift @@ -6,7 +6,6 @@ // import Foundation import CoreLocation -import PointNMapShared enum AccessibilityFeatureError: Error, LocalizedError { case attributeValueMismatch(attribute: AccessibilityFeatureAttribute, value: AccessibilityFeatureAttribute.Value) diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/DetectedFeature.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/DetectedFeature.swift similarity index 74% rename from IOSAccessAssessment/AccessibilityFeature/Definitions/DetectedFeature.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/DetectedFeature.swift index f99e17a2..5242dc67 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/DetectedFeature.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/DetectedFeature.swift @@ -4,20 +4,20 @@ // // Created by Himanshu on 11/9/25. 
// -import PointNMapShared +import CoreGraphics -struct ContourDetails: Sendable, Codable, Equatable, Hashable { - let centroid: CGPoint +public struct ContourDetails: Sendable, Codable, Equatable, Hashable { + public let centroid: CGPoint /// Bounding box in the normalized coordinates. - let boundingBox: CGRect - let normalizedPoints: [SIMD2] - let area: Float - let perimeter: Float + public let boundingBox: CGRect + public let normalizedPoints: [SIMD2] + public let area: Float + public let perimeter: Float /// Specialized property to hold the 4 points of the trapezoid that approximates the contour, if applicable. - let trapezoidPoints: [SIMD2]? + public let trapezoidPoints: [SIMD2]? - init( + public init( centroid: CGPoint, boundingBox: CGRect, normalizedPoints: [SIMD2], area: Float, perimeter: Float, trapezoidPoints: [SIMD2]? = nil ) { @@ -29,7 +29,7 @@ struct ContourDetails: Sendable, Codable, Equatable, Hashable { self.trapezoidPoints = trapezoidPoints } - init(normalizedPoints: [SIMD2], trapezoidPoints: [SIMD2]? = nil) { + public init(normalizedPoints: [SIMD2], trapezoidPoints: [SIMD2]? = nil) { let contourDetails = ContourUtils.getCentroidAreaBounds(normalizedPoints: normalizedPoints) self.centroid = contourDetails.centroid self.boundingBox = contourDetails.boundingBox @@ -39,7 +39,7 @@ struct ContourDetails: Sendable, Codable, Equatable, Hashable { self.trapezoidPoints = trapezoidPoints } - init(contourDetails: ContourDetails, trapezoidPoints: [SIMD2]? = nil) { + public init(contourDetails: ContourDetails, trapezoidPoints: [SIMD2]? 
= nil) { self.centroid = contourDetails.centroid self.boundingBox = contourDetails.boundingBox self.normalizedPoints = contourDetails.normalizedPoints @@ -49,7 +49,7 @@ struct ContourDetails: Sendable, Codable, Equatable, Hashable { } } -protocol DetectedFeatureProtocol: Equatable { +public protocol DetectedFeatureProtocol: Equatable { var accessibilityFeatureClass: AccessibilityFeatureClass { get } var contourDetails: ContourDetails { get } } @@ -63,11 +63,11 @@ protocol DetectedFeatureProtocol: Equatable { Eventually, the goal is to generalize this struct to include all details that would be used to represent a detected accessibility feature. This may include a sub-mesh, depth information, etc. */ -struct DetectedAccessibilityFeature: Sendable, Equatable, Hashable, DetectedFeatureProtocol { - let accessibilityFeatureClass: AccessibilityFeatureClass - let contourDetails: ContourDetails +public struct DetectedAccessibilityFeature: Sendable, Equatable, Hashable, DetectedFeatureProtocol { + public let accessibilityFeatureClass: AccessibilityFeatureClass + public let contourDetails: ContourDetails - init( + public init( accessibilityFeatureClass: AccessibilityFeatureClass, contourDetails: ContourDetails ) { diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift similarity index 73% rename from IOSAccessAssessment/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift index 54ed29d2..36a6105a 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift @@ -6,28 +6,27 @@ // import Foundation import CoreLocation -import PointNMapShared -class 
EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatureProtocol, DetectedFeatureProtocol { - let id: UUID +public class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatureProtocol, DetectedFeatureProtocol { + public let id: UUID - let accessibilityFeatureClass: AccessibilityFeatureClass + public let accessibilityFeatureClass: AccessibilityFeatureClass - let contourDetails: ContourDetails + public let contourDetails: ContourDetails - var selectedAnnotationOption: AnnotationOption = .individualOption(.default) + public var selectedAnnotationOption: AnnotationOption = .individualOption(.default) - var locationDetails: LocationDetails? + public var locationDetails: LocationDetails? /// If isExisting is false, even if an osw element is associated, it means the feature is new. /// If isExisting is true, it means the feature corresponds to an existing real-world feature, and the oswElement (if present) represents that existing feature in OSW. - var isExisting: Bool = false - var oswElement: (any OSWElement)? + public var isExisting: Bool = false + public var oswElement: (any OSWElement)? - var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] - var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] - var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] = [:] + public var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] + public var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] + public var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
= [:] - init( + public init( id: UUID = UUID(), detectedAccessibilityFeature: DetectedAccessibilityFeature ) { @@ -48,7 +47,7 @@ class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatur }) } - init( + public init( id: UUID = UUID(), accessibilityFeatureClass: AccessibilityFeatureClass, contourDetails: ContourDetails, @@ -70,29 +69,29 @@ class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatur self.experimentalAttributeValues = experimentalAttributeValues } - func setAnnotationOption(_ option: AnnotationOption) { + public func setAnnotationOption(_ option: AnnotationOption) { self.selectedAnnotationOption = option } - func getLastLocationCoordinate() -> CLLocationCoordinate2D? { + public func getLastLocationCoordinate() -> CLLocationCoordinate2D? { guard let locationDetails else { return nil } guard let lastCoordinate = locationDetails.locations.last?.coordinates.last else { return nil } return lastCoordinate } - func setLocationDetails(locationDetails: LocationDetails) { + public func setLocationDetails(locationDetails: LocationDetails) { self.locationDetails = locationDetails } - func setIsExisting(_ isExisting: Bool) { + public func setIsExisting(_ isExisting: Bool) { self.isExisting = isExisting } - func setOSWElement(oswElement: any OSWElement) { + public func setOSWElement(oswElement: any OSWElement) { self.oswElement = oswElement } - func setAttributeValue( + public func setAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute, isCalculated: Bool = false, @@ -109,14 +108,14 @@ class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatur } } - func setAttributeValue( + public func setAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute ) throws { try setAttributeValue(value, for: attribute, isCalculated: false, isFinal: true) } - func setExperimentalAttributeValue( + public func 
setExperimentalAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute ) throws { @@ -126,7 +125,7 @@ class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatur experimentalAttributeValues[attribute] = value } - static func == (lhs: EditableAccessibilityFeature, rhs: EditableAccessibilityFeature) -> Bool { + public static func == (lhs: EditableAccessibilityFeature, rhs: EditableAccessibilityFeature) -> Bool { return lhs.id == rhs.id } } diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift similarity index 72% rename from IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift rename to PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift index 8ccb9e21..21139adc 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift @@ -6,20 +6,19 @@ // import Foundation import CoreLocation -import PointNMapShared -struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, CustomStringConvertible { - let id: UUID +public struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, CustomStringConvertible { + public let id: UUID - let accessibilityFeatureClass: AccessibilityFeatureClass + public let accessibilityFeatureClass: AccessibilityFeatureClass - var locationDetails: LocationDetails? - var oswElement: any OSWElement + public var locationDetails: LocationDetails? + public var oswElement: any OSWElement - var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] - var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
+ public var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] + public var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] - init ( + public init ( id: UUID = UUID(), accessibilityFeature: (any AccessibilityFeatureProtocol), oswElement: any OSWElement @@ -32,7 +31,7 @@ struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, Custo self.oswElement = oswElement } - init( + public init( id: UUID = UUID(), accessibilityFeatureClass: AccessibilityFeatureClass, locationDetails: LocationDetails?, @@ -48,17 +47,17 @@ struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, Custo self.locationDetails = locationDetails } - func getLastLocationCoordinate() -> CLLocationCoordinate2D? { + public func getLastLocationCoordinate() -> CLLocationCoordinate2D? { guard let locationDetails else { return nil } guard let lastCoordinate = locationDetails.locations.last?.coordinates.last else { return nil } return lastCoordinate } - mutating func setLocationDetails(locationDetails: LocationDetails) { + public mutating func setLocationDetails(locationDetails: LocationDetails) { self.locationDetails = locationDetails } - mutating func setAttributeValue( + public mutating func setAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute ) throws { guard attribute.isCompatible(with: value) else { @@ -67,7 +66,7 @@ struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, Custo attributeValues[attribute] = value } - mutating func setExperimentalAttributeValue( + public mutating func setExperimentalAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute ) throws { guard attribute.isCompatible(with: value) else { @@ -76,15 +75,15 @@ struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, Custo experimentalAttributeValues[attribute] = 
value } - mutating func setOSWElement(_ oswElement: any OSWElement) { + public mutating func setOSWElement(_ oswElement: any OSWElement) { self.oswElement = oswElement } - static func == (lhs: MappedAccessibilityFeature, rhs: MappedAccessibilityFeature) -> Bool { + public static func == (lhs: MappedAccessibilityFeature, rhs: MappedAccessibilityFeature) -> Bool { return lhs.id == rhs.id } - var description: String { + public var description: String { return "MappedAccessibilityFeature(id: \(id), class: \(accessibilityFeatureClass), location: \(String(describing: locationDetails)), attributes: \(attributeValues), oswElement: \(oswElement))" } } diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift similarity index 98% rename from IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift index f8b4bcf9..1b54f753 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourFeatureRasterizer.swift @@ -7,7 +7,6 @@ import CoreImage import UIKit -import PointNMapShared /** A temporary struct to perform rasterization of detected objects. 
@@ -34,7 +33,7 @@ public struct ContourFeatureRasterizer { return path } - static func rasterizeFeatures( + public static func rasterizeFeatures( detectedFeatures: [any DetectedFeatureProtocol], size: CGSize, polygonConfig: RasterizeConfig = RasterizeConfig(color: .white, width: 2.0), boundsConfig: RasterizeConfig = RasterizeConfig(color: .white, width: 2.0), @@ -90,7 +89,7 @@ public struct ContourFeatureRasterizer { return cgImage } - static func updateRasterizedFeatures( + public static func updateRasterizedFeatures( baseImage: CGImage, detectedFeature: [any DetectedFeatureProtocol], size: CGSize, polygonConfig: RasterizeConfig = RasterizeConfig(color: .white, width: 2.0), @@ -153,7 +152,7 @@ public struct ContourFeatureRasterizer { /** Rasterizes filled contours for the given detected features. This is used for generating segmentation masks. */ - static func rasterizeFeaturesFill( + public static func rasterizeFeaturesFill( detectedFeatures: [any DetectedFeatureProtocol], size: CGSize, polygonConfig: RasterizeConfig = RasterizeConfig(color: .white, width: 1.0) ) -> CGImage? 
{ diff --git a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourRequestProcessor.swift similarity index 93% rename from IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourRequestProcessor.swift index 1ae4b61a..70d87dd1 100644 --- a/IOSAccessAssessment/ComputerVision/Image/Contour/ContourRequestProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Contour/ContourRequestProcessor.swift @@ -6,7 +6,6 @@ // import Vision import CoreImage -import PointNMapShared public enum ContourRequestProcessorError: Error, LocalizedError { case contourProcessingFailed @@ -31,15 +30,15 @@ public enum ContourRequestProcessorError: Error, LocalizedError { To reduce confusion, we can preemptively convert the coordinates to the top-left origin. We would also need to change ContourDetails to reflect this change, by not using CGPoint, CGRect, etc. which are based on the bottom-left origin, and instead use a custom struct that can represent the coordinates in the top-left origin. 
*/ public struct ContourRequestProcessor { - var contourEpsilon: Float = 0.01 + public var contourEpsilon: Float = 0.01 /// For normalized points - var perimeterThreshold: Float = 0.01 - var selectedClasses: [AccessibilityFeatureClass] = [] + public var perimeterThreshold: Float = 0.01 + public var selectedClasses: [AccessibilityFeatureClass] = [] // var selectedClassLabels: [UInt8] = [] - var binaryMaskFilter: BinaryMaskFilter + public var binaryMaskFilter: BinaryMaskFilter - init( + public init( contourEpsilon: Float = 0.01, perimeterThreshold: Float = 0.01, selectedClasses: [AccessibilityFeatureClass] = [] ) throws { self.contourEpsilon = contourEpsilon @@ -48,7 +47,7 @@ public struct ContourRequestProcessor { self.binaryMaskFilter = try BinaryMaskFilter() } - mutating func setSelectedClasses(_ selectedClasses: [AccessibilityFeatureClass]) { + public mutating func setSelectedClasses(_ selectedClasses: [AccessibilityFeatureClass]) { self.selectedClasses = selectedClasses } @@ -60,7 +59,7 @@ public struct ContourRequestProcessor { /** Function to rasterize the detected objects on the image. Creates a unique request and handler since it is run on a separate thread */ - func getFeaturesFromBinaryImage( + public func getFeaturesFromBinaryImage( for binaryImage: CIImage, targetClass: AccessibilityFeatureClass, orientation: CGImagePropertyOrientation = .up ) throws -> [DetectedAccessibilityFeature] { let contourRequest = VNDetectContoursRequest() @@ -99,7 +98,7 @@ public struct ContourRequestProcessor { Processes each class in parallel to get the objects. */ // TODO: Using DispatchQueue.concurrentPerform for parallel processing may not be the best approach for CPU-bound tasks. 
- func processRequest( + public func processRequest( from segmentationImage: CIImage, orientation: CGImagePropertyOrientation = .up ) throws -> [DetectedAccessibilityFeature] { var detectedFeatures: [DetectedAccessibilityFeature] = [] diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift index 73c6d940..ede7f38f 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Image/Depth/DepthMapProcessor.swift @@ -90,4 +90,86 @@ public struct DepthMapProcessor { let depths = try getDepthsAtPoints(points: featurePoints) return depths } + + /** + Retrieves the depth value at the centroid of the given accessibility feature. + + - Parameters: + - accessibilityFeature: The AccessibilityFeature object containing the detected feature. + + - Returns: The depth value at the centroid of the feature. + + - Throws: DepthMapProcessorError.unableToAccessDepthData if depth data cannot be accessed. + DepthMapProcessorError.invalidDepth if the retrieved depth value is invalid. + + - Note: The centroid coordinates are normalized (0 to 1) and need to be converted to pixel coordinates. + */ + public func getFeatureDepthAtCentroid(detectedFeature: any DetectedFeatureProtocol) throws -> Float { + let featureContourDetails = detectedFeature.contourDetails + let featureCentroid = featureContourDetails.centroid + + let featureCentroidPoint: CGPoint = CGPoint( + x: featureCentroid.x * CGFloat(depthWidth), + y: (1 - featureCentroid.y) * CGFloat(depthHeight) + ) + return try getDepthAtPoint(point: featureCentroidPoint) + } + + /** + Retrieves the average depth value within a specified radius around the centroid of the given accessibility feature. + + - Parameters: + - accessibilityFeature: The AccessibilityFeature object containing the detected feature. 
+ - radius: The radius (in pixels) around the centroid to consider for averaging depth values. Default is 5 pixels. + + - Returns: The average depth value within the specified radius around the feature's centroid. + + - Throws: DepthMapProcessorError.unableToAccessDepthData if depth data cannot be accessed. + DepthMapProcessorError.invalidDepth if no valid depth values are found within the radius. + + - Note: The centroid coordinates are normalized (0 to 1) and need to be converted to pixel coordinates. + */ + public func getFeatureDepthAtCentroidInRadius(detectedFeature: any DetectedFeatureProtocol, radius: CGFloat = 5) throws -> Float { + let featureContourDetails = detectedFeature.contourDetails + let featureCentroid = featureContourDetails.centroid + + var pointDeltas: [CGPoint] = [] + for xDelta in stride(from: -radius, through: radius, by: 1) { + for yDelta in stride(from: -radius, through: radius, by: 1) { + let distance = sqrt(xDelta * xDelta + yDelta * yDelta) + if distance <= radius { + pointDeltas.append(CGPoint(x: xDelta, y: yDelta)) + } + } + } + + let featureCentroidRadiusPoints: [CGPoint] = pointDeltas.map { delta in + CGPoint( + x: featureCentroid.x * CGFloat(depthWidth) + delta.x, + /// Symmetry in circle ensures that we do not worry about the sign of delta.y here + y: (1 - featureCentroid.y) * CGFloat(depthHeight) + delta.y + ) + } + let depths = try getDepthsAtPoints(points: featureCentroidRadiusPoints) + let validDepths = depths.filter { $0.isFinite && $0 > 0 } + guard !validDepths.isEmpty else { + throw DepthMapProcessorError.invalidDepth + } + let averageDepth = validDepths.reduce(0, +) / Float(validDepths.count) + return averageDepth + } + + public func getFeatureDepthsAtBounds(detectedFeature: any DetectedFeatureProtocol) throws -> [Float] { + let featureContourDetails = detectedFeature.contourDetails + let normalizedPoints: [SIMD2] = featureContourDetails.normalizedPoints + + let featureBoundPoints: [CGPoint] = normalizedPoints.map { point 
in + CGPoint( + x: CGFloat(point.x * Float(depthWidth)), + y: CGFloat((1 - point.y) * Float(depthHeight)) + ) + } + let depths = try getDepthsAtPoints(points: featureBoundPoints) + return depths + } } diff --git a/IOSAccessAssessment/ComputerVision/Mesh/CapturedMeshSnapshot.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/CapturedMeshSnapshot.swift similarity index 96% rename from IOSAccessAssessment/ComputerVision/Mesh/CapturedMeshSnapshot.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/CapturedMeshSnapshot.swift index f0ac77fd..eb4ac2f6 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/CapturedMeshSnapshot.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/CapturedMeshSnapshot.swift @@ -6,14 +6,15 @@ // import ARKit import RealityKit +import PointNMapShaderTypes -enum CapturedMeshSnapshotError: Error, LocalizedError { +public enum CapturedMeshSnapshotError: Error, LocalizedError { case invalidMeshData case invalidVertexData case invalidIndexData case meshClassNotFound(AccessibilityFeatureClass) - var errorDescription: String? { + public var errorDescription: String? { switch self { case .invalidMeshData: return "The mesh data in the segmentation mesh record is invalid" @@ -28,8 +29,8 @@ enum CapturedMeshSnapshotError: Error, LocalizedError { } @MainActor -final class CapturedMeshSnapshotGenerator { - func snapshotSegmentationRecords( +public final class CapturedMeshSnapshotGenerator { + public func snapshotSegmentationRecords( from: [AccessibilityFeatureClass: SegmentationMeshRecord], vertexStride: Int, vertexOffset: Int, @@ -56,7 +57,7 @@ final class CapturedMeshSnapshotGenerator { ) } - func createSnapshot( + public func createSnapshot( segmentationRecord: SegmentationMeshRecord ) throws -> CapturedMeshAnchorSnapshot { let lowLevelMesh = segmentationRecord.mesh @@ -102,11 +103,11 @@ final class CapturedMeshSnapshotGenerator { Helper class for CapturedMeshSnapshot related operations. 
Can be used for processing the mesh snapshot, even outside the main actor. */ -final class CapturedMeshSnapshotHelper { +public final class CapturedMeshSnapshotHelper { /** TODO: Instead of simd3, use packed simd types that match the vertex format in the snapshot to avoid unnecessary conversions. */ - static func readFeatureSnapshot( + public static func readFeatureSnapshot( capturedMeshSnapshot: CapturedMeshSnapshot, accessibilityFeatureClass: AccessibilityFeatureClass ) throws -> [MeshPolygon] { @@ -172,7 +173,7 @@ final class CapturedMeshSnapshotHelper { ).polygons } - static func readFeatureSnapshot( + public static func readFeatureSnapshot( capturedMeshSnapshot: CapturedMeshSnapshot, accessibilityFeatureClass: AccessibilityFeatureClass ) throws -> MeshContents { diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift new file mode 100644 index 00000000..3af6ad9d --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/CapturedMeshDefinitions.swift @@ -0,0 +1,27 @@ +// +// CapturedMeshDefinitions.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/27/25. 
+// +import Foundation +import ARKit + +public struct CapturedMeshAnchorSnapshot: Sendable { + public let vertexData: Data + public let indexData: Data + + public let vertexCount: Int + public let indexCount: Int +} + +public struct CapturedMeshSnapshot: Sendable { + public let anchors: [AccessibilityFeatureClass: CapturedMeshAnchorSnapshot] + + public let vertexStride: Int + public let vertexOffset: Int + public let indexStride: Int + public let classificationStride: Int + + public let totalVertexCount: Int +} diff --git a/IOSAccessAssessment/ComputerVision/Mesh/MeshPipeline.metal b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshPipeline.metal similarity index 100% rename from IOSAccessAssessment/ComputerVision/Mesh/MeshPipeline.metal rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshPipeline.metal diff --git a/IOSAccessAssessment/ComputerVision/Mesh/SegmentationMeshRecord.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/SegmentationMeshRecord.swift similarity index 92% rename from IOSAccessAssessment/ComputerVision/Mesh/SegmentationMeshRecord.swift rename to PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/SegmentationMeshRecord.swift index 3bdd9d65..a81ae02a 100644 --- a/IOSAccessAssessment/ComputerVision/Mesh/SegmentationMeshRecord.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/SegmentationMeshRecord.swift @@ -7,8 +7,9 @@ import ARKit import RealityKit import MetalKit +import PointNMapShaderTypes -enum SegmentationMeshRecordError: Error, LocalizedError { +public enum SegmentationMeshRecordError: Error, LocalizedError { case isProcessingTrue case emptySegmentation case segmentationTextureError @@ -19,7 +20,7 @@ enum SegmentationMeshRecordError: Error, LocalizedError { case meshPipelineBlitEncoderError case unexpectedError - var errorDescription: String? { + public var errorDescription: String? 
{ switch self { case .isProcessingTrue: return "The Segmentation Mesh Pipeline is already processing a request." @@ -44,23 +45,23 @@ enum SegmentationMeshRecordError: Error, LocalizedError { } @MainActor -final class SegmentationMeshRecord { - let entity: ModelEntity - let name: String - let color: UIColor - let opacity: Float +public final class SegmentationMeshRecord { + public let entity: ModelEntity + public let name: String + public let color: UIColor + public let opacity: Float - var mesh: LowLevelMesh - var vertexCount: Int - var indexCount: Int + public var mesh: LowLevelMesh + public var vertexCount: Int + public var indexCount: Int - let accessibilityFeatureClass: AccessibilityFeatureClass - let accessibilityFeatureMeshClassificationParams: AccessibilityFeatureMeshClassificationParams + public let accessibilityFeatureClass: AccessibilityFeatureClass + public let accessibilityFeatureMeshClassificationParams: AccessibilityFeatureMeshClassificationParams - let context: MetalContext - let pipelineState: MTLComputePipelineState + public let context: MetalContext + public let pipelineState: MTLComputePipelineState - init( + public init( _ context: MetalContext, meshGPUSnapshot: MeshGPUSnapshot, segmentationImage: CIImage, @@ -68,7 +69,10 @@ final class SegmentationMeshRecord { accessibilityFeatureClass: AccessibilityFeatureClass ) throws { self.context = context - guard let kernelFunction = context.device.makeDefaultLibrary()?.makeFunction(name: "processMesh") else { + let library = try context.device.makeDefaultLibrary( + bundle: PointNMapSharedResources.bundle + ) + guard let kernelFunction = library.makeFunction(name: "processMesh") else { throw SegmentationMeshRecordError.metalInitializationError } self.pipelineState = try context.device.makeComputePipelineState(function: kernelFunction) @@ -95,7 +99,7 @@ final class SegmentationMeshRecord { ) } - func replace( + public func replace( meshGPUSnapshot: MeshGPUSnapshot, segmentationImage: CIImage, 
cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3 @@ -107,7 +111,7 @@ final class SegmentationMeshRecord { ) } - func update( + public func update( meshGPUSnapshot: MeshGPUSnapshot, segmentationImage: CIImage, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3 @@ -319,7 +323,7 @@ final class SegmentationMeshRecord { /** Function to set up the AccessibilityFeatureMeshClassificationParams struct based on the provided AccessibilityFeatureClass. */ - static func getAccessibilityFeatureMeshClassificationParams( + public static func getAccessibilityFeatureMeshClassificationParams( accessibilityFeatureClass: AccessibilityFeatureClass ) throws -> AccessibilityFeatureMeshClassificationParams { let accessibilityFeatureMeshClassificationLookupTable = getAccessibilityFeatureMeshClassificationLookupTable( @@ -344,7 +348,7 @@ final class SegmentationMeshRecord { Return an array of booleans for metal, indicating which accessibility feature classes are to be considered. If the accessibilityFeatureClass.meshClassification is empty, all classes are considered valid. 
*/ - static func getAccessibilityFeatureMeshClassificationLookupTable( + public static func getAccessibilityFeatureMeshClassificationLookupTable( accessibilityFeatureClass: AccessibilityFeatureClass ) -> [UInt32] { // MARK: Assuming a maximum of 256 classes @@ -361,7 +365,7 @@ final class SegmentationMeshRecord { return lookupTable } - static func createDescriptor(meshGPUSnapshot: MeshGPUSnapshot) -> LowLevelMesh.Descriptor { + public static func createDescriptor(meshGPUSnapshot: MeshGPUSnapshot) -> LowLevelMesh.Descriptor { let vertexCount = meshGPUSnapshot.anchors.values.reduce(0) { $0 + $1.vertexCount } let indexCount = meshGPUSnapshot.anchors.values.reduce(0) { $0 + $1.indexCount } var descriptor = LowLevelMesh.Descriptor() @@ -379,7 +383,7 @@ final class SegmentationMeshRecord { return descriptor } - static func generateEntity(mesh: LowLevelMesh, color: UIColor, opacity: Float, name: String) throws -> ModelEntity { + public static func generateEntity(mesh: LowLevelMesh, color: UIColor, opacity: Float, name: String) throws -> ModelEntity { let resource = try MeshResource(from: mesh) var material = UnlitMaterial(color: color.withAlphaComponent(CGFloat(opacity))) material.triangleFillMode = .fill diff --git a/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift b/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift index 5c322cea..35d9e4d0 100644 --- a/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift +++ b/PointNMapShared/Sources/PointNMap/Geospatial/LocationHelpers.swift @@ -144,5 +144,362 @@ public struct LocationHelpers { return minDistance } } + + /** + Calculates the distance between two locations represented by their location details if they have similar geometry types. + Not commutative, checks distance from source to destination, so the order of the parameters matters. + Unit of distance is determined by MapKit's MKMapPoint. 
+ + - Note: + First, checks the geometry types of the source and destination location details (e.g., point, linestring, polygon) based on the properties of their last location element. Then, based on the geometry types, it calls the appropriate distance calculation method (e.g., distanceBetweenPoints, distanceFromPointToLineString, distanceFromPointToPolygon, distanceBetweenLineStrings, distanceFromLineStringToPolygon, distanceBetweenPolygons) to compute the distance between the two locations. + */ + public static func distanceBetweenSimilarOSMLocationDetails( + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails + ) -> Double? { + guard let srcLastLocationElement = srcLocationDetails.locations.last else { + return nil + } +// let isSrcMultipolygon = srcLocationDetails.locations.count > 1 + let isSrcPolygon = srcLastLocationElement.isWay && srcLastLocationElement.isClosed // && (!isSrcMultipolygon) + let isSrcLineString = srcLastLocationElement.isWay && !srcLastLocationElement.isClosed // && (!isSrcMultipolygon) + let isSrcPoint = !srcLastLocationElement.isWay && !srcLastLocationElement.isClosed // && (!isSrcMultipolygon) + + guard let dstLastLocationElement = dstLocationDetails.locations.last else { + return nil + } +// let isDstMultipolygon = dstLocationDetails.locations.count > 1 + let isDstPolygon = dstLastLocationElement.isWay && dstLastLocationElement.isClosed // && (!isDstMultipolygon) + let isDstLineString = dstLastLocationElement.isWay && !dstLastLocationElement.isClosed // && (!isDstMultipolygon) + let isDstPoint = !dstLastLocationElement.isWay && !dstLastLocationElement.isClosed // && (!isDstMultipolygon) + + if isSrcPoint && isDstPoint { + return distanceBetweenPoints(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) + } else if isSrcLineString && isDstLineString { + return distanceBetweenLineStrings(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) + } else if isSrcPolygon 
&& isDstPolygon { + return distanceBetweenPolygons(srcLocationDetails: srcLocationDetails, dstLocationDetails: dstLocationDetails) + } else { + return nil + } + } + + /** + Calculates the distance between two points represented by their location details. The distance is returned in meters. + Unit of distance is determined by MapKit's MKMapPoint. + */ + public static func distanceBetweenPoints( + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails + ) -> Double? { + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == false, srcLocationElement.isClosed == false, + let srcLocationCoordinate = srcLocationElement.coordinates.last, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == false, dstLocationElement.isClosed == false, + let dstLocationCoordinate = dstLocationElement.coordinates.last else { + return nil + } + let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) + let dstLocation = CLLocation(latitude: dstLocationCoordinate.latitude, longitude: dstLocationCoordinate.longitude) + return MKDistanceHelpers.distanceBetweenPoints(srcPoint: MKMapPoint(srcLocationCoordinate), dstPoint: MKMapPoint(dstLocationCoordinate)) + } + + /** + Calculates the shortest distance from a point to a linestring represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the linestring into map points, then iterates through each line segment of the linestring and calculates the distance from the point to that line segment using the distanceFromPointToLineSegment method. The minimum distance found across all segments is returned as the distance from the point to the linestring. + */ + public static func distanceFromPointToLineString( + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == false, srcLocationElement.isClosed == false, + let srcLocationCoordinate = srcLocationElement.coordinates.last, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == false else { + return nil + } + let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) + let srcMapPoint = MKMapPoint(srcLocationCoordinate) + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + var minDistance: Double = Double.infinity + for i in 0..<(dstMapPoints.count - 1) { + let lineStart = dstMapPoints[i] + let lineEnd = dstMapPoints[i + 1] + if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( + srcPoint: srcMapPoint, lineStart: lineStart, lineEnd: lineEnd + ) { + minDistance = min(minDistance, distance) + } + } + return minDistance + } + + /** + Calculates the shortest distance from a point to a polygon (single polygon) represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the polygon into map points, then iterates through each edge of the polygon and calculates the distance from the point to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges is returned as the distance from the point to the polygon. If the point is inside the polygon, the distance returned is 0. + */ + public static func distanceFromPointToPolygon( + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == false, srcLocationElement.isClosed == false, + let srcLocationCoordinate = srcLocationElement.coordinates.last, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { + return nil + } + let srcLocation = CLLocation(latitude: srcLocationCoordinate.latitude, longitude: srcLocationCoordinate.longitude) + let srcMapPoint = MKMapPoint(srcLocationCoordinate) + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + return MKDistanceHelpers.distanceFromPointToPolygon(srcPoint: srcMapPoint, polygonPoints: dstMapPoints) + } + +// static func distanceFromPointToMultiPolygon( +// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails +// ) -> Double? { +// var minDistance: Double = Double.infinity +// dstLocationDetails.locations.forEach { locationElement in +// guard locationElement.isWay == true, locationElement.isClosed == true else { +// return +// } +// let singlePolygonLocationDetails = LocationDetails(locations: [locationElement]) +// if let distance = distanceFromPointToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { +// minDistance = min(minDistance, distance) +// } +// } +// return minDistance +// } + + /** + Calculates the shortest distance between two linestrings represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the linestrings into map points, then iterates through each line segment of the dst linestring and calculates the distance from each point in the source linestring to that line segment. 
The minimum distance found across all segments and points is returned as the distance between the two linestrings. + + - Warning: + The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, two linestrings may partially overlap with each other, and the distance should reflect how much of the linestrings are outside of each other rather than just indicating that there is some overlap. + */ + public static func distanceBetweenLineStrings( + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails + ) -> Double? { + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == true, srcLocationElement.isClosed == false, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == false else { + return nil + } + let srcLocationCoordinates = srcLocationElement.coordinates + let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + + var minDistance: Double = Double.infinity + for i in 0..<(dstMapPoints.count - 1) { + let lineStart = dstMapPoints[i] + let lineEnd = dstMapPoints[i + 1] + for srcPoint in srcMapPoints { + if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( + srcPoint: srcPoint, lineStart: lineStart, lineEnd: lineEnd + ) { + minDistance = min(minDistance, distance) + } + } + } + return minDistance + } + + /** + Calculates the shortest distance from a linestring to a polygon (single polygon) represented by their location details. 
+ Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the linestring and polygon into map points, then iterates through each edge of the polygon and calculates the distance from each point in the linestring to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance from the linestring to the polygon. If any point of the linestring is inside the polygon, the distance returned is 0. + + - Warning: + The logic for overlapping linestring needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, a linestring may partially overlap with a polygon, and the distance should reflect how much of the linestring is outside the polygon rather than just indicating that there is some overlap. + */ + public static func distanceFromLineStringToPolygon( + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == true, srcLocationElement.isClosed == false, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { + return nil + } + let srcLocationCoordinates = srcLocationElement.coordinates + let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + + var minDistance: Double = Double.infinity + for i in 0..<(dstMapPoints.count - 1) { + let lineStart = dstMapPoints[i] + let lineEnd = dstMapPoints[i + 1] + for srcPoint in srcMapPoints { + if let distance = MKDistanceHelpers.distanceFromPointToLineSegment( + srcPoint: srcPoint, lineStart: lineStart, lineEnd: lineEnd + ) { + minDistance = min(minDistance, distance) + } + } + } + return minDistance + } + +// static func distanceFromLineStringToMultiPolygon( +// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails +// ) -> Double? 
{ +// var minDistance: Double = Double.infinity +// dstLocationDetails.locations.forEach { locationElement in +// guard locationElement.isWay == true, locationElement.isClosed == true else { +// return +// } +// let singlePolygonLocationDetails = LocationDetails(locations: [locationElement]) +// if let distance = distanceFromLineStringToPolygon(srcLocationDetails: srcLocationDetails, dstLocationDetails: singlePolygonLocationDetails) { +// minDistance = min(minDistance, distance) +// } +// } +// return minDistance +// } + + /** + Calculates the shortest distance between two polygons (single polygons) represented by their location details. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the polygons into map points, then iterates through each edge of the first polygon and calculates the distance from each point in the second polygon to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance between the two polygons. If any point of one polygon is inside the other polygon, the distance returned is 0. + + - Warning: + The logic for overlapping polygons needs to be updated, so that it captures the degree of overlap instead of just returning 0. This is because in some cases, two polygons may partially overlap with each other, and the distance should reflect how much of the polygons are outside of each other rather than just indicating that there is some overlap. + */ + public static func distanceBetweenPolygons( + srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails + ) -> Double? 
{ + guard let srcLocationElement = srcLocationDetails.locations.last, + srcLocationElement.isWay == true, srcLocationElement.isClosed == true, + let dstLocationElement = dstLocationDetails.locations.last, + dstLocationElement.isWay == true, dstLocationElement.isClosed == true else { + return nil + } + let srcLocationCoordinates = srcLocationElement.coordinates + let srcLocations = srcLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let srcMapPoints: [MKMapPoint] = srcLocations.map { MKMapPoint($0) } + let dstLocationCoordinates = dstLocationElement.coordinates + let dstLocations = dstLocationCoordinates.map { CLLocationCoordinate2D(latitude: $0.latitude, longitude: $0.longitude) } + let dstMapPoints: [MKMapPoint] = dstLocations.map { MKMapPoint($0) } + + var minDistance: Double = Double.infinity + for srcPoint in srcMapPoints { + if let distance = MKDistanceHelpers.distanceFromPointToPolygon(srcPoint: srcPoint, polygonPoints: dstMapPoints) { + minDistance = min(minDistance, distance) + } + } + return minDistance + } + + /** + Calculates the shortest distance between two polygons represented by their location details. + Can have negative distance if there is polygon overlap, where the absolute value of negative distances represents the degree of overlap. + Unit of distance is determined by MapKit's MKMapPoint. + + - Note: + Converts the coordinates of the polygons into map points, then iterates through each edge of the source polygon and calculates the distance from each point in the destination polygon to that edge using the distanceFromPointToLineSegment method. The minimum distance found across all edges and points is returned as the distance between the two polygons. + + - Warning: + The logic for overlapping polygons needs to be updated, so that it captures the degree of overlap instead of just returning 0. 
This is because in some cases, two polygons may partially overlap with each other, and the distance should reflect how much of the polygons are outside of each other rather than just indicating that there is some overlap. + + - Warning: + Currently, this algorithm doesn't actually consider the relation role of each multi-polygon member (e.g. outer vs inner), which can lead to inaccurate distance calculations in some cases. For example, if one of the multi-polygons has an inner member that overlaps with the other multi-polygon, the distance should be negative to reflect the degree of overlap. However, without considering the relation type, the algorithm may simply return a distance of 0 for this case, which does not accurately capture the spatial relationship between the two multi-polygons. + */ +// static func distanceBetweenMultiPolygons( +// srcLocationDetails: LocationDetails, dstLocationDetails: LocationDetails +// ) -> Double? { +// let srcLocationCoordinateArrays = srcLocationDetails.locations +// let dstLocationCoordinateArrays = dstLocationDetails.locations +// guard srcLocationCoordinateArrays.count > 0, dstLocationCoordinateArrays.count > 0 else { +// return nil +// } +// +// var minDistance: Double = Double.infinity +// for srcLocationCoordinateArray in srcLocationCoordinateArrays { +// for dstLocationCoordinateArray in dstLocationCoordinateArrays { +// let srcOSMLocationDetails = LocationDetails(locations: [srcLocationCoordinateArray]) +// let dstOSMLocationDetails = LocationDetails(locations: [dstLocationCoordinateArray]) +// /// While deciding the geometry, we are not using the .polygon enumeration, since that actually represents a multipolygon in OSW. +// let srcGeometry: OSWGeometry = srcLocationCoordinateArray.isWay ? .linestring : .point +// let isSrcPolygon = srcLocationCoordinateArray.isWay && srcLocationCoordinateArray.isClosed +// let dstGeometry: OSWGeometry = dstLocationCoordinateArray.isWay ? 
.linestring : .point +// let isDstPolygon = dstLocationCoordinateArray.isWay && dstLocationCoordinateArray.isClosed +// +// /// Must ensure the same units (in this case, decided by MKMapPoint) +// if (srcGeometry == .point && dstGeometry == .point) { +// guard let distance = distanceBetweenPoints( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if (srcGeometry == .point && (dstGeometry == .linestring && !isDstPolygon)) { +// guard let distance = distanceFromPointToLineString( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if (srcGeometry == .point && (dstGeometry == .linestring && isDstPolygon)) { +// guard let distance = distanceFromPointToPolygon( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if ((srcGeometry == .linestring && !isSrcPolygon) && (dstGeometry == .linestring && !isDstPolygon)) { +// guard let distance = distanceBetweenLineStrings( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if ((srcGeometry == .linestring && !isSrcPolygon) && (dstGeometry == .linestring && isDstPolygon)) { +// guard let distance = distanceFromLineStringToPolygon( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: dstOSMLocationDetails +// ) else { +// continue +// } +// minDistance = min(minDistance, distance) +// } +// else if ((srcGeometry == .linestring && isSrcPolygon) && (dstGeometry == .linestring && isDstPolygon)) { +// guard let distance = distanceBetweenPolygons( +// srcLocationDetails: srcOSMLocationDetails, dstLocationDetails: 
dstOSMLocationDetails +// ) else { +// continue +// } +// } +// else { +// continue +// } +// } +// } +// return minDistance +// } } diff --git a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift index a45ec913..ecd74edc 100644 --- a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift @@ -7,7 +7,6 @@ import CoreML import Vision import CoreImage -import PointNMapShared public enum SegmentationModelError: Error, LocalizedError { case modelLoadingError diff --git a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift index 90315373..1adcc524 100644 --- a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift @@ -6,12 +6,11 @@ // import SwiftUI +import Combine import Vision import CoreML -import OrderedCollections import simd -import PointNMapShared public enum SegmentationARPipelineError: Error, LocalizedError { case isProcessingTrue @@ -190,8 +189,8 @@ public final class SegmentationARPipeline: ObservableObject { var depthFilteredSegmentationImage: CIImage? 
= nil if let depthImage, let depthFilter = self.depthFilter { // Apply depth filtering to the segmentation image - let depthMinThresholdValue = SharedAppConstants.DepthConstants.depthMinThreshold - let depthMaxThresholdValue = SharedAppConstants.DepthConstants.depthMaxThreshold + let depthMinThresholdValue = PointNMapConstants.DepthConstants.depthMinThreshold + let depthMaxThresholdValue = PointNMapConstants.DepthConstants.depthMaxThreshold depthFilteredSegmentationImage = try depthFilter.apply( to: segmentationImage, depthImage: depthImage, depthMinThreshold: depthMinThresholdValue, depthMaxThreshold: depthMaxThresholdValue diff --git a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift index 1ef43670..defc2760 100644 --- a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift @@ -6,11 +6,10 @@ // import SwiftUI +import Combine import Vision -import OrderedCollections import simd -import PointNMapShared public enum SegmentationAnnotationPipelineError: Error, LocalizedError { case isProcessingTrue @@ -226,7 +225,8 @@ public final class SegmentationAnnotationPipeline: ObservableObject { /// TODO: Handle sidewalk feature differently if needed, and improve the relevant trapezoid-creation logic. 
let largestFeature = detectedFeatures.sorted(by: {$0.contourDetails.area > $1.contourDetails.area}).first guard let largestFeature = largestFeature, - accessibilityFeatureClass.oswPolicy.oswElementClass == .Sidewalk else { + let largestFeatureClassKind = accessibilityFeatureClass.kind, + largestFeatureClassKind == .sidewalk else { self.isProcessing = false return detectedFeatures } diff --git a/IOSAccessAssessment/Shared/Definitions/CaptureData.swift b/PointNMapShared/Sources/PointNMap/Shared/Definitions/CaptureData.swift similarity index 70% rename from IOSAccessAssessment/Shared/Definitions/CaptureData.swift rename to PointNMapShared/Sources/PointNMap/Shared/Definitions/CaptureData.swift index 9ff3d4fd..96472d38 100644 --- a/IOSAccessAssessment/Shared/Definitions/CaptureData.swift +++ b/PointNMapShared/Sources/PointNMap/Shared/Definitions/CaptureData.swift @@ -6,22 +6,21 @@ // import SwiftUI -import DequeModule import simd import ARKit -struct CaptureImageDataResults: Sendable { - let segmentationLabelImage: CIImage - let segmentedClasses: [AccessibilityFeatureClass] +public struct CaptureImageDataResults: Sendable { + public let segmentationLabelImage: CIImage + public let segmentedClasses: [AccessibilityFeatureClass] /// Map of detected accessibility features with their UUIDs. Not currently used but reserved for potential future use. - let detectedFeatureMap: [UUID: DetectedAccessibilityFeature] + public let detectedFeatureMap: [UUID: DetectedAccessibilityFeature] } -struct CaptureMeshDataResults: Sendable { - let segmentedMesh: CapturedMeshSnapshot - let meshAnchors: [ARMeshAnchor]? +public struct CaptureMeshDataResults: Sendable { + public let segmentedMesh: CapturedMeshSnapshot + public let meshAnchors: [ARMeshAnchor]? - init(segmentedMesh: CapturedMeshSnapshot, meshAnchors: [ARMeshAnchor]? = nil) { + public init(segmentedMesh: CapturedMeshSnapshot, meshAnchors: [ARMeshAnchor]? 
= nil) { self.segmentedMesh = segmentedMesh self.meshAnchors = meshAnchors } @@ -32,7 +31,7 @@ struct CaptureMeshDataResults: Sendable { NOTE: This protocol is designed to prevent over-dependence on mesh data in scenarios where LIDAR is unavailable or ARKit fails to provide mesh data. */ -protocol CaptureDataProtocol: Sendable, Identifiable { +public protocol CaptureDataProtocol: Sendable, Identifiable { var id: UUID { get } var timestamp: TimeInterval { get } var cameraImage: CIImage { get } @@ -46,31 +45,31 @@ protocol CaptureDataProtocol: Sendable, Identifiable { var confidenceImage: CIImage? { get } } -protocol CaptureImageDataProtocol: CaptureDataProtocol { +public protocol CaptureImageDataProtocol: CaptureDataProtocol { var captureImageDataResults: CaptureImageDataResults { get } } -protocol CaptureMeshDataProtocol: CaptureDataProtocol { +public protocol CaptureMeshDataProtocol: CaptureDataProtocol { var captureMeshDataResults: CaptureMeshDataResults { get } } -struct CaptureImageData: CaptureImageDataProtocol { - let id: UUID - let timestamp: TimeInterval +public struct CaptureImageData: CaptureImageDataProtocol { + public let id: UUID + public let timestamp: TimeInterval - let cameraImage: CIImage - let cameraTransform: simd_float4x4 - let cameraIntrinsics: simd_float3x3 + public let cameraImage: CIImage + public let cameraTransform: simd_float4x4 + public let cameraIntrinsics: simd_float3x3 - let interfaceOrientation: UIInterfaceOrientation - let originalSize: CGSize + public let interfaceOrientation: UIInterfaceOrientation + public let originalSize: CGSize - let depthImage: CIImage? - let confidenceImage: CIImage? + public let depthImage: CIImage? + public let confidenceImage: CIImage? 
- let captureImageDataResults: CaptureImageDataResults + public let captureImageDataResults: CaptureImageDataResults - init( + public init( id: UUID, timestamp: TimeInterval, cameraImage: CIImage, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3, interfaceOrientation: UIInterfaceOrientation, originalSize: CGSize, @@ -89,7 +88,7 @@ struct CaptureImageData: CaptureImageDataProtocol { self.captureImageDataResults = captureImageDataResults } - init(_ captureImageData: (any CaptureImageDataProtocol)) { + public init(_ captureImageData: (any CaptureImageDataProtocol)) { self.id = captureImageData.id self.timestamp = captureImageData.timestamp self.cameraImage = captureImageData.cameraImage @@ -103,24 +102,24 @@ struct CaptureImageData: CaptureImageDataProtocol { } } -struct CaptureImageAndMeshData: CaptureImageDataProtocol, CaptureMeshDataProtocol { - let id: UUID - let timestamp: TimeInterval +public struct CaptureImageAndMeshData: CaptureImageDataProtocol, CaptureMeshDataProtocol { + public let id: UUID + public let timestamp: TimeInterval - let cameraImage: CIImage - let cameraTransform: simd_float4x4 - let cameraIntrinsics: simd_float3x3 + public let cameraImage: CIImage + public let cameraTransform: simd_float4x4 + public let cameraIntrinsics: simd_float3x3 - let interfaceOrientation: UIInterfaceOrientation - let originalSize: CGSize + public let interfaceOrientation: UIInterfaceOrientation + public let originalSize: CGSize - let depthImage: CIImage? - let confidenceImage: CIImage? + public let depthImage: CIImage? + public let confidenceImage: CIImage? 
- let captureImageDataResults: CaptureImageDataResults - let captureMeshDataResults: CaptureMeshDataResults + public let captureImageDataResults: CaptureImageDataResults + public let captureMeshDataResults: CaptureMeshDataResults - init( + public init( id: UUID, timestamp: TimeInterval, cameraImage: CIImage, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3, interfaceOrientation: UIInterfaceOrientation, originalSize: CGSize, @@ -141,7 +140,7 @@ struct CaptureImageAndMeshData: CaptureImageDataProtocol, CaptureMeshDataProtoco self.captureMeshDataResults = captureMeshDataResults } - init(captureImageData: CaptureImageData, captureMeshDataResults: CaptureMeshDataResults) { + public init(captureImageData: CaptureImageData, captureMeshDataResults: CaptureMeshDataResults) { self.id = captureImageData.id self.timestamp = captureImageData.timestamp self.cameraImage = captureImageData.cameraImage @@ -159,11 +158,11 @@ struct CaptureImageAndMeshData: CaptureImageDataProtocol, CaptureMeshDataProtoco /** We need a type wrapper that can conditionally represent one of CaptureImageData, CaptureImageAndMeshData, etc. */ -enum CaptureData: Sendable, Identifiable { +public enum CaptureData: Sendable, Identifiable { case imageData(CaptureImageData) case imageAndMeshData(CaptureImageAndMeshData) - var id: UUID { + public var id: UUID { switch self { case .imageData(let data): return data.id @@ -172,7 +171,7 @@ enum CaptureData: Sendable, Identifiable { } } - var imageData: any CaptureImageDataProtocol { + public var imageData: any CaptureImageDataProtocol { switch self { case .imageData(let data): return data @@ -181,7 +180,7 @@ enum CaptureData: Sendable, Identifiable { } } - var meshData: (any CaptureMeshDataProtocol)? { + public var meshData: (any CaptureMeshDataProtocol)? 
{ switch self { case .imageData(_): return nil diff --git a/IOSAccessAssessment/Shared/Definitions/MetalContext.swift b/PointNMapShared/Sources/PointNMap/Shared/Definitions/MetalContext.swift similarity index 81% rename from IOSAccessAssessment/Shared/Definitions/MetalContext.swift rename to PointNMapShared/Sources/PointNMap/Shared/Definitions/MetalContext.swift index a1f582d3..bb9cde37 100644 --- a/IOSAccessAssessment/Shared/Definitions/MetalContext.swift +++ b/PointNMapShared/Sources/PointNMap/Shared/Definitions/MetalContext.swift @@ -11,11 +11,11 @@ import RealityKit import simd import MetalKit -enum MetalContextError: Error, LocalizedError { +public enum MetalContextError: Error, LocalizedError { case metalDeviceUnavailable case metalInitializationError - var errorDescription: String? { + public var errorDescription: String? { switch self { case .metalDeviceUnavailable: return NSLocalizedString("Metal device is unavailable on this device.", comment: "") @@ -25,17 +25,17 @@ enum MetalContextError: Error, LocalizedError { } } -final class MetalContext { - let device: MTLDevice - let commandQueue: MTLCommandQueue +public final class MetalContext { + public let device: MTLDevice + public let commandQueue: MTLCommandQueue // let pipelineState: MTLComputePipelineState - let textureCache: CVMetalTextureCache - let textureLoader: MTKTextureLoader + public let textureCache: CVMetalTextureCache + public let textureLoader: MTKTextureLoader - let ciContext: CIContext - let ciContextNoColorSpace: CIContext + public let ciContext: CIContext + public let ciContextNoColorSpace: CIContext - init() throws { + public init() throws { let device = MTLCreateSystemDefaultDevice() guard let device = device else { throw MetalContextError.metalDeviceUnavailable From bd6cd1c1c31231881958a62a8662dde7cd6147bc Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Thu, 30 Apr 2026 19:46:41 -0700 Subject: [PATCH 11/14] Additional file transfers --- IOSAccessAssessment.xcodeproj/project.pbxproj | 
72 ++++++------------- .../Extensions/IsExistingExtension.swift | 36 ++++++++++ .../AccessibilityFeatureKindExtension.swift | 3 + .../Config/Others/CityscapesClassConfig.swift | 8 +-- .../Others/CityscapesSubsetClassConfig.swift | 4 +- .../Others/CocoCustom35ClassConfig.swift | 7 +- .../Others/CocoCustom53ClassConfig.swift | 9 +-- .../Config/Others/VOCClassConfig.swift | 2 +- .../AccessibilityFeatureEncoder.swift | 1 + .../AccessibilityFeatureClassSnapshot.swift | 1 + .../LocalDataset/DatasetDecoder.swift | 1 + .../LocalDataset/DatasetEncoder.swift | 1 + .../LocalDataset/Mesh/MeshCoder.swift | 1 + .../CurrentMappedFeaturesData.swift | 1 + .../Definitions/CurrentMappingData.swift | 10 +-- .../Shared/SharedAppConstants.swift | 2 +- .../Shared/SharedAppContext.swift | 1 + .../TDEI/Config/APIConstants.swift | 2 +- .../TDEI/OSW/OSWGeometry.swift | 4 ++ .../ARCamera/Helpers/CameraOrientation.swift | 14 ++-- .../ARCamera/Helpers/FrameRasterizer.swift | 5 +- .../ARCamera/Utils/ARCameraUtils.swift | 4 +- .../AttributeEstimationPipeline.swift | 31 +------- .../Location/LocationExtension.swift | 4 +- .../Components/FeatureGeometry.swift | 28 ++++++++ .../Config/AccessibilityFeatureConfig.swift | 29 ++++---- .../Config/AccessibilityFeatureKind.swift | 29 ++++++++ .../Config/MapillaryCustom11ClassConfig.swift | 7 +- .../Definitions/AccessibilityFeature.swift | 6 +- .../Annotation/AnnotationOption.swift | 14 ++-- .../Mesh/Definitions/MeshDefinitions.swift | 9 +++ .../ComputerVision/Mesh/MeshGPUSnapshot.swift | 4 +- .../Mesh/Utils/MeshRasterizer.swift | 1 - .../SegmentationModelRequestProcessor.swift | 4 +- 34 files changed, 202 insertions(+), 153 deletions(-) create mode 100644 IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ARCamera/Helpers/CameraOrientation.swift (88%) rename {IOSAccessAssessment => 
PointNMapShared/Sources/PointNMap}/ARCamera/Helpers/FrameRasterizer.swift (88%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ARCamera/Utils/ARCameraUtils.swift (80%) create mode 100644 PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/FeatureGeometry.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/Annotation/AnnotationOption.swift (59%) diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 3f82dfd3..d8ebd115 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -30,6 +30,7 @@ A312FE162FA3EBE80044808E /* PointNMapShaderTypes.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; A312FE202FA3EC710044808E /* PointNMapShaderTypes.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; }; A312FF232FA430510044808E /* AccessibilityFeatureKindExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */; }; + A312FF2B2FA436CB0044808E /* IsExistingExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FF2A2FA436C60044808E /* IsExistingExtension.swift */; }; A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */; }; A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */; }; A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */; }; @@ -51,7 +52,6 @@ A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */ = {isa 
= PBXBuildFile; fileRef = A35547CD2EC3048200F43AFD /* AnnotationImageViewController.swift */; }; A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */; }; A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */; }; - A35BB2862DC30386009A3FE0 /* CameraOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35BB2852DC30383009A3FE0 /* CameraOrientation.swift */; }; A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */; }; A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */; }; A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05192EDFB015003C26CF /* OSMPayload.swift */; }; @@ -77,7 +77,6 @@ A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */; }; A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */; }; A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */ = {isa = PBXBuildFile; productRef = A3C22FD72CF2F0C300533BF7 /* DequeModule */; }; - A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */; }; A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */; }; A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3D78D752E654F14003BFE78 /* 
ProfileView.swift */; }; A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3E162772F3AFC63002D4D08 /* MeshCoder.swift */; }; @@ -100,9 +99,7 @@ A3FE166C2E1C29CB00DAE5BE /* OtherDetailsCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FE166B2E1C29C800DAE5BE /* OtherDetailsCoder.swift */; }; A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FE166D2E1C2AEF00DAE5BE /* SegmentationEncoder.swift */; }; A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */; }; - A3FFAA782DE01637002B99BD /* ARCameraUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA772DE01634002B99BD /* ARCameraUtils.swift */; }; A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA792DE01A0F002B99BD /* ARCameraView.swift */; }; - A3FFAA802DE444C6002B99BD /* AnnotationOption.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA7F2DE444C3002B99BD /* AnnotationOption.swift */; }; A3FFAA832DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA822DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage */; }; CA924A932CEB9AB000FCA928 /* ChangesetService.swift in Sources */ = {isa = PBXBuildFile; fileRef = CA924A922CEB9AB000FCA928 /* ChangesetService.swift */; }; CAA947762CDE6FBD000C6918 /* LoginView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CAA947752CDE6FBB000C6918 /* LoginView.swift */; }; @@ -208,6 +205,7 @@ A312FD852FA3391C0044808E /* PointNMapSharedTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = PointNMapSharedTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = 
PointNMapShaderTypes.framework; sourceTree = BUILT_PRODUCTS_DIR; }; A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureKindExtension.swift; sourceTree = ""; }; + A312FF2A2FA436C60044808E /* IsExistingExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IsExistingExtension.swift; sourceTree = ""; }; A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraViewController.swift; sourceTree = ""; }; A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolicy.swift; sourceTree = ""; }; A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWGeometry.swift; sourceTree = ""; }; @@ -229,7 +227,6 @@ A35547CD2EC3048200F43AFD /* AnnotationImageViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageViewController.swift; sourceTree = ""; }; A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceSelectionView.swift; sourceTree = ""; }; A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceService.swift; sourceTree = ""; }; - A35BB2852DC30383009A3FE0 /* CameraOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraOrientation.swift; sourceTree = ""; }; A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InvalidContentView.swift; sourceTree = ""; }; A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path 
= APIChangesetUploadController.swift; sourceTree = ""; }; A35E05192EDFB015003C26CF /* OSMPayload.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMPayload.swift; sourceTree = ""; }; @@ -256,7 +253,6 @@ A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnvironmentService.swift; sourceTree = ""; }; A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMMapDataResponse.swift; sourceTree = ""; }; A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureEncoder.swift; sourceTree = ""; }; - A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameRasterizer.swift; sourceTree = ""; }; A3D78D732E65108A003BFE78 /* WorkspaceViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceViewModel.swift; sourceTree = ""; }; A3D78D752E654F14003BFE78 /* ProfileView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileView.swift; sourceTree = ""; }; A3DA4DB42EBAE101005BB812 /* IOSAccessAssessment-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "IOSAccessAssessment-Bridging-Header.h"; sourceTree = ""; }; @@ -281,9 +277,7 @@ A3FE166B2E1C29C800DAE5BE /* OtherDetailsCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OtherDetailsCoder.swift; sourceTree = ""; }; A3FE166D2E1C2AEF00DAE5BE /* SegmentationEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationEncoder.swift; sourceTree = ""; }; A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraManager.swift; 
sourceTree = ""; }; - A3FFAA772DE01634002B99BD /* ARCameraUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraUtils.swift; sourceTree = ""; }; A3FFAA792DE01A0F002B99BD /* ARCameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraView.swift; sourceTree = ""; }; - A3FFAA7F2DE444C3002B99BD /* AnnotationOption.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationOption.swift; sourceTree = ""; }; A3FFAA822DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2_53_640_640.mlpackage; sourceTree = ""; }; CA924A922CEB9AB000FCA928 /* ChangesetService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChangesetService.swift; sourceTree = ""; }; CAA947752CDE6FBB000C6918 /* LoginView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LoginView.swift; sourceTree = ""; }; @@ -299,9 +293,6 @@ /* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */ A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { isa = PBXFileSystemSynchronizedBuildFileExceptionSet; - membershipExceptions = ( - Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, - ); platformFiltersByRelativePath = { PointNMapShared.h = ( ios, @@ -320,17 +311,10 @@ ); target = A312FE0C2FA3EBE80044808E /* PointNMapShaderTypes */; }; - A312FE932FA3F6860044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */ = { - isa = PBXFileSystemSynchronizedBuildFileExceptionSet; - membershipExceptions = ( - Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift, - ); - target = 3222F9152B622DFD0019A079 /* IOSAccessAssessment */; - }; /* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ /* Begin PBXFileSystemSynchronizedRootGroup section */ - A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = 
PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE932FA3F6860044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; + A312FD7C2FA3391B0044808E /* PointNMapShared */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FDE12FA3DF580044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShared; sourceTree = ""; }; A312FD8B2FA3391C0044808E /* PointNMapSharedTests */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = PointNMapSharedTests; sourceTree = ""; }; A312FE0E2FA3EBE80044808E /* PointNMapShaderTypes */ = {isa = PBXFileSystemSynchronizedRootGroup; exceptions = (A312FE172FA3EBE80044808E /* PBXFileSystemSynchronizedBuildFileExceptionSet */, ); explicitFileTypes = {}; explicitFolders = (); path = PointNMapShaderTypes; sourceTree = ""; }; /* End PBXFileSystemSynchronizedRootGroup section */ @@ -518,9 +502,9 @@ A30801512EC0984F00B1BA3A /* AccessibilityFeature */ = { isa = PBXGroup; children = ( + A312FF282FA4362C0044808E /* AttributeEstimation */, A38338C32EDAF25400F1A402 /* Attributes */, A30801662EC0AE6B00B1BA3A /* Components */, - A30801622EC0A89E00B1BA3A /* Definitions */, A30801552EC09BA200B1BA3A /* Config */, ); path = AccessibilityFeature; @@ -535,13 +519,6 @@ path = Config; sourceTree = ""; }; - A30801622EC0A89E00B1BA3A /* Definitions */ = { - isa = PBXGroup; - children = ( - ); - path = Definitions; - sourceTree = ""; - }; A30801662EC0AE6B00B1BA3A /* Components */ = { isa = PBXGroup; children = ( @@ -557,6 +534,22 @@ name = Frameworks; sourceTree = ""; }; + A312FF282FA4362C0044808E /* AttributeEstimation */ = { + isa = PBXGroup; + children = ( + A312FF292FA436390044808E /* Extensions */, + ); + path = AttributeEstimation; + sourceTree = ""; + }; + 
A312FF292FA436390044808E /* Extensions */ = { + isa = PBXGroup; + children = ( + A312FF2A2FA436C60044808E /* IsExistingExtension.swift */, + ); + path = Extensions; + sourceTree = ""; + }; A31A1E772EAC49E3008B30B7 /* UI */ = { isa = PBXGroup; children = ( @@ -615,8 +608,6 @@ A34B70CC2DDFE638007B191F /* ARCamera */ = { isa = PBXGroup; children = ( - A3FFAA762DE01631002B99BD /* Helpers */, - A3DA4DC12EBE87B6005BB812 /* Utils */, A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */, A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */, A3EE6E4D2F5A258700F515E6 /* TestCameraManager.swift */, @@ -835,14 +826,6 @@ path = Helpers; sourceTree = ""; }; - A3DA4DC12EBE87B6005BB812 /* Utils */ = { - isa = PBXGroup; - children = ( - A3FFAA772DE01634002B99BD /* ARCameraUtils.swift */, - ); - path = Utils; - sourceTree = ""; - }; A3E162762F3AFC51002D4D08 /* Mesh */ = { isa = PBXGroup; children = ( @@ -856,7 +839,6 @@ children = ( A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */, A35547CD2EC3048200F43AFD /* AnnotationImageViewController.swift */, - A3FFAA7F2DE444C3002B99BD /* AnnotationOption.swift */, ); path = Annotation; sourceTree = ""; @@ -878,15 +860,6 @@ path = DepthEstimation; sourceTree = ""; }; - A3FFAA762DE01631002B99BD /* Helpers */ = { - isa = PBXGroup; - children = ( - A35BB2852DC30383009A3FE0 /* CameraOrientation.swift */, - A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */, - ); - path = Helpers; - sourceTree = ""; - }; CAA947772CDE7001000C6918 /* Auth */ = { isa = PBXGroup; children = ( @@ -1176,7 +1149,6 @@ files = ( A3FE16632E18BAEB00DAE5BE /* ConfidenceEncoder.swift in Sources */, A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */, - A3FFAA802DE444C6002B99BD /* AnnotationOption.swift in Sources */, CAF812C42CFA108100D44B84 /* UserStateViewModel.swift in Sources */, A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in Sources */, A37E3E3D2EED60F300B07B77 /* lodepng.cpp in Sources */, @@ -1203,10 +1175,10 @@ A3B61FCB2F79036A0052AE2C /* 
OSMMapDataResponse.swift in Sources */, CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, A305B06C2E18A85F00ECCF9B /* DepthCoder.swift in Sources */, + A312FF2B2FA436CB0044808E /* IsExistingExtension.swift in Sources */, A308016C2EC15CC400B1BA3A /* AccessibilityFeatureAttributeExtension.swift in Sources */, A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */, A36C6E022E134CE600A86004 /* bisenetv2_35_640_640.mlpackage in Sources */, - A35BB2862DC30386009A3FE0 /* CameraOrientation.swift in Sources */, A35547CC2EC3018E00F43AFD /* AnnotationView.swift in Sources */, A3FE16612E18BA5900DAE5BE /* RGBCoder.swift in Sources */, CA924A932CEB9AB000FCA928 /* ChangesetService.swift in Sources */, @@ -1256,14 +1228,12 @@ A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */, - A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */, A374B4AC2F8EF654003E030D /* CurrentMappingData.swift in Sources */, A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */, A3FE16672E18C81800DAE5BE /* LocationCoder.swift in Sources */, A3FFAA832DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage in Sources */, A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */, A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */, - A3FFAA782DE01637002B99BD /* ARCameraUtils.swift in Sources */, A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */, A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */, DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */, diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift new file mode 100644 index 00000000..3658a245 --- /dev/null +++ 
b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift @@ -0,0 +1,36 @@ +// +// IsExistingExtension.swift +// IOSAccessAssessment +// +// Created by Himanshu on 4/30/26. +// +import PointNMapShared +import CoreLocation +import MapKit + +extension AttributeEstimationPipeline { + public func processIsExistingRequest( + deviceLocation: CLLocationCoordinate2D, + mappingData: CurrentMappingData, + accessibilityFeature: EditableAccessibilityFeature + ) { + /// Threshold needs to be in Map Units + let distanceThreshold = PointNMapConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) + guard let LocationDetails = accessibilityFeature.locationDetails else { + accessibilityFeature.setIsExisting(false) + return + } + let matchedElement: (any OSWElement)? = mappingData.getMatchedFeature( + to: LocationDetails, featureClass: accessibilityFeature.accessibilityFeatureClass, + captureId: self.captureImageData?.id, + distanceThreshold: distanceThreshold + ) + guard let matchedElement = matchedElement else { + accessibilityFeature.setIsExisting(false) + return + } + let isExisting = accessibilityFeature.accessibilityFeatureClass.kind?.oswPolicy.isExistingFirst ?? false + accessibilityFeature.setIsExisting(isExisting) + accessibilityFeature.setOSWElement(oswElement: matchedElement) + } +} diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift index cdffea23..58038049 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Config/AccessibilityFeatureKindExtension.swift @@ -7,6 +7,9 @@ import PointNMapShared +/** + Extension to add mapping-related logic to AccessibilityFeatureKind. 
+ */ extension AccessibilityFeatureKind { var oswPolicy: OSWPolicy { switch self { diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesClassConfig.swift b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesClassConfig.swift index 7c1daa25..27443ee8 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesClassConfig.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesClassConfig.swift @@ -6,7 +6,7 @@ // import Foundation import CoreImage - +import PointNMapShared extension AccessibilityFeatureConfig { static let cityscapes: AccessibilityFeatureClassConfig = AccessibilityFeatureClassConfig( @@ -33,14 +33,12 @@ extension AccessibilityFeatureConfig { AccessibilityFeatureClass( id: "road", name: "Road", grayscaleValue: 7.0 / 255.0, labelValue: 7, color: CIColor(red: 0.502, green: 0.251, blue: 0.502), - bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing + bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4) ), AccessibilityFeatureClass( id: "sidewalk", name: "Sidewalk", grayscaleValue: 8.0 / 255.0, labelValue: 8, color: CIColor(red: 0.957, green: 0.137, blue: 0.910), - bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing + bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4) ), AccessibilityFeatureClass( id: "parking", name: "Parking", grayscaleValue: 9.0 / 255.0, labelValue: 9, diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesSubsetClassConfig.swift b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesSubsetClassConfig.swift index 2a983176..8a74438e 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesSubsetClassConfig.swift +++ 
b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CityscapesSubsetClassConfig.swift @@ -6,7 +6,7 @@ // import Foundation import CoreImage - +import PointNMapShared extension AccessibilityFeatureConfig { static let cityscapesSubset: AccessibilityFeatureClassConfig = AccessibilityFeatureClassConfig( @@ -16,13 +16,11 @@ extension AccessibilityFeatureConfig { id: "road", name: "Road", grayscaleValue: 0.0 / 255.0, labelValue: 0, color: CIColor(red: 0.502, green: 0.251, blue: 0.502), bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing ), AccessibilityFeatureClass( id: "sidewalk", name: "Sidewalk", grayscaleValue: 1.0 / 255.0, labelValue: 1, color: CIColor(red: 0.957, green: 0.137, blue: 0.910), bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing ), AccessibilityFeatureClass( id: "building", name: "Building", grayscaleValue: 2.0 / 255.0, labelValue: 2, diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom35ClassConfig.swift b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom35ClassConfig.swift index 648f7379..00b4648d 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom35ClassConfig.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom35ClassConfig.swift @@ -6,6 +6,7 @@ // import Foundation import CoreImage +import PointNMapShared extension AccessibilityFeatureConfig { static let cocoCustom35Config: AccessibilityFeatureClassConfig = AccessibilityFeatureClassConfig( @@ -14,14 +15,12 @@ extension AccessibilityFeatureConfig { AccessibilityFeatureClass( id: "road", name: "Road", grayscaleValue: 27.0 / 255.0, labelValue: 27, color: CIColor(red: 0.502, green: 0.251, blue: 0.502), - bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: 
OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing + bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4) ), AccessibilityFeatureClass( id: "sidewalk", name: "Sidewalk", grayscaleValue: 22.0 / 255.0, labelValue: 22, color: CIColor(red: 0.957, green: 0.137, blue: 0.910), - bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing + bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4) ), AccessibilityFeatureClass( id: "building", name: "Building", grayscaleValue: 16.0 / 255.0, labelValue: 16, diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom53ClassConfig.swift b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom53ClassConfig.swift index 19a38f5b..8b9f1781 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom53ClassConfig.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Config/Others/CocoCustom53ClassConfig.swift @@ -6,8 +6,7 @@ // import Foundation import CoreImage - - +import PointNMapShared extension AccessibilityFeatureConfig { static let cocoCustom53Config: AccessibilityFeatureClassConfig = AccessibilityFeatureClassConfig( @@ -16,14 +15,12 @@ extension AccessibilityFeatureConfig { AccessibilityFeatureClass( id: "road", name: "Road", grayscaleValue: 41.0 / 255.0, labelValue: 41, color: CIColor(red: 0.502, green: 0.251, blue: 0.502), - bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing + bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4) ), AccessibilityFeatureClass( id: "sidewalk", name: "Sidewalk", grayscaleValue: 35.0 / 255.0, labelValue: 35, color: CIColor(red: 0.957, green: 0.137, blue: 0.910), - bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: 
true), // Temporarily set for testing + bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4) ), AccessibilityFeatureClass( id: "building", name: "Building", grayscaleValue: 19.0 / 255.0, labelValue: 19, diff --git a/IOSAccessAssessment/AccessibilityFeature/Config/Others/VOCClassConfig.swift b/IOSAccessAssessment/AccessibilityFeature/Config/Others/VOCClassConfig.swift index a7095f9d..0844d119 100644 --- a/IOSAccessAssessment/AccessibilityFeature/Config/Others/VOCClassConfig.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Config/Others/VOCClassConfig.swift @@ -6,6 +6,7 @@ // import Foundation import CoreImage +import PointNMapShared extension AccessibilityFeatureConfig { @@ -60,7 +61,6 @@ extension AccessibilityFeatureConfig { id: "person", name: "Person", grayscaleValue: 180.0 / 255.0, labelValue: 180, color: CIColor(red: 0.750, green: 0.500, blue: 0.500), bounds: CGRect(x: 0.0, y: 0.1, width: 1.0, height: 0.4), - oswPolicy: OSWPolicy(oswElementClass: .Sidewalk, isExistingFirst: true), // Temporarily set for testing ), AccessibilityFeatureClass( id: "pottedplant", name: "PottedPlant", grayscaleValue: 192.0 / 255.0, labelValue: 192, diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift index 0f8da17d..0f9e22e9 100644 --- a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift @@ -6,6 +6,7 @@ // import Foundation +import PointNMapShared enum AccessibilityFeatureEncoderError: Error, LocalizedError { case fileCreationFailed diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift index 50affd19..ae216472 100644 --- 
a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift @@ -7,6 +7,7 @@ import Foundation import CoreLocation +import PointNMapShared struct AccessibilityFeatureClassSnapshot: Codable, Identifiable, Sendable { let id: String diff --git a/IOSAccessAssessment/LocalDataset/DatasetDecoder.swift b/IOSAccessAssessment/LocalDataset/DatasetDecoder.swift index b6a08fb7..621f3367 100644 --- a/IOSAccessAssessment/LocalDataset/DatasetDecoder.swift +++ b/IOSAccessAssessment/LocalDataset/DatasetDecoder.swift @@ -9,6 +9,7 @@ import Foundation import ARKit import CryptoKit import CoreLocation +import PointNMapShared enum DatasetDecoderError: Error, LocalizedError { case directoryRetrievalFailed diff --git a/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift b/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift index c077d74d..9d743d20 100644 --- a/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift +++ b/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift @@ -9,6 +9,7 @@ import Foundation import ARKit import CryptoKit import CoreLocation +import PointNMapShared enum DatasetEncoderError: Error, LocalizedError { case directoryCreationFailed diff --git a/IOSAccessAssessment/LocalDataset/Mesh/MeshCoder.swift b/IOSAccessAssessment/LocalDataset/Mesh/MeshCoder.swift index 7c8d017c..6045d43b 100644 --- a/IOSAccessAssessment/LocalDataset/Mesh/MeshCoder.swift +++ b/IOSAccessAssessment/LocalDataset/Mesh/MeshCoder.swift @@ -9,6 +9,7 @@ import Foundation import Accelerate import ARKit import RealityKit +import PointNMapShared enum MeshCoderError: Error, LocalizedError { case modelEntityHasNoModel diff --git a/IOSAccessAssessment/Shared/Definitions/CurrentMappedFeaturesData.swift b/IOSAccessAssessment/Shared/Definitions/CurrentMappedFeaturesData.swift index 3ea81d1e..10064f13 100644 --- 
a/IOSAccessAssessment/Shared/Definitions/CurrentMappedFeaturesData.swift +++ b/IOSAccessAssessment/Shared/Definitions/CurrentMappedFeaturesData.swift @@ -6,6 +6,7 @@ // import Foundation +import PointNMapShared enum CurrentMappedFeaturesDataError: Error, LocalizedError { case accessibilityFeatureClassNotWay(AccessibilityFeatureClass) diff --git a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift index 1a272a49..6a4f502b 100644 --- a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift +++ b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift @@ -74,7 +74,7 @@ class CurrentMappingData: CustomStringConvertible { } for featureClass in accessibilityFeatureClasses { - let oswElementClass = featureClass.oswPolicy.oswElementClass + let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? OSWPolicy.default.oswElementClass let geometry = oswElementClass.geometry let identifyingFieldTags: [String: String] = oswElementClass.identifyingFieldTags @@ -142,7 +142,7 @@ class CurrentMappingData: CustomStringConvertible { Updates the features map for a specific accessibility feature class by adding or replacing the features related that class with the provided elements. This function can be used to incrementally update the features map when new data is available for a specific feature class, without needing to rebuild the entire map from scratch. */ func updateFeatures(_ elements: [any OSWElement], for featureClass: AccessibilityFeatureClass) { - let oswElementClass = featureClass.oswPolicy.oswElementClass + let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? OSWPolicy.default.oswElementClass let geometry = oswElementClass.geometry var featureIds = featuresMap[featureClass] ?? 
[] @@ -180,7 +180,8 @@ class CurrentMappingData: CustomStringConvertible { guard let featureIds = featuresMap[featureClass] else { return nil } var nearestFeature: (any OSWElement)? var nearestDistance: CLLocationDistance = distanceThreshold - let geometry = featureClass.oswPolicy.oswElementClass.geometry + let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? OSWPolicy.default.oswElementClass + let geometry = oswElementClass.geometry for featureId in featureIds { guard let feature = getFeature(featureId: featureId, geometry: geometry) else { continue } @@ -207,7 +208,8 @@ class CurrentMappingData: CustomStringConvertible { ) -> (any OSWElement)? { guard let featureIds = featuresMap[featureClass] else { return nil } var nearestFeature: (any OSWElement)? - let geometry = featureClass.oswPolicy.oswElementClass.geometry + let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? OSWPolicy.default.oswElementClass + let geometry = oswElementClass.geometry let captureIdString = captureId.uuidString for featureId in featureIds { diff --git a/IOSAccessAssessment/Shared/SharedAppConstants.swift b/IOSAccessAssessment/Shared/SharedAppConstants.swift index 67dcdc39..63660f7c 100644 --- a/IOSAccessAssessment/Shared/SharedAppConstants.swift +++ b/IOSAccessAssessment/Shared/SharedAppConstants.swift @@ -9,7 +9,7 @@ import SwiftUI import PointNMapShared /** - Global SharedAppConstants used across the app. + Global Constants used across the app. */ struct SharedAppConstants { // Supported Classes diff --git a/IOSAccessAssessment/Shared/SharedAppContext.swift b/IOSAccessAssessment/Shared/SharedAppContext.swift index ea7d4dde..e9cbdd54 100644 --- a/IOSAccessAssessment/Shared/SharedAppContext.swift +++ b/IOSAccessAssessment/Shared/SharedAppContext.swift @@ -4,6 +4,7 @@ // // Created by Himanshu on 11/24/25. // +import PointNMapShared final class SharedAppContext: ObservableObject { var metalContext: MetalContext? 
diff --git a/IOSAccessAssessment/TDEI/Config/APIConstants.swift b/IOSAccessAssessment/TDEI/Config/APIConstants.swift index 7dfd5508..714266f7 100644 --- a/IOSAccessAssessment/TDEI/Config/APIConstants.swift +++ b/IOSAccessAssessment/TDEI/Config/APIConstants.swift @@ -1,5 +1,5 @@ // -// SharedAppConstants.swift +// APIConstants.swift // IOSAccessAssessment // // Created by Himanshu on 5/18/25. diff --git a/IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift b/IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift index ba7c1fbe..0c91d828 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift @@ -5,6 +5,10 @@ // Created by Himanshu on 12/8/25. // +/** + - WARNING: We may want to merge this with the FeatureGeometry enum in the future. + Else, we will have a disconnect between the geometry of the AccessibilityFeature and the geometry of the associated OSW element. + */ enum OSWGeometry: String, CaseIterable, Hashable, Codable { case point case linestring diff --git a/IOSAccessAssessment/ARCamera/Helpers/CameraOrientation.swift b/PointNMapShared/Sources/PointNMap/ARCamera/Helpers/CameraOrientation.swift similarity index 88% rename from IOSAccessAssessment/ARCamera/Helpers/CameraOrientation.swift rename to PointNMapShared/Sources/PointNMap/ARCamera/Helpers/CameraOrientation.swift index 3ba729f3..2ff43500 100644 --- a/IOSAccessAssessment/ARCamera/Helpers/CameraOrientation.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/Helpers/CameraOrientation.swift @@ -10,9 +10,9 @@ import UIKit /** A class that contains helper methods to manage camera orientation related tasks. 
*/ -class CameraOrientation { +public class CameraOrientation { - static func getCGImageOrientationForBackCamera(currentDeviceOrientation: UIDeviceOrientation) -> CGImagePropertyOrientation { + public static func getCGImageOrientationForBackCamera(currentDeviceOrientation: UIDeviceOrientation) -> CGImagePropertyOrientation { switch currentDeviceOrientation { case .portrait: return .right // Camera is rotated 90° CW to be upright @@ -27,7 +27,7 @@ class CameraOrientation { } } - static func getCGImageReverseOrientationForBackCamera(currentDeviceOrientation: UIDeviceOrientation) -> CGImagePropertyOrientation { + public static func getCGImageReverseOrientationForBackCamera(currentDeviceOrientation: UIDeviceOrientation) -> CGImagePropertyOrientation { switch currentDeviceOrientation { case .portrait: return .left // Camera is rotated 90° CCW to revert to original orientation @@ -42,7 +42,7 @@ class CameraOrientation { } } - static func getUIImageOrientationForBackCamera(currentDeviceOrientation: UIDeviceOrientation) -> UIImage.Orientation { + public static func getUIImageOrientationForBackCamera(currentDeviceOrientation: UIDeviceOrientation) -> UIImage.Orientation { switch currentDeviceOrientation { case .portrait: return .right @@ -57,7 +57,7 @@ class CameraOrientation { } } - static func getCGImageOrientationForInterface(currentInterfaceOrientation: UIInterfaceOrientation) -> CGImagePropertyOrientation { + public static func getCGImageOrientationForInterface(currentInterfaceOrientation: UIInterfaceOrientation) -> CGImagePropertyOrientation { switch currentInterfaceOrientation { case .portrait: return .right // Camera is rotated 90° CW to be upright @@ -74,12 +74,12 @@ class CameraOrientation { // Since people tend to hold devices in portrait mode by default when using the camera, // we can assume that the camera is in portrait mode when the device orientation is unknown. 
- static func isLandscapeOrientation(currentDeviceOrientation: UIDeviceOrientation) -> Bool { + public static func isLandscapeOrientation(currentDeviceOrientation: UIDeviceOrientation) -> Bool { return currentDeviceOrientation == .landscapeLeft || currentDeviceOrientation == .landscapeRight } } -extension CGImagePropertyOrientation { +public extension CGImagePropertyOrientation { func inverted() -> CGImagePropertyOrientation { switch self { case .up: return .up diff --git a/IOSAccessAssessment/ARCamera/Helpers/FrameRasterizer.swift b/PointNMapShared/Sources/PointNMap/ARCamera/Helpers/FrameRasterizer.swift similarity index 88% rename from IOSAccessAssessment/ARCamera/Helpers/FrameRasterizer.swift rename to PointNMapShared/Sources/PointNMap/ARCamera/Helpers/FrameRasterizer.swift index 35215004..272fb523 100644 --- a/IOSAccessAssessment/ARCamera/Helpers/FrameRasterizer.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/Helpers/FrameRasterizer.swift @@ -7,16 +7,15 @@ import CoreImage import UIKit -import PointNMapShared /** A custom Image that displays a bounding box around the region of processing */ -struct FrameRasterizer { +public struct FrameRasterizer { /** This function creates a CGImage with a bounding box drawn on it. */ - static func create(imageSize: CGSize, frameSize: CGSize) -> CGImage? { + public static func create(imageSize: CGSize, frameSize: CGSize) -> CGImage? 
{ UIGraphicsBeginImageContextWithOptions(imageSize, false, 1.0) guard let context = UIGraphicsGetCurrentContext() else { return nil } diff --git a/IOSAccessAssessment/ARCamera/Utils/ARCameraUtils.swift b/PointNMapShared/Sources/PointNMap/ARCamera/Utils/ARCameraUtils.swift similarity index 80% rename from IOSAccessAssessment/ARCamera/Utils/ARCameraUtils.swift rename to PointNMapShared/Sources/PointNMap/ARCamera/Utils/ARCameraUtils.swift index b58d908b..07af66d9 100644 --- a/IOSAccessAssessment/ARCamera/Utils/ARCameraUtils.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/Utils/ARCameraUtils.swift @@ -7,8 +7,8 @@ import ARKit -class ARCameraUtils { - static func checkDepthSupport() -> Bool { +public class ARCameraUtils { + public static func checkDepthSupport() -> Bool { // Check if LiDAR is available on the device return ARWorldTrackingConfiguration.supportsFrameSemantics(.sceneDepth) || ARWorldTrackingConfiguration.supportsFrameSemantics(.smoothedSceneDepth) diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift index 78c801a6..a4050827 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift @@ -110,7 +110,7 @@ public class AttributeEstimationPipeline: ObservableObject { public func setPrerequisites( accessibilityFeature: EditableAccessibilityFeature ) throws { - let oswElementClass = accessibilityFeature.accessibilityFeatureClass.oswPolicy.oswElementClass + let accessibilityFeatureKind = accessibilityFeature.accessibilityFeatureClass.kind let isMeshEnabled: Bool = captureMeshData != nil var worldPoints: [WorldPoint]? = nil var worldPointsGrid: WorldPointsGrid? 
= nil @@ -121,8 +121,8 @@ public class AttributeEstimationPipeline: ObservableObject { var meshTriangles: [MeshTriangle]? = nil var meshAlignedPlane: Plane? = nil var meshProjectedPlane: ProjectedPlane? = nil - switch(oswElementClass) { - case .Sidewalk: + switch(accessibilityFeatureKind) { + case .sidewalk: if isMeshEnabled { meshContents = try self.getMeshContents(accessibilityFeature: accessibilityFeature) meshPolygons = meshContents?.polygons @@ -211,31 +211,6 @@ public class AttributeEstimationPipeline: ObservableObject { } - public func processIsExistingRequest( - deviceLocation: CLLocationCoordinate2D, - mappingData: CurrentMappingData, - accessibilityFeature: EditableAccessibilityFeature - ) { - /// Threshold needs to be in Map Units - let distanceThreshold = PointNMapConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) - guard let LocationDetails = accessibilityFeature.locationDetails else { - accessibilityFeature.setIsExisting(false) - return - } - let matchedElement: (any OSWElement)? 
= mappingData.getMatchedFeature( - to: LocationDetails, featureClass: accessibilityFeature.accessibilityFeatureClass, - captureId: self.captureImageData?.id, - distanceThreshold: distanceThreshold - ) - guard let matchedElement = matchedElement else { - accessibilityFeature.setIsExisting(false) - return - } - let isExisting = accessibilityFeature.accessibilityFeatureClass.oswPolicy.isExistingFirst - accessibilityFeature.setIsExisting(isExisting) - accessibilityFeature.setOSWElement(oswElement: matchedElement) - } - public func processAttributeRequest( accessibilityFeature: EditableAccessibilityFeature ) throws { diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift index 84d598df..bb3c885d 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift @@ -6,7 +6,6 @@ // import SwiftUI import CoreLocation -import PointNMapShared public extension AttributeEstimationPipeline { func calculateLocation( @@ -14,8 +13,7 @@ public extension AttributeEstimationPipeline { accessibilityFeature: EditableAccessibilityFeature ) throws -> LocationRequestResult { let isMeshEnabled: Bool = self.captureMeshData != nil - let oswElementClass = accessibilityFeature.accessibilityFeatureClass.oswPolicy.oswElementClass - let oswGeometry = oswElementClass.geometry + let oswGeometry = accessibilityFeature.accessibilityFeatureClass.kind?.geometry ?? 
FeatureGeometry.default switch(oswGeometry) { case .linestring: if isMeshEnabled { diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/FeatureGeometry.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/FeatureGeometry.swift new file mode 100644 index 00000000..260db73f --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/FeatureGeometry.swift @@ -0,0 +1,28 @@ +// +// FeatureGeometry.swift +// IOSAccessAssessment +// +// Created by Himanshu on 4/30/26. +// + +public enum FeatureGeometry: String, CaseIterable, Hashable, Codable { + case point + case linestring + case polygon + // case multipolygon // For future use, currently not supported by OSW API + + public var description: String { + switch self { + case .point: + return "Point" + case .linestring: + return "LineString" + case .polygon: + return "Polygon" + } + } +} + +public extension FeatureGeometry { + static let `default`: FeatureGeometry = .point +} diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift index 634faced..8fedcce7 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift @@ -14,7 +14,7 @@ public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Compa public let kind: AccessibilityFeatureKind? 
/** - Segmentation-related SharedAppConstants + Segmentation-related constants */ /// Grayscale value output for the accessibility feature class, by the relevant segmentation model public let grayscaleValue: Float @@ -24,13 +24,13 @@ public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Compa public let color: CIColor /** - SharedAppConstants related to mesh + Constants related to mesh */ /// Optional mesh classification for the segmentation class public let meshClassification: Set /** - Post-Processing related SharedAppConstants. + Post-Processing related Constants. */ /// Optional bounds for the segmentation class. Is kept optional to prevent unnecessary dimension based masking. public let bounds: CGRect? @@ -38,13 +38,9 @@ public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Compa public let unionOfMasksPolicy: UnionOfMasksPolicy /// Properties related to mesh post-processing public let meshInstancePolicy: MeshInstancePolicy - /// Attributes associated with the accessibility feature class - public let attributes: Set - /// Experimental attributes associated with the accessibility feature class - public let experimentalAttributes: Set /** - Mapping-related SharedAppConstants + Mapping-related Constants */ // public let oswPolicy: OSWPolicy @@ -52,9 +48,10 @@ public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Compa id: String, name: String, kind: AccessibilityFeatureKind? = nil, grayscaleValue: Float, labelValue: UInt8, color: CIColor, bounds: CGRect? 
= nil, unionOfMasksPolicy: UnionOfMasksPolicy = .default, - meshClassification: Set = [], meshInstancePolicy: MeshInstancePolicy = .default, - attributes: Set = [], - experimentalAttributes: Set = [], + meshClassification: Set = [], + meshInstancePolicy: MeshInstancePolicy = .default, +// attributes: Set = [], +// experimentalAttributes: Set = [], // oswPolicy: OSWPolicy = .default ) { self.id = id @@ -67,8 +64,8 @@ public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Compa self.unionOfMasksPolicy = unionOfMasksPolicy self.meshClassification = meshClassification self.meshInstancePolicy = meshInstancePolicy - self.attributes = attributes - self.experimentalAttributes = experimentalAttributes +// self.attributes = attributes +// self.experimentalAttributes = experimentalAttributes // self.oswPolicy = oswPolicy } @@ -86,6 +83,12 @@ public struct AccessibilityFeatureClassConfig { public let classes: [AccessibilityFeatureClass] public let inputSize: CGSize + public init(modelURL: URL?, classes: [AccessibilityFeatureClass], inputSize: CGSize) { + self.modelURL = modelURL + self.classes = classes + self.inputSize = inputSize + } + public var classNames: [String] { return classes.map { $0.name } } diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift index bf492ed7..3b8a7855 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift @@ -19,4 +19,33 @@ public enum AccessibilityFeatureKind: String, Identifiable, Codable, CaseIterabl public var id: String { return self.rawValue } + + public var geometry: FeatureGeometry { + switch self { + case .sidewalk: return .linestring + case .building: return .polygon + case .pole, .trafficLight, .trafficSign: return .point + 
default: return .point + } + } + + /// Attributes associated with the accessibility feature class + public var attributes: Set + { + switch self { + case .sidewalk: return [ + .width, .runningSlope, .crossSlope, .surfaceIntegrity, + .widthLegacy, .runningSlopeLegacy, .crossSlopeLegacy, + .widthFromImage, .runningSlopeFromImage, .crossSlopeFromImage + ] + default : return [] + } + } + + /// Experimental attributes associated with the accessibility feature class + public var experimentalAttributes: Set { + switch self { + default : return [] + } + } } diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift index 94f2f44c..d6741648 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift @@ -22,12 +22,7 @@ public extension AccessibilityFeatureConfig { id: "sidewalk", name: "Sidewalk", kind: .sidewalk, grayscaleValue: 1.0 / 255.0, labelValue: 1, color: CIColor(red: 0.957, green: 0.137, blue: 0.910), - meshClassification: [.floor], - attributes: [ - .width, .runningSlope, .crossSlope, .surfaceIntegrity, - .widthLegacy, .runningSlopeLegacy, .crossSlopeLegacy, - .widthFromImage, .runningSlopeFromImage, .crossSlopeFromImage - ], + meshClassification: [.floor] ), AccessibilityFeatureClass( diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/AccessibilityFeature.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/AccessibilityFeature.swift index 315d0290..0bdabe80 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/AccessibilityFeature.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/AccessibilityFeature.swift @@ -7,10 +7,10 @@ import Foundation import CoreLocation 
-enum AccessibilityFeatureError: Error, LocalizedError { +public enum AccessibilityFeatureError: Error, LocalizedError { case attributeValueMismatch(attribute: AccessibilityFeatureAttribute, value: AccessibilityFeatureAttribute.Value) - var errorDescription: String? { + public var errorDescription: String? { switch self { case .attributeValueMismatch(let attribute, let value): return "The value \(value) does not match the expected type for attribute \(attribute)." @@ -18,7 +18,7 @@ enum AccessibilityFeatureError: Error, LocalizedError { } } -protocol AccessibilityFeatureProtocol: Identifiable, Equatable { +public protocol AccessibilityFeatureProtocol: Identifiable, Equatable { var id: UUID { get } var accessibilityFeatureClass: AccessibilityFeatureClass { get } diff --git a/IOSAccessAssessment/Annotation/AnnotationOption.swift b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationOption.swift similarity index 59% rename from IOSAccessAssessment/Annotation/AnnotationOption.swift rename to PointNMapShared/Sources/PointNMap/Annotation/AnnotationOption.swift index 5140cf32..8a0243f9 100644 --- a/IOSAccessAssessment/Annotation/AnnotationOption.swift +++ b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationOption.swift @@ -5,29 +5,29 @@ // Created by Himanshu on 5/25/25. 
// -protocol AnnotationOptionProtocol: RawRepresentable, CaseIterable, Hashable where RawValue == String {} +public protocol AnnotationOptionProtocol: RawRepresentable, CaseIterable, Hashable where RawValue == String {} -enum AnnotationOptionFeature: String, CaseIterable, Hashable, AnnotationOptionProtocol { +public enum AnnotationOptionFeature: String, CaseIterable, Hashable, AnnotationOptionProtocol { case agree = "I agree with this feature annotation" case discard = "I wish to discard this feature annotation" - static let `default` = AnnotationOptionFeature.agree + public static let `default` = AnnotationOptionFeature.agree } -enum AnnotationOptionFeatureClass: String, CaseIterable, Hashable, AnnotationOptionProtocol { +public enum AnnotationOptionFeatureClass: String, CaseIterable, Hashable, AnnotationOptionProtocol { case agree = "I agree with this class annotation" case missingInstances = "Annotation is missing some instances" // case misidentified = "The class annotation is misidentified" case discard = "I wish to discard this class annotation" - static let `default` = AnnotationOptionFeatureClass.agree + public static let `default` = AnnotationOptionFeatureClass.agree } -enum AnnotationOption: Hashable { +public enum AnnotationOption: Hashable { case individualOption(AnnotationOptionFeature) case classOption(AnnotationOptionFeatureClass) - var rawValue: String { + public var rawValue: String { switch self { case .individualOption(let option): return option.rawValue case .classOption(let option): return option.rawValue diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshDefinitions.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshDefinitions.swift index 2e159309..ffd57af3 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshDefinitions.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Definitions/MeshDefinitions.swift @@ -17,6 +17,15 @@ public struct 
MeshContents: Sendable { public var colorG8: Int public var colorB8: Int + public init(positions: [packed_float3], indices: [UInt32], classifications: [UInt8]? = nil, colorR8: Int, colorG8: Int, colorB8: Int) { + self.positions = positions + self.indices = indices + self.classifications = classifications + self.colorR8 = colorR8 + self.colorG8 = colorG8 + self.colorB8 = colorB8 + } + /// - Warning: Ideally, this property should be avoided for performance reasons. public var polygons: [MeshPolygon] { var result: [MeshPolygon] = [] diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshGPUSnapshot.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshGPUSnapshot.swift index e339b9c1..409b0889 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshGPUSnapshot.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/MeshGPUSnapshot.swift @@ -11,8 +11,8 @@ import RealityKit Functionality to capture ARMeshAnchor data as a GPU-friendly snapshot */ public final class MeshGPUSnapshotGenerator: NSObject { - // MARK: These SharedAppConstants can be made configurable later - // But make sure that the snapshot from MeshContents extension continues to use the original SharedAppConstants. + // MARK: These constants can be made configurable later + // But make sure that the snapshot from MeshContents extension continues to use the original constants. 
private let defaultBufferSize: Int = 1024 private let vertexElemSize: Int = MemoryLayout.stride * 3 private let vertexOffset: Int = 0 diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift index dda5a497..562f0259 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/Utils/MeshRasterizer.swift @@ -7,7 +7,6 @@ import CoreImage import UIKit -import PointNMapShared /** Functions to rasterize mesh triangles into an image. diff --git a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift index ecd74edc..39a1b3d1 100644 --- a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/Functions/SegmentationModelRequestProcessor.swift @@ -32,7 +32,7 @@ public struct SegmentationModelRequestProcessor { public var selectedClasses: [AccessibilityFeatureClass] = [] public init(selectedClasses: [AccessibilityFeatureClass]) throws { - guard let modelURL = SharedAppConstants.SelectedAccessibilityFeatureConfig.modelURL else { + guard let modelURL = PointNMapConstants.SelectedAccessibilityFeatureConfig.modelURL else { throw SegmentationModelError.modelLoadingError } let configuration: MLModelConfiguration = MLModelConfiguration() @@ -70,7 +70,7 @@ public struct SegmentationModelRequestProcessor { let uniqueGrayScaleValues = CVPixelBufferUtils.extractUniqueGrayscaleValues(from: segmentationBuffer) - let grayscaleValuesToClassMap = SharedAppConstants.SelectedAccessibilityFeatureConfig.labelToClassMap + let grayscaleValuesToClassMap = 
PointNMapConstants.SelectedAccessibilityFeatureConfig.labelToClassMap var segmentedClasses = uniqueGrayScaleValues.compactMap { grayscaleValuesToClassMap[$0] } let segmentedClassSet = Set(segmentedClasses) segmentedClasses = self.selectedClasses.filter{ segmentedClassSet.contains($0) } From 6db1595d5535e5faca6aec0847efc006d3d4b91c Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Fri, 1 May 2026 11:23:12 -0700 Subject: [PATCH 12/14] Add Capture and Annotation based files to framework --- IOSAccessAssessment.xcodeproj/project.pbxproj | 24 -------- .../ARCamera/TestCameraViewController.swift | 1 + .../PointNMap}/ARCamera/ARCameraManager.swift | 10 ++-- .../ARCamera/ARCameraViewController.swift | 57 ++++++++++--------- .../Annotation/AnnotationImageManager.swift | 16 +++--- .../AnnotationImageViewController.swift | 1 + .../Mesh/CapturedMeshSnapshot.swift | 4 ++ .../Shared/Definitions/CaptureData.swift | 6 ++ 8 files changed, 56 insertions(+), 63 deletions(-) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ARCamera/ARCameraManager.swift (99%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/ARCamera/ARCameraViewController.swift (90%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/Annotation/AnnotationImageManager.swift (98%) rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/Annotation/AnnotationImageViewController.swift (99%) diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index d8ebd115..3be2bdda 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -31,7 +31,6 @@ A312FE202FA3EC710044808E /* PointNMapShaderTypes.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; }; A312FF232FA430510044808E /* AccessibilityFeatureKindExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FF222FA4304D0044808E /* 
AccessibilityFeatureKindExtension.swift */; }; A312FF2B2FA436CB0044808E /* IsExistingExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FF2A2FA436C60044808E /* IsExistingExtension.swift */; }; - A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */; }; A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */; }; A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */; }; A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943472EE7C0D800C4C1BC /* OSWElementClass.swift */; }; @@ -49,7 +48,6 @@ A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A355471D2EC1A47200F43AFD /* SharedAppData.swift */; }; A35547C82EC1B0DB00F43AFD /* CurrentMappedFeaturesData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */; }; A35547CC2EC3018E00F43AFD /* AnnotationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547CB2EC3018C00F43AFD /* AnnotationView.swift */; }; - A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547CD2EC3048200F43AFD /* AnnotationImageViewController.swift */; }; A35A8BCF2E5D0CD100CC8AA7 /* WorkspaceSelectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */; }; A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */; }; A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */; }; @@ -71,7 +69,6 
@@ A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */; }; A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3A2DD9B03000455E45 /* OSMElement.swift */; }; A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */; }; - A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */; }; A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */; }; A3B61FC52F76480B0052AE2C /* EnvironmentService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */; }; A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */; }; @@ -98,7 +95,6 @@ A3FE16672E18C81800DAE5BE /* LocationCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FE16662E18C81500DAE5BE /* LocationCoder.swift */; }; A3FE166C2E1C29CB00DAE5BE /* OtherDetailsCoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FE166B2E1C29C800DAE5BE /* OtherDetailsCoder.swift */; }; A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FE166D2E1C2AEF00DAE5BE /* SegmentationEncoder.swift */; }; - A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */; }; A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3FFAA792DE01A0F002B99BD /* ARCameraView.swift */; }; A3FFAA832DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage in Sources */ = {isa = 
PBXBuildFile; fileRef = A3FFAA822DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage */; }; CA924A932CEB9AB000FCA928 /* ChangesetService.swift in Sources */ = {isa = PBXBuildFile; fileRef = CA924A922CEB9AB000FCA928 /* ChangesetService.swift */; }; @@ -206,7 +202,6 @@ A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShaderTypes.framework; sourceTree = BUILT_PRODUCTS_DIR; }; A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureKindExtension.swift; sourceTree = ""; }; A312FF2A2FA436C60044808E /* IsExistingExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IsExistingExtension.swift; sourceTree = ""; }; - A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraViewController.swift; sourceTree = ""; }; A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolicy.swift; sourceTree = ""; }; A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWGeometry.swift; sourceTree = ""; }; A32943472EE7C0D800C4C1BC /* OSWElementClass.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWElementClass.swift; sourceTree = ""; }; @@ -224,7 +219,6 @@ A355471D2EC1A47200F43AFD /* SharedAppData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppData.swift; sourceTree = ""; }; A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentMappedFeaturesData.swift; sourceTree = ""; }; A35547CB2EC3018C00F43AFD /* AnnotationView.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = AnnotationView.swift; sourceTree = ""; }; - A35547CD2EC3048200F43AFD /* AnnotationImageViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageViewController.swift; sourceTree = ""; }; A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceSelectionView.swift; sourceTree = ""; }; A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceService.swift; sourceTree = ""; }; A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InvalidContentView.swift; sourceTree = ""; }; @@ -248,7 +242,6 @@ A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationFeatureDetailView.swift; sourceTree = ""; }; A39C9F3A2DD9B03000455E45 /* OSMElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMElement.swift; sourceTree = ""; }; A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIConstants.swift; sourceTree = ""; }; - A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationImageManager.swift; sourceTree = ""; }; A3A739442DD4BA3B0073C7D2 /* CustomXMLParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomXMLParser.swift; sourceTree = ""; }; A3B61FC42F7647FC0052AE2C /* EnvironmentService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EnvironmentService.swift; sourceTree = ""; }; A3B61FCA2F7903660052AE2C /* OSMMapDataResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
OSMMapDataResponse.swift; sourceTree = ""; }; @@ -276,7 +269,6 @@ A3FE166A2E18DD2A00DAE5BE /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; path = Info.plist; sourceTree = ""; }; A3FE166B2E1C29C800DAE5BE /* OtherDetailsCoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OtherDetailsCoder.swift; sourceTree = ""; }; A3FE166D2E1C2AEF00DAE5BE /* SegmentationEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationEncoder.swift; sourceTree = ""; }; - A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraManager.swift; sourceTree = ""; }; A3FFAA792DE01A0F002B99BD /* ARCameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARCameraView.swift; sourceTree = ""; }; A3FFAA822DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2_53_640_640.mlpackage; sourceTree = ""; }; CA924A922CEB9AB000FCA928 /* ChangesetService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChangesetService.swift; sourceTree = ""; }; @@ -406,7 +398,6 @@ A30801512EC0984F00B1BA3A /* AccessibilityFeature */, A305B05A2E1887AE00ECCF9B /* LocalDataset */, A34B70CC2DDFE638007B191F /* ARCamera */, - A3E84ECE2DDAC7980096A645 /* Annotation */, A33EB5AE2F761C83008ABFB7 /* ComputerVision */, 55659C092BB785EA0094DF01 /* MachineLearning */, 55659C0E2BB786240094DF01 /* View */, @@ -608,8 +599,6 @@ A34B70CC2DDFE638007B191F /* ARCamera */ = { isa = PBXGroup; children = ( - A3FFAA742DE00F2C002B99BD /* ARCameraManager.swift */, - A31A1E732EAC4265008B30B7 /* ARCameraViewController.swift */, A3EE6E4D2F5A258700F515E6 /* TestCameraManager.swift */, A3EE6E4F2F5A3EF100F515E6 /* TestCameraViewController.swift */, ); @@ -834,15 +823,6 @@ path = Mesh; sourceTree = ""; }; - A3E84ECE2DDAC7980096A645 /* 
Annotation */ = { - isa = PBXGroup; - children = ( - A3A4139F2EC86D210039298C /* AnnotationImageManager.swift */, - A35547CD2EC3048200F43AFD /* AnnotationImageViewController.swift */, - ); - path = Annotation; - sourceTree = ""; - }; A3EE6E442F57FE4400F515E6 /* TestMode */ = { isa = PBXGroup; children = ( @@ -1185,7 +1165,6 @@ A3A739452DD4BA3F0073C7D2 /* CustomXMLParser.swift in Sources */, A35E051C2EDFB094003C26CF /* OSMNode.swift in Sources */, A32D66532F7C3F2F00DC4173 /* OSWMultiPolygon.swift in Sources */, - A35547CE2EC3048700F43AFD /* AnnotationImageViewController.swift in Sources */, A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */, CAA947792CDE700A000C6918 /* AuthService.swift in Sources */, A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */, @@ -1200,7 +1179,6 @@ A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */, CAA9477B2CDE70D9000C6918 /* KeychainService.swift in Sources */, A32943552EE8186E00C4C1BC /* OSWPoint.swift in Sources */, - A3A413A02EC86D410039298C /* AnnotationImageManager.swift in Sources */, CAA947762CDE6FBD000C6918 /* LoginView.swift in Sources */, 3222F91A2B622DFD0019A079 /* IOSAccessAssessmentApp.swift in Sources */, A32943572EE81BF700C4C1BC /* OSWLineString.swift in Sources */, @@ -1220,11 +1198,9 @@ A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */, A37E3E9B2EFB8F7500B07B77 /* HeadingCoder.swift in Sources */, A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */, - A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */, DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */, A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsCoder.swift in Sources */, A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */, - A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, A3EE6E502F5A3EF100F515E6 /* TestCameraViewController.swift in Sources */, A3420F1C2E8D82E700CD617E /* 
APIEnvironment.swift in Sources */, diff --git a/IOSAccessAssessment/ARCamera/TestCameraViewController.swift b/IOSAccessAssessment/ARCamera/TestCameraViewController.swift index d1aeda05..1f21e991 100644 --- a/IOSAccessAssessment/ARCamera/TestCameraViewController.swift +++ b/IOSAccessAssessment/ARCamera/TestCameraViewController.swift @@ -11,6 +11,7 @@ import ARKit import CoreImage import CoreImage.CIFilterBuiltins import simd +import PointNMapShared @MainActor protocol TestCameraProcessingOutputConsumer: AnyObject { diff --git a/IOSAccessAssessment/ARCamera/ARCameraManager.swift b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraManager.swift similarity index 99% rename from IOSAccessAssessment/ARCamera/ARCameraManager.swift rename to PointNMapShared/Sources/PointNMap/ARCamera/ARCameraManager.swift index 610a55c4..20e6bf9b 100644 --- a/IOSAccessAssessment/ARCamera/ARCameraManager.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraManager.swift @@ -8,7 +8,6 @@ import ARKit import RealityKit import Combine import simd -import PointNMapShared enum ARCameraManagerError: Error, LocalizedError { case sessionConfigurationFailed @@ -237,7 +236,8 @@ final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessi selectedClasses: [AccessibilityFeatureClass], segmentationPipeline: SegmentationARPipeline, metalContext: MetalContext?, isEnhancedAnalysisEnabled: Bool, - cameraOutputImageCallback: ((any CaptureImageDataProtocol) -> Void)? = nil + cameraOutputImageCallback: ((any CaptureImageDataProtocol) -> Void)? 
= nil, + pixelBufferPoolSize: CGSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize ) throws { self.selectedClasses = selectedClasses self.segmentationPipeline = segmentationPipeline @@ -248,7 +248,7 @@ final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessi self.metalContext = metalContext self.isEnhancedAnalysisEnabled = isEnhancedAnalysisEnabled self.meshGPUSnapshotGenerator = MeshGPUSnapshotGenerator(device: metalContext.device) - try setUpPreAllocatedPixelBufferPools(size: SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize) + try setUpPreAllocatedPixelBufferPools(size: pixelBufferPoolSize) self.cameraOutputImageCallback = cameraOutputImageCallback self.isConfigured = true @@ -371,7 +371,8 @@ extension ARCameraManager { depthImage: CIImage? = nil, interfaceOrientation: UIInterfaceOrientation, cameraTransform: simd_float4x4, cameraIntrinsics: simd_float3x3, - highPriority: Bool = false + highPriority: Bool = false, + croppedSize: CGSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize ) async throws -> ARCameraImageResults { guard let cameraCroppedPixelBufferPool = cameraCroppedPixelBufferPool, let segmentationPixelBufferPool = segmentationMaskPixelBufferPool else { @@ -382,7 +383,6 @@ extension ARCameraManager { } /// Pre-process the image: orient, center-crop, and back to pixel buffer let originalSize: CGSize = image.extent.size - let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation ) diff --git a/IOSAccessAssessment/ARCamera/ARCameraViewController.swift b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift similarity index 90% rename from IOSAccessAssessment/ARCamera/ARCameraViewController.swift rename to PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift index 
9ae80aa9..8ee1fffe 100644 --- a/IOSAccessAssessment/ARCamera/ARCameraViewController.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift @@ -15,20 +15,22 @@ import simd /// The consumer of post-processed camera outputs (e.g., overlay images). @MainActor protocol ARSessionCameraProcessingOutputConsumer: AnyObject { - func cameraOutputImage(_ delegate: ARSessionCameraProcessingDelegate, - metalContext: MetalContext, - segmentationImage: CIImage?, - segmentationBoundingFrameImage: CIImage?, - for frame: ARFrame? + func cameraOutputImage( + _ delegate: ARSessionCameraProcessingDelegate, + metalContext: MetalContext, + segmentationImage: CIImage?, + segmentationBoundingFrameImage: CIImage?, + for frame: ARFrame? ) - func cameraOutputMesh(_ delegate: ARSessionCameraProcessingDelegate, - metalContext: MetalContext, - meshGPUSnapshot: MeshGPUSnapshot, - for meshAnchors: [ARMeshAnchor]?, - cameraTransform: simd_float4x4, - cameraIntrinsics: simd_float3x3, - segmentationLabelImage: CIImage, - accessibilityFeatureClasses: [AccessibilityFeatureClass] + func cameraOutputMesh( + _ delegate: ARSessionCameraProcessingDelegate, + metalContext: MetalContext, + meshGPUSnapshot: MeshGPUSnapshot, + for meshAnchors: [ARMeshAnchor]?, + cameraTransform: simd_float4x4, + cameraIntrinsics: simd_float3x3, + segmentationLabelImage: CIImage, + accessibilityFeatureClasses: [AccessibilityFeatureClass] ) func getMeshRecordDetails() -> ( records: [AccessibilityFeatureClass: SegmentationMeshRecord], @@ -333,10 +335,12 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO self.meshOtherDetails = nil } - func cameraOutputImage(_ delegate: ARSessionCameraProcessingDelegate, - metalContext: MetalContext, - segmentationImage: CIImage?, segmentationBoundingFrameImage: CIImage?, - for frame: ARFrame?) 
{ + func cameraOutputImage( + _ delegate: ARSessionCameraProcessingDelegate, + metalContext: MetalContext, + segmentationImage: CIImage?, segmentationBoundingFrameImage: CIImage?, + for frame: ARFrame? + ) { if let segmentationImage = segmentationImage, let segmentationCGImage = metalContext.ciContext.createCGImage(segmentationImage, from: segmentationImage.extent) { self.segmentationImageView.image = UIImage(cgImage: segmentationCGImage) @@ -348,18 +352,19 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO } } - func cameraOutputMesh(_ delegate: ARSessionCameraProcessingDelegate, - metalContext: MetalContext, - meshGPUSnapshot: MeshGPUSnapshot, - for meshAnchors: [ARMeshAnchor]?, - cameraTransform: simd_float4x4, - cameraIntrinsics: simd_float3x3, - segmentationLabelImage: CIImage, - accessibilityFeatureClasses: [AccessibilityFeatureClass] + func cameraOutputMesh( + _ delegate: ARSessionCameraProcessingDelegate, + metalContext: MetalContext, + meshGPUSnapshot: MeshGPUSnapshot, + for meshAnchors: [ARMeshAnchor]?, + cameraTransform: simd_float4x4, + cameraIntrinsics: simd_float3x3, + segmentationLabelImage: CIImage, + accessibilityFeatureClasses: [AccessibilityFeatureClass] ) { var totalVertexCount = 0 for accessibilityFeatureClass in accessibilityFeatureClasses { - guard SharedAppConstants.SelectedAccessibilityFeatureConfig.classes.contains(accessibilityFeatureClass) else { + guard PointNMapConstants.SelectedAccessibilityFeatureConfig.classes.contains(accessibilityFeatureClass) else { print("Invalid segmentation class: \(accessibilityFeatureClass)") continue } diff --git a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageManager.swift similarity index 98% rename from IOSAccessAssessment/Annotation/AnnotationImageManager.swift rename to PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageManager.swift index 238df3e4..0ff14b0c 100644 --- 
a/IOSAccessAssessment/Annotation/AnnotationImageManager.swift +++ b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageManager.swift @@ -5,8 +5,8 @@ // Created by Himanshu on 11/15/25. // import SwiftUI -import DequeModule -import PointNMapShared +import Combine +import simd enum AnnotationImageManagerError: Error, LocalizedError { case notConfigured @@ -303,7 +303,7 @@ extension AnnotationImageManager { let cameraImage = captureImageData.cameraImage let interfaceOrientation = captureImageData.interfaceOrientation // let originalSize = captureImageData.originalSize - let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -401,7 +401,7 @@ extension AnnotationImageManager { ) let interfaceOrientation = captureImageData.interfaceOrientation - let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -512,7 +512,7 @@ extension AnnotationImageManager { interfaceOrientation: UIInterfaceOrientation ) throws -> CIImage { let raterizedFeaturesCIImage = CIImage(cgImage: raterizedFeaturesImage) - let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -550,7 +550,7 @@ extension AnnotationImageManager { let rasterizedMeshCIImage = CIImage(cgImage: rasterizedMeshImage) let 
interfaceOrientation = captureMeshData.interfaceOrientation - let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation @@ -625,7 +625,7 @@ extension AnnotationImageManager { ) else { return nil } let rasterizedPlaneCIImage = CIImage(cgImage: rasterizedPlaneCGImage) let interfaceOrientation = captureImageData.interfaceOrientation - let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation ) @@ -651,7 +651,7 @@ extension AnnotationImageManager { ) else { return nil } let rasterizedDamageDetectionCIImage = CIImage(cgImage: rasterizedDamageDetectionCGImage) let interfaceOrientation = captureImageData.interfaceOrientation - let croppedSize = SharedAppConstants.SelectedAccessibilityFeatureConfig.inputSize + let croppedSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize let imageOrientation: CGImagePropertyOrientation = CameraOrientation.getCGImageOrientationForInterface( currentInterfaceOrientation: interfaceOrientation ) diff --git a/IOSAccessAssessment/Annotation/AnnotationImageViewController.swift b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageViewController.swift similarity index 99% rename from IOSAccessAssessment/Annotation/AnnotationImageViewController.swift rename to PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageViewController.swift index 4a495205..1ddf517a 100644 --- a/IOSAccessAssessment/Annotation/AnnotationImageViewController.swift +++ 
b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageViewController.swift @@ -6,6 +6,7 @@ // import SwiftUI +import Combine @MainActor protocol AnnotationImageProcessingOutputConsumer: AnyObject { diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/CapturedMeshSnapshot.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/CapturedMeshSnapshot.swift index eb4ac2f6..b7db8a83 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/CapturedMeshSnapshot.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Mesh/CapturedMeshSnapshot.swift @@ -30,6 +30,8 @@ public enum CapturedMeshSnapshotError: Error, LocalizedError { @MainActor public final class CapturedMeshSnapshotGenerator { + public init() { } + public func snapshotSegmentationRecords( from: [AccessibilityFeatureClass: SegmentationMeshRecord], vertexStride: Int, @@ -104,6 +106,8 @@ public final class CapturedMeshSnapshotGenerator { Can be used for processing the mesh snapshot, even outside the main actor. */ public final class CapturedMeshSnapshotHelper { + public init() { } + /** TODO: Instead of simd3, use packed simd types that match the vertex format in the snapshot to avoid unnecessary conversions. */ diff --git a/PointNMapShared/Sources/PointNMap/Shared/Definitions/CaptureData.swift b/PointNMapShared/Sources/PointNMap/Shared/Definitions/CaptureData.swift index 96472d38..9ec884aa 100644 --- a/PointNMapShared/Sources/PointNMap/Shared/Definitions/CaptureData.swift +++ b/PointNMapShared/Sources/PointNMap/Shared/Definitions/CaptureData.swift @@ -14,6 +14,12 @@ public struct CaptureImageDataResults: Sendable { public let segmentedClasses: [AccessibilityFeatureClass] /// Map of detected accessibility features with their UUIDs. Not currently used but reserved for potential future use. 
public let detectedFeatureMap: [UUID: DetectedAccessibilityFeature] + + public init(segmentationLabelImage: CIImage, segmentedClasses: [AccessibilityFeatureClass], detectedFeatureMap: [UUID : DetectedAccessibilityFeature]) { + self.segmentationLabelImage = segmentationLabelImage + self.segmentedClasses = segmentedClasses + self.detectedFeatureMap = detectedFeatureMap + } } public struct CaptureMeshDataResults: Sendable { From 643cb78c45f26a6faf0419482bf64ca3ec61dde8 Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Fri, 1 May 2026 16:22:38 -0700 Subject: [PATCH 13/14] Major movements and updates of ARCameraView, AnnotationView and related content --- IOSAccessAssessment.xcodeproj/project.pbxproj | 50 +- .../Extensions/IsExistingExtension.swift | 6 +- .../MappedAccessibilityFeature.swift | 35 +- .../MappedEditableAccessibilityFeature.swift | 77 +++ .../AccessibilityFeatureEncoder.swift | 8 +- .../AccessibilityFeatureSnapshot.swift | 4 +- .../Definitions/CurrentMappingData.swift | 20 +- .../TDEI/OSM/MappingGeometryExtension.swift | 22 + .../TDEI/OSW/OSWElementClass.swift | 14 +- .../TDEI/OSW/OSWGeometry.swift | 39 -- .../APIChangesetUploadController.swift | 52 +- .../Transmission/APITransmissionHelpers.swift | 1 + IOSAccessAssessment/View/AnnotationView.swift | 23 +- IOSAccessAssessment/View/SetupView.swift | 1 + .../SubView/AnnotationFeatureDetailView.swift | 476 ++----------- .../View/TestMode/TestListView.swift | 1 + .../PointNMap/ARCamera/ARCameraManager.swift | 189 +++--- .../ARCamera/ARCameraViewController.swift | 68 +- .../AttributeEstimationPipeline.swift | 10 +- .../OtherAttributeExtensionLegacy.swift | 6 +- .../Location/LocationExtension.swift | 14 +- .../Location/LocationFromImageExtension.swift | 8 +- .../Location/LocationFromMeshExtension.swift | 2 +- .../OtherAttributes/CrossSlopeExtension.swift | 6 +- .../RunninSlopeExtension.swift | 6 +- .../SurfaceIntegrityExtension.swift | 8 +- .../OtherAttributes/WidthExtension.swift | 6 +- 
.../Extensions/UtilityExtension.swift | 12 +- .../Config/AccessibilityFeatureConfig.swift | 4 +- .../Config/AccessibilityFeatureKind.swift | 7 +- .../EditableAccessibilityFeature.swift | 70 +- .../Annotation/AnnotationImageManager.swift | 80 ++- .../AnnotationImageViewController.swift | 36 +- .../Segmentation/SegmentationARPipeline.swift | 2 + .../SegmentationAnnotationPipeline.swift | 5 +- .../Definitions/MappingGeometry.swift} | 8 +- .../PointNMap/Shared/PointNMapConstants.swift | 2 +- .../PointNMap/Shared/SharedBaseContext.swift | 17 + .../PointNMap/Shared/SharedBaseData.swift | 44 ++ .../PointNMap}/Shared/Utils/SafeDeque.swift | 0 .../PointNMap/View/ARCameraViewBase.swift | 353 ++++++++++ .../PointNMap/View/AnnotationViewBase.swift | 623 ++++++++++++++++++ .../AnnotationFeatureDetailViewBase.swift | 394 +++++++++++ .../View/SubView/InvalidContentView.swift | 0 .../PointNMap/View/UI/CustomPicker.swift | 34 + .../PointNMap/View/UI/ProgressBar.swift | 22 + .../PointNMap/View/UI/SpinnerView.swift | 24 + 47 files changed, 2100 insertions(+), 789 deletions(-) rename {PointNMapShared/Sources/PointNMap => IOSAccessAssessment}/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift (70%) create mode 100644 IOSAccessAssessment/AccessibilityFeature/Definitions/MappedEditableAccessibilityFeature.swift create mode 100644 IOSAccessAssessment/TDEI/OSM/MappingGeometryExtension.swift delete mode 100644 IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift rename PointNMapShared/Sources/PointNMap/{AccessibilityFeature/Components/FeatureGeometry.swift => Shared/Definitions/MappingGeometry.swift} (69%) create mode 100644 PointNMapShared/Sources/PointNMap/Shared/SharedBaseContext.swift create mode 100644 PointNMapShared/Sources/PointNMap/Shared/SharedBaseData.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/Shared/Utils/SafeDeque.swift (100%) create mode 100644 PointNMapShared/Sources/PointNMap/View/ARCameraViewBase.swift create mode 100644 
PointNMapShared/Sources/PointNMap/View/AnnotationViewBase.swift create mode 100644 PointNMapShared/Sources/PointNMap/View/SubView/AnnotationFeatureDetailViewBase.swift rename {IOSAccessAssessment => PointNMapShared/Sources/PointNMap}/View/SubView/InvalidContentView.swift (100%) create mode 100644 PointNMapShared/Sources/PointNMap/View/UI/CustomPicker.swift create mode 100644 PointNMapShared/Sources/PointNMap/View/UI/ProgressBar.swift create mode 100644 PointNMapShared/Sources/PointNMap/View/UI/SpinnerView.swift diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 3be2bdda..4058b90b 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -31,8 +31,11 @@ A312FE202FA3EC710044808E /* PointNMapShaderTypes.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */; }; A312FF232FA430510044808E /* AccessibilityFeatureKindExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */; }; A312FF2B2FA436CB0044808E /* IsExistingExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A312FF2A2FA436C60044808E /* IsExistingExtension.swift */; }; + A31DB99E2FA54BEB00DDA385 /* DequeModule in Frameworks */ = {isa = PBXBuildFile; productRef = A31DB99D2FA54BEB00DDA385 /* DequeModule */; }; + A322B9042FA53B6F00F4D488 /* MappedEditableAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A322B9032FA53B6800F4D488 /* MappedEditableAccessibilityFeature.swift */; }; + A322B9052FA53B9B00F4D488 /* MappedAccessibilityFeature.swift in Sources */ = {isa = PBXBuildFile; fileRef = A322B8082FA5287300F4D488 /* MappedAccessibilityFeature.swift */; }; A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */; }; - A32943462EE7C07E00C4C1BC /* 
OSWGeometry.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */; }; + A32943462EE7C07E00C4C1BC /* MappingGeometryExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943452EE7C07A00C4C1BC /* MappingGeometryExtension.swift */; }; A32943482EE7C0DD00C4C1BC /* OSWElementClass.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32943472EE7C0D800C4C1BC /* OSWElementClass.swift */; }; A329434C2EE7CFE800C4C1BC /* OSWField.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329434B2EE7CFE600C4C1BC /* OSWField.swift */; }; A32943502EE80EC400C4C1BC /* OSMRelation.swift in Sources */ = {isa = PBXBuildFile; fileRef = A329434F2EE80EC200C4C1BC /* OSMRelation.swift */; }; @@ -43,14 +46,12 @@ A32D66532F7C3F2F00DC4173 /* OSWMultiPolygon.swift in Sources */ = {isa = PBXBuildFile; fileRef = A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */; }; A33EB5AB2F76080E008ABFB7 /* APIEndpoint.swift in Sources */ = {isa = PBXBuildFile; fileRef = A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */; }; A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */; }; - A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */ = {isa = PBXBuildFile; fileRef = A34509D72FA1A6FA003157B0 /* SafeDeque.swift */; }; A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */; }; A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A355471D2EC1A47200F43AFD /* SharedAppData.swift */; }; A35547C82EC1B0DB00F43AFD /* CurrentMappedFeaturesData.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */; }; A35547CC2EC3018E00F43AFD /* AnnotationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35547CB2EC3018C00F43AFD /* AnnotationView.swift */; }; A35A8BCF2E5D0CD100CC8AA7 
/* WorkspaceSelectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */; }; A35A8BD12E5D0D1200CC8AA7 /* WorkspaceService.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */; }; - A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */; }; A35E05162EDEA050003C26CF /* APIChangesetUploadController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */; }; A35E051A2EDFB017003C26CF /* OSMPayload.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E05192EDFB015003C26CF /* OSMPayload.swift */; }; A35E051C2EDFB094003C26CF /* OSMNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = A35E051B2EDFB093003C26CF /* OSMNode.swift */; }; @@ -65,7 +66,6 @@ A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */; }; A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */; }; A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */; }; - A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338BE2EDA889A00F1A402 /* CustomPicker.swift */; }; A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */; }; A39C9F3B2DD9B03300455E45 /* OSMElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A39C9F3A2DD9B03000455E45 /* OSMElement.swift */; }; A39C9F3D2DD9BE2800455E45 /* APIConstants.swift in Sources */ 
= {isa = PBXBuildFile; fileRef = A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */; }; @@ -105,8 +105,6 @@ CAF812C42CFA108100D44B84 /* UserStateViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = CAF812C22CFA108100D44B84 /* UserStateViewModel.swift */; }; DA6332E72BAE3998009C80F9 /* espnetv2_pascal_256.mlmodel in Resources */ = {isa = PBXBuildFile; fileRef = 3222F94B2B62FF2E0019A079 /* espnetv2_pascal_256.mlmodel */; }; DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B42CA38C11003666D8 /* SharedAppConstants.swift */; }; - DAA7F8B72CA3E4E7003666D8 /* SpinnerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */; }; - DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -202,8 +200,10 @@ A312FE0D2FA3EBE80044808E /* PointNMapShaderTypes.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = PointNMapShaderTypes.framework; sourceTree = BUILT_PRODUCTS_DIR; }; A312FF222FA4304D0044808E /* AccessibilityFeatureKindExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureKindExtension.swift; sourceTree = ""; }; A312FF2A2FA436C60044808E /* IsExistingExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IsExistingExtension.swift; sourceTree = ""; }; + A322B8082FA5287300F4D488 /* MappedAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MappedAccessibilityFeature.swift; sourceTree = ""; }; + A322B9032FA53B6800F4D488 /* MappedEditableAccessibilityFeature.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MappedEditableAccessibilityFeature.swift; sourceTree = ""; }; 
A329433B2EE7BEDD00C4C1BC /* OSWPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWPolicy.swift; sourceTree = ""; }; - A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWGeometry.swift; sourceTree = ""; }; + A32943452EE7C07A00C4C1BC /* MappingGeometryExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MappingGeometryExtension.swift; sourceTree = ""; }; A32943472EE7C0D800C4C1BC /* OSWElementClass.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWElementClass.swift; sourceTree = ""; }; A329434B2EE7CFE600C4C1BC /* OSWField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWField.swift; sourceTree = ""; }; A329434F2EE80EC200C4C1BC /* OSMRelation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMRelation.swift; sourceTree = ""; }; @@ -214,14 +214,12 @@ A32D66522F7C3F2F00DC4173 /* OSWMultiPolygon.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSWMultiPolygon.swift; sourceTree = ""; }; A33EB5AA2F76080B008ABFB7 /* APIEndpoint.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEndpoint.swift; sourceTree = ""; }; A3420F1B2E8D82E400CD617E /* APIEnvironment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIEnvironment.swift; sourceTree = ""; }; - A34509D72FA1A6FA003157B0 /* SafeDeque.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SafeDeque.swift; sourceTree = ""; }; A3471B9F2DAF7ABF00FEB845 /* bisenetv2.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = bisenetv2.mlpackage; sourceTree = ""; }; A355471D2EC1A47200F43AFD /* SharedAppData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppData.swift; sourceTree = ""; }; 
A35547C72EC1B0D900F43AFD /* CurrentMappedFeaturesData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentMappedFeaturesData.swift; sourceTree = ""; }; A35547CB2EC3018C00F43AFD /* AnnotationView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationView.swift; sourceTree = ""; }; A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceSelectionView.swift; sourceTree = ""; }; A35A8BD02E5D0D0D00CC8AA7 /* WorkspaceService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WorkspaceService.swift; sourceTree = ""; }; - A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InvalidContentView.swift; sourceTree = ""; }; A35E05152EDEA04B003C26CF /* APIChangesetUploadController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIChangesetUploadController.swift; sourceTree = ""; }; A35E05192EDFB015003C26CF /* OSMPayload.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMPayload.swift; sourceTree = ""; }; A35E051B2EDFB093003C26CF /* OSMNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMNode.swift; sourceTree = ""; }; @@ -238,7 +236,6 @@ A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureSnapshot.swift; sourceTree = ""; }; A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureClassSnapshot.swift; sourceTree = ""; }; A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppContext.swift; sourceTree = ""; }; - 
A38338BE2EDA889A00F1A402 /* CustomPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomPicker.swift; sourceTree = ""; }; A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AnnotationFeatureDetailView.swift; sourceTree = ""; }; A39C9F3A2DD9B03000455E45 /* OSMElement.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSMElement.swift; sourceTree = ""; }; A39C9F3C2DD9BE2600455E45 /* APIConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIConstants.swift; sourceTree = ""; }; @@ -278,8 +275,6 @@ CAF812BB2CF78F7C00D44B84 /* NetworkError.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetworkError.swift; sourceTree = ""; }; CAF812C22CFA108100D44B84 /* UserStateViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserStateViewModel.swift; sourceTree = ""; }; DAA7F8B42CA38C11003666D8 /* SharedAppConstants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppConstants.swift; sourceTree = ""; }; - DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpinnerView.swift; sourceTree = ""; }; - DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProgressBar.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */ @@ -342,6 +337,7 @@ buildActionMask = 2147483647; files = ( A312FE202FA3EC710044808E /* PointNMapShaderTypes.framework in Frameworks */, + A31DB99E2FA54BEB00DDA385 /* DequeModule in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -467,7 +463,6 @@ A35A8BCE2E5D0CCD00CC8AA7 /* WorkspaceSelectionView.swift */, A38338C02EDA9E3200F1A402 /* SubView */, 
A3EE6E442F57FE4400F515E6 /* TestMode */, - A31A1E772EAC49E3008B30B7 /* UI */, CAF812C32CFA108100D44B84 /* ViewModel */, ); path = View; @@ -493,6 +488,7 @@ A30801512EC0984F00B1BA3A /* AccessibilityFeature */ = { isa = PBXGroup; children = ( + A322B9012FA53B5D00F4D488 /* Definitions */, A312FF282FA4362C0044808E /* AttributeEstimation */, A38338C32EDAF25400F1A402 /* Attributes */, A30801662EC0AE6B00B1BA3A /* Components */, @@ -541,14 +537,13 @@ path = Extensions; sourceTree = ""; }; - A31A1E772EAC49E3008B30B7 /* UI */ = { + A322B9012FA53B5D00F4D488 /* Definitions */ = { isa = PBXGroup; children = ( - A38338BE2EDA889A00F1A402 /* CustomPicker.swift */, - DAA7F8C12CA684AF003666D8 /* ProgressBar.swift */, - DAA7F8B62CA3E4E7003666D8 /* SpinnerView.swift */, + A322B8082FA5287300F4D488 /* MappedAccessibilityFeature.swift */, + A322B9032FA53B6800F4D488 /* MappedEditableAccessibilityFeature.swift */, ); - path = UI; + path = Definitions; sourceTree = ""; }; A329433D2EE7BF0200C4C1BC /* Others */ = { @@ -591,7 +586,6 @@ A34509DB2FA1A7A7003157B0 /* Utils */ = { isa = PBXGroup; children = ( - A34509D72FA1A6FA003157B0 /* SafeDeque.swift */, ); path = Utils; sourceTree = ""; @@ -637,6 +631,7 @@ A35E051F2EDFB0AD003C26CF /* OSM */ = { isa = PBXGroup; children = ( + A32943452EE7C07A00C4C1BC /* MappingGeometryExtension.swift */, A39C9F3A2DD9B03000455E45 /* OSMElement.swift */, A35E05192EDFB015003C26CF /* OSMPayload.swift */, A35E051B2EDFB093003C26CF /* OSMNode.swift */, @@ -744,7 +739,6 @@ A38338C02EDA9E3200F1A402 /* SubView */ = { isa = PBXGroup; children = ( - A35E050F2EDE60BC003C26CF /* InvalidContentView.swift */, A38338C12EDA9E6500F1A402 /* AnnotationFeatureDetailView.swift */, ); path = SubView; @@ -795,7 +789,6 @@ A3A45F0F2EE7BD9C0029F5AE /* OSW */ = { isa = PBXGroup; children = ( - A32943452EE7C07A00C4C1BC /* OSWGeometry.swift */, A329434B2EE7CFE600C4C1BC /* OSWField.swift */, A32943472EE7C0D800C4C1BC /* OSWElementClass.swift */, A32943522EE814A700C4C1BC /* OSWElement.swift */, 
@@ -962,6 +955,7 @@ ); name = PointNMapShared; packageProductDependencies = ( + A31DB99D2FA54BEB00DDA385 /* DequeModule */, ); productName = PointNMapShared; productReference = A312FD7B2FA3391B0044808E /* PointNMapShared.framework */; @@ -1131,6 +1125,7 @@ A355471E2EC1A47400F43AFD /* SharedAppData.swift in Sources */, CAF812C42CFA108100D44B84 /* UserStateViewModel.swift in Sources */, A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in Sources */, + A322B9052FA53B9B00F4D488 /* MappedAccessibilityFeature.swift in Sources */, A37E3E3D2EED60F300B07B77 /* lodepng.cpp in Sources */, A3FE166C2E1C29CB00DAE5BE /* OtherDetailsCoder.swift in Sources */, A306462A2D614D9600B97D1B /* ImageSaver.swift in Sources */, @@ -1140,6 +1135,7 @@ A308015D2EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift in Sources */, A35547C82EC1B0DB00F43AFD /* CurrentMappedFeaturesData.swift in Sources */, A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */, + A322B9042FA53B6F00F4D488 /* MappedEditableAccessibilityFeature.swift in Sources */, A3EE6E462F57FE6400F515E6 /* AppMode.swift in Sources */, A308015E2EC09BB700B1BA3A /* CocoCustom35ClassConfig.swift in Sources */, A3E162782F3AFC66002D4D08 /* MeshCoder.swift in Sources */, @@ -1150,7 +1146,6 @@ A374FAB72EE0173600055268 /* OSMChangesetUploadResponseElement.swift in Sources */, A3EE6E4A2F580D6200F515E6 /* TestCameraView.swift in Sources */, A329433C2EE7BEE100C4C1BC /* OSWPolicy.swift in Sources */, - A38338BF2EDA889C00F1A402 /* CustomPicker.swift in Sources */, A32943592EE8204400C4C1BC /* OSWPolygon.swift in Sources */, A3B61FCB2F79036A0052AE2C /* OSMMapDataResponse.swift in Sources */, CAF812BC2CF78F8100D44B84 /* NetworkError.swift in Sources */, @@ -1173,7 +1168,6 @@ A3F27DB42D34E07C0071D6F3 /* DepthAnythingV2SmallF16.mlpackage in Sources */, A3EE6E482F580D0D00F515E6 /* TestListView.swift in Sources */, A312FF232FA430510044808E /* AccessibilityFeatureKindExtension.swift in Sources */, - DAA7F8B72CA3E4E7003666D8 /* 
SpinnerView.swift in Sources */, A3EE6E4C2F580E2B00F515E6 /* DatasetLister.swift in Sources */, A3FE16652E18C54000DAE5BE /* CameraTransformCoder.swift in Sources */, A3D78D762E654F18003BFE78 /* ProfileView.swift in Sources */, @@ -1193,12 +1187,9 @@ A38338C22EDA9E6F00F1A402 /* AnnotationFeatureDetailView.swift in Sources */, A3D78D742E65108E003BFE78 /* WorkspaceViewModel.swift in Sources */, A3471BA02DAF7ABF00FEB845 /* bisenetv2.mlpackage in Sources */, - A34509D82FA1A6FA003157B0 /* SafeDeque.swift in Sources */, - A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */, A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */, A37E3E9B2EFB8F7500B07B77 /* HeadingCoder.swift in Sources */, A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */, - DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */, A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsCoder.swift in Sources */, A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */, A3EE6E522F5F9F1600F515E6 /* APITransmissionHelpers.swift in Sources */, @@ -1211,7 +1202,7 @@ A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */, A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */, A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */, - A32943462EE7C07E00C4C1BC /* OSWGeometry.swift in Sources */, + A32943462EE7C07E00C4C1BC /* MappingGeometryExtension.swift in Sources */, DAA7F8B52CA38C11003666D8 /* SharedAppConstants.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -1884,6 +1875,11 @@ /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ + A31DB99D2FA54BEB00DDA385 /* DequeModule */ = { + isa = XCSwiftPackageProductDependency; + package = CAF812C12CFA0FD400D44B84 /* XCRemoteSwiftPackageReference "swift-collections" */; + productName = DequeModule; + }; A3C22FD72CF2F0C300533BF7 /* DequeModule */ = { isa = XCSwiftPackageProductDependency; package = A3C22FD62CF2F0C300533BF7 /* 
XCRemoteSwiftPackageReference "swift-collections" */; diff --git a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift index 3658a245..20780124 100644 --- a/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift +++ b/IOSAccessAssessment/AccessibilityFeature/AttributeEstimation/Extensions/IsExistingExtension.swift @@ -9,10 +9,10 @@ import CoreLocation import MapKit extension AttributeEstimationPipeline { - public func processIsExistingRequest( + func processIsExistingRequest( deviceLocation: CLLocationCoordinate2D, mappingData: CurrentMappingData, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: MappedEditableAccessibilityFeature ) { /// Threshold needs to be in Map Units let distanceThreshold = PointNMapConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters * MKMapPointsPerMeterAtLatitude(deviceLocation.latitude) @@ -29,7 +29,7 @@ extension AttributeEstimationPipeline { accessibilityFeature.setIsExisting(false) return } - let isExisting = accessibilityFeature.accessibilityFeatureClass.kind?.oswPolicy.isExistingFirst ?? 
false + let isExisting = accessibilityFeature.accessibilityFeatureClass.kind.oswPolicy.isExistingFirst accessibilityFeature.setIsExisting(isExisting) accessibilityFeature.setOSWElement(oswElement: matchedElement) } diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift b/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift similarity index 70% rename from PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift rename to IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift index 21139adc..d5fc076e 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedAccessibilityFeature.swift @@ -6,19 +6,20 @@ // import Foundation import CoreLocation +import PointNMapShared -public struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, CustomStringConvertible { - public let id: UUID +struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable, CustomStringConvertible { + let id: UUID - public let accessibilityFeatureClass: AccessibilityFeatureClass + let accessibilityFeatureClass: AccessibilityFeatureClass - public var locationDetails: LocationDetails? - public var oswElement: any OSWElement + var locationDetails: LocationDetails? + var oswElement: any OSWElement - public var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] - public var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] + var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] + var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
- public init ( + init ( id: UUID = UUID(), accessibilityFeature: (any AccessibilityFeatureProtocol), oswElement: any OSWElement @@ -31,7 +32,7 @@ public struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable self.oswElement = oswElement } - public init( + init( id: UUID = UUID(), accessibilityFeatureClass: AccessibilityFeatureClass, locationDetails: LocationDetails?, @@ -47,17 +48,17 @@ public struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable self.locationDetails = locationDetails } - public func getLastLocationCoordinate() -> CLLocationCoordinate2D? { + func getLastLocationCoordinate() -> CLLocationCoordinate2D? { guard let locationDetails else { return nil } guard let lastCoordinate = locationDetails.locations.last?.coordinates.last else { return nil } return lastCoordinate } - public mutating func setLocationDetails(locationDetails: LocationDetails) { + mutating func setLocationDetails(locationDetails: LocationDetails) { self.locationDetails = locationDetails } - public mutating func setAttributeValue( + mutating func setAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute ) throws { guard attribute.isCompatible(with: value) else { @@ -66,7 +67,7 @@ public struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable attributeValues[attribute] = value } - public mutating func setExperimentalAttributeValue( + mutating func setExperimentalAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute ) throws { guard attribute.isCompatible(with: value) else { @@ -75,15 +76,15 @@ public struct MappedAccessibilityFeature: AccessibilityFeatureProtocol, Sendable experimentalAttributeValues[attribute] = value } - public mutating func setOSWElement(_ oswElement: any OSWElement) { + mutating func setOSWElement(_ oswElement: any OSWElement) { self.oswElement = oswElement } - public static func == (lhs: 
MappedAccessibilityFeature, rhs: MappedAccessibilityFeature) -> Bool { + static func == (lhs: MappedAccessibilityFeature, rhs: MappedAccessibilityFeature) -> Bool { return lhs.id == rhs.id } - public var description: String { - return "MappedAccessibilityFeature(id: \(id), class: \(accessibilityFeatureClass), location: \(String(describing: locationDetails)), attributes: \(attributeValues), oswElement: \(oswElement))" + var description: String { + return "MappedAccessibilityFeature(id: \(id), class: \(accessibilityFeatureClass), location: \(String(describing: locationDetails)), attributes: \(attributeValues))" } } diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedEditableAccessibilityFeature.swift b/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedEditableAccessibilityFeature.swift new file mode 100644 index 00000000..f2db7eb4 --- /dev/null +++ b/IOSAccessAssessment/AccessibilityFeature/Definitions/MappedEditableAccessibilityFeature.swift @@ -0,0 +1,77 @@ +// +// MappedEditableAccessibilityFeature.swift +// IOSAccessAssessment +// +// Created by Himanshu on 5/1/26. +// +import Foundation +import CoreLocation +import PointNMapShared + +class MappedEditableAccessibilityFeature: EditableAccessibilityFeature { + /// If isExisting is false, even if an osw element is associated, it means the feature is new. + /// If isExisting is true, it means the feature corresponds to an existing real-world feature, and the oswElement (if present) represents that existing feature in OSW. + var isExisting: Bool = false + var oswElement: (any OSWElement)? 
+ + required init( + id: UUID = UUID(), + detectedAccessibilityFeature: DetectedAccessibilityFeature + ) { + super.init(id: id, detectedAccessibilityFeature: detectedAccessibilityFeature) + } + + init( + editableAccessibilityFeature: EditableAccessibilityFeature + ) { + self.isExisting = false + self.oswElement = nil + super.init( + id: editableAccessibilityFeature.id, + accessibilityFeatureClass: editableAccessibilityFeature.accessibilityFeatureClass, + contourDetails: editableAccessibilityFeature.contourDetails, + locationDetails: editableAccessibilityFeature.locationDetails, + calculatedAttributeValues: editableAccessibilityFeature.calculatedAttributeValues, + attributeValues: editableAccessibilityFeature.attributeValues, + experimentalAttributeValues: editableAccessibilityFeature.experimentalAttributeValues + ) + } + + init( + id: UUID = UUID(), + accessibilityFeatureClass: AccessibilityFeatureClass, + contourDetails: ContourDetails, + locationDetails: LocationDetails?, + isExisting: Bool = false, + oswElement: (any OSWElement)? = nil, + calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?], + attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?], + experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
+ ) { + self.isExisting = isExisting + self.oswElement = oswElement + super.init( + id: id, + accessibilityFeatureClass: accessibilityFeatureClass, + contourDetails: contourDetails, + locationDetails: locationDetails, + calculatedAttributeValues: calculatedAttributeValues, + attributeValues: attributeValues, + experimentalAttributeValues: experimentalAttributeValues + ) + } + + func setIsExisting(_ isExisting: Bool) { + self.isExisting = isExisting + } + + func setOSWElement(oswElement: any OSWElement) { + self.oswElement = oswElement + } + + static func == ( + lhs: MappedEditableAccessibilityFeature, rhs: MappedEditableAccessibilityFeature + ) -> Bool { + return lhs.id == rhs.id + } +} diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift index 0f9e22e9..4474036e 100644 --- a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift @@ -27,7 +27,7 @@ final class AccessibilityFeatureFile { private var snapshot: AccessibilityFeatureSnapshot - init(url: URL, frameNumber: UUID, timestamp: TimeInterval, feature: EditableAccessibilityFeature) throws { + init(url: URL, frameNumber: UUID, timestamp: TimeInterval, feature: any EditableAccessibilityFeatureProtocol) throws { self.url = url if FileManager.default.fileExists(atPath: url.path) { @@ -46,7 +46,7 @@ final class AccessibilityFeatureFile { func update(frameNumber: UUID, timestamp: TimeInterval, feature: any AccessibilityFeatureProtocol) throws { self.snapshot.update(frame: frameNumber, timestamp: timestamp) - if let editableFeature = feature as? EditableAccessibilityFeature { + if let editableFeature = feature as? any EditableAccessibilityFeatureProtocol { self.snapshot.update(from: editableFeature) } else if let mappedFeature = feature as? 
MappedAccessibilityFeature { self.snapshot.update(from: mappedFeature) @@ -84,7 +84,7 @@ class AccessibilityFeatureEncoder { try FileManager.default.createDirectory(at: outDirectory.absoluteURL, withIntermediateDirectories: true, attributes: nil) } - func insert(features: [EditableAccessibilityFeature], frameNumber: UUID, timestamp: TimeInterval) throws { + func insert(features: [any EditableAccessibilityFeatureProtocol], frameNumber: UUID, timestamp: TimeInterval) throws { try features.forEach { feature in if let featureFile = self.fileStore[feature.id] { /// Update existing file @@ -110,7 +110,7 @@ class AccessibilityFeatureEncoder { if let featureFile = self.fileStore[feature.id] { /// Update existing file try featureFile.update(frameNumber: frameNumber, timestamp: timestamp, feature: feature) - } else if let editableFeature = feature as? EditableAccessibilityFeature { + } else if let editableFeature = feature as? any EditableAccessibilityFeatureProtocol { /// Create new file for editable feature let featureFileURL = self.baseDirectory .appendingPathComponent(editableFeature.id.uuidString, isDirectory: false) diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift index 45e3dab1..ca57b532 100644 --- a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift @@ -28,7 +28,7 @@ struct AccessibilityFeatureSnapshot: Codable, Identifiable, Sendable { var oswElement: String? 
- init(from accessibilityFeature: EditableAccessibilityFeature) { + init(from accessibilityFeature: any EditableAccessibilityFeatureProtocol) { self.id = accessibilityFeature.id self.accessibilityFeatureClass = .init(from: accessibilityFeature.accessibilityFeatureClass) self.contourDetails = accessibilityFeature.contourDetails @@ -39,7 +39,7 @@ struct AccessibilityFeatureSnapshot: Codable, Identifiable, Sendable { self.experimentalAttributeValues = accessibilityFeature.experimentalAttributeValues } - mutating func update(from accessibilityFeature: EditableAccessibilityFeature) { + mutating func update(from accessibilityFeature: any EditableAccessibilityFeatureProtocol) { self.selectedAnnotationOption = accessibilityFeature.selectedAnnotationOption.rawValue self.locationDetails = accessibilityFeature.locationDetails self.calculatedAttributeValues = accessibilityFeature.calculatedAttributeValues diff --git a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift index 6a4f502b..5f2b7c2e 100644 --- a/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift +++ b/IOSAccessAssessment/Shared/Definitions/CurrentMappingData.swift @@ -74,7 +74,7 @@ class CurrentMappingData: CustomStringConvertible { } for featureClass in accessibilityFeatureClasses { - let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? 
OSWPolicy.default.oswElementClass + let oswElementClass = featureClass.kind.oswPolicy.oswElementClass let geometry = oswElementClass.geometry let identifyingFieldTags: [String: String] = oswElementClass.identifyingFieldTags @@ -130,6 +130,8 @@ class CurrentMappingData: CustomStringConvertible { polygons.merge(matchingOSWPolygons) { (_, new) in new } points.merge(matchingOSWChildPoints) { (_, new) in new } featuresMap[featureClass] = Array(matchingOSWPolygons.keys) + default: + continue } } self.points = points @@ -142,7 +144,7 @@ class CurrentMappingData: CustomStringConvertible { Updates the features map for a specific accessibility feature class by adding or replacing the features related that class with the provided elements. This function can be used to incrementally update the features map when new data is available for a specific feature class, without needing to rebuild the entire map from scratch. */ func updateFeatures(_ elements: [any OSWElement], for featureClass: AccessibilityFeatureClass) { - let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? OSWPolicy.default.oswElementClass + let oswElementClass = featureClass.kind.oswPolicy.oswElementClass let geometry = oswElementClass.geometry var featureIds = featuresMap[featureClass] ?? [] @@ -180,7 +182,7 @@ class CurrentMappingData: CustomStringConvertible { guard let featureIds = featuresMap[featureClass] else { return nil } var nearestFeature: (any OSWElement)? var nearestDistance: CLLocationDistance = distanceThreshold - let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? OSWPolicy.default.oswElementClass + let oswElementClass = featureClass.kind.oswPolicy.oswElementClass let geometry = oswElementClass.geometry for featureId in featureIds { @@ -208,7 +210,7 @@ class CurrentMappingData: CustomStringConvertible { ) -> (any OSWElement)? { guard let featureIds = featuresMap[featureClass] else { return nil } var nearestFeature: (any OSWElement)? 
- let oswElementClass = featureClass.kind?.oswPolicy.oswElementClass ?? OSWPolicy.default.oswElementClass + let oswElementClass = featureClass.kind.oswPolicy.oswElementClass let geometry = oswElementClass.geometry let captureIdString = captureId.uuidString @@ -246,7 +248,7 @@ class CurrentMappingData: CustomStringConvertible { } private func getFeature( - featureId: String, geometry: OSWGeometry + featureId: String, geometry: MappingGeometry ) -> (any OSWElement)? { switch geometry { case .point: @@ -255,6 +257,8 @@ class CurrentMappingData: CustomStringConvertible { return lineStrings[featureId] case .polygon: return polygons[featureId] + default: + return nil } } @@ -269,9 +273,9 @@ class CurrentMappingData: CustomStringConvertible { return nil } - /// Note: OSWGeometry is not required as a parameter here since the feature itself carries geometry information based on the type of OSWElement it is. + /// Note: MappingGeometry is not required as a parameter here since the feature itself carries geometry information based on the type of OSWElement it is. private func getFeatureOSMLocationDetails( - feature: any OSWElement, geometry: OSWGeometry + feature: any OSWElement, geometry: MappingGeometry ) -> LocationDetails? { switch geometry { case .point: @@ -303,6 +307,8 @@ class CurrentMappingData: CustomStringConvertible { coordinates: coordinates, isWay: true, isClosed: true ) return LocationDetails(locations: [LocationElement]) + default: + return nil } } } diff --git a/IOSAccessAssessment/TDEI/OSM/MappingGeometryExtension.swift b/IOSAccessAssessment/TDEI/OSM/MappingGeometryExtension.swift new file mode 100644 index 00000000..f4db8874 --- /dev/null +++ b/IOSAccessAssessment/TDEI/OSM/MappingGeometryExtension.swift @@ -0,0 +1,22 @@ +// +// MappingGeometryExtension.swift +// IOSAccessAssessment +// +// Created by Himanshu on 12/8/25. 
+// +import PointNMapShared + +extension MappingGeometry { + var osmElementType: OSMElementType { + switch self { + case .point: + return .node + case .linestring: + return .way + case .polygon: + return .way + default: + return .node + } + } +} diff --git a/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift b/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift index 1b85acba..de615a4a 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift @@ -4,7 +4,15 @@ // // Created by Himanshu on 12/8/25. // +import PointNMapShared +/** + - Warning: MappingGeometry is currently used in two places of the AccessibilityFeature that can end up being different from each other. + AccessibilityFeature -> AccessibilityFeatureClass -> AccessibilityFeatureKind -> MappingGeometry + AccessibilityFeature -> AccessibilityFeatureClass -> OSWPolicy -> OSWElementClass -> Metadata -> MappingGeometry + We need to ensure that the MappingGeometry used in both places is the same and consistent. + Or we need to remove this potential inconsistency probably by removing Metadata from OSWPolicy since OSWPolicy is part of an extension outside the framework and should not have MappingGeometry in it. + */ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { case BareNode case Footway @@ -45,14 +53,14 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { let name: String let description: String let parent: OSWElementClass? - let geometry: OSWGeometry + let geometry: MappingGeometry let identifyingFields: [IdentifyingField] init( name: String, description: String, parent: OSWElementClass? 
= nil, - geometry: OSWGeometry, + geometry: MappingGeometry, identifyingFields: [IdentifyingField] = [] ) { self.name = name @@ -174,7 +182,7 @@ extension OSWElementClass { return metadata.parent } - var geometry: OSWGeometry { + var geometry: MappingGeometry { return metadata.geometry } diff --git a/IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift b/IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift deleted file mode 100644 index 0c91d828..00000000 --- a/IOSAccessAssessment/TDEI/OSW/OSWGeometry.swift +++ /dev/null @@ -1,39 +0,0 @@ -// -// OSWGeometry.swift -// IOSAccessAssessment -// -// Created by Himanshu on 12/8/25. -// - -/** - - WARNING: We may want to merge this with the FeatureGeometry enum in the future. - Else, we will have a disconnect between the geometry of the AccessibilityFeature and the geometry of the associated OSW element. - */ -enum OSWGeometry: String, CaseIterable, Hashable, Codable { - case point - case linestring - case polygon - // case multipolygon // For future use, currently not supported by OSW API - - var description: String { - switch self { - case .point: - return "Point" - case .linestring: - return "LineString" - case .polygon: - return "Polygon" - } - } - - var osmElementType: OSMElementType { - switch self { - case .point: - return .node - case .linestring: - return .way - case .polygon: - return .way - } - } -} diff --git a/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift b/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift index 95c400db..4cd9a1f6 100644 --- a/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift +++ b/IOSAccessAssessment/TDEI/Transmission/APIChangesetUploadController.swift @@ -44,7 +44,7 @@ class APIChangesetUploadController: ObservableObject { } func uploadFeatures( - accessibilityFeatures: [any AccessibilityFeatureProtocol], + accessibilityFeatures: [MappedEditableAccessibilityFeature], currentMappedFeaturesData: CurrentMappedFeaturesData, inputs: 
APIChangesetUploadInputs ) async throws -> APIChangesetUploadResults { @@ -101,6 +101,8 @@ class APIChangesetUploadController: ObservableObject { enhancedAnalysisMode = false case .imageAndMeshData(_): enhancedAnalysisMode = true + default: + enhancedAnalysisMode = false } return [ APIConstants.TagKeys.captureIdKey: captureData.id.uuidString, @@ -114,7 +116,7 @@ class APIChangesetUploadController: ObservableObject { */ extension APIChangesetUploadController { func uploadAllFeatures( - accessibilityFeatures: [any AccessibilityFeatureProtocol], + accessibilityFeatures: [MappedEditableAccessibilityFeature], currentMappedFeaturesData: CurrentMappedFeaturesData, inputs: APIChangesetUploadInputs ) async throws -> APIChangesetUploadResults { @@ -124,7 +126,7 @@ extension APIChangesetUploadController { return APIChangesetUploadResults(failedFeatureUploads: totalFeatures, totalFeatureUploads: totalFeatures) } /// For the sidewalk feature class, only upload one linestring representing the entire sidewalk - if inputs.accessibilityFeatureClass.oswPolicy.oswElementClass == .Sidewalk { + if inputs.accessibilityFeatureClass.kind.oswPolicy.oswElementClass == .Sidewalk { accessibilityFeatures = [firstFeature] totalFeatures = 1 } @@ -136,13 +138,15 @@ extension APIChangesetUploadController { ) for feature in accessibilityFeatures { var diffOperationSets: (mainOperations: [ChangesetDiffOperation], auxOperations: [ChangesetDiffOperation]) - switch feature.accessibilityFeatureClass.oswPolicy.oswElementClass.geometry { + switch feature.accessibilityFeatureClass.kind.oswPolicy.oswElementClass.geometry { case .point: diffOperationSets = getDiffOperationsFromPointFeature(feature, additionalTags: additionalTags) case .linestring: diffOperationSets = getDiffOperationsFromLinestringFeature(feature, additionalTags: additionalTags) case .polygon: diffOperationSets = getDiffOperationsFromPolygons(feature, additionalTags: additionalTags) + default: + continue } 
diffOperationSets.mainOperations.forEach { diffOperation in let oswElement = diffOperation.oswElement @@ -160,7 +164,7 @@ extension APIChangesetUploadController { return APIChangesetUploadResults(failedFeatureUploads: totalFeatures, totalFeatureUploads: totalFeatures) } /// For the sidewalk class, get the previously uploaded linestring, connect it to the new linestring, and add a modify operation - if inputs.accessibilityFeatureClass.oswPolicy.oswElementClass == .Sidewalk, + if inputs.accessibilityFeatureClass.kind.oswPolicy.oswElementClass == .Sidewalk, let newDiffOperation = featureCache.mainEntryList.getOSWLineStringDiffOperations().first, case .create(let newOSWElement) = newDiffOperation, let existingMappedFeature = currentMappedFeaturesData.featuresMap[inputs.accessibilityFeatureClass]?.last, @@ -328,10 +332,10 @@ extension APIChangesetUploadController { */ extension APIChangesetUploadController { private func getDiffOperationsFromPointFeature( - _ feature: any AccessibilityFeatureProtocol, + _ feature: MappedEditableAccessibilityFeature, additionalTags: [String: String] = [:] ) -> (mainOperations: [ChangesetDiffOperation], auxOperations: [ChangesetDiffOperation]) { - let oswElementClass = feature.accessibilityFeatureClass.oswPolicy.oswElementClass + let oswElementClass = feature.accessibilityFeatureClass.kind.oswPolicy.oswElementClass guard oswElementClass.geometry == .point else { return ([], []) } guard var featureLocation = feature.getLastLocationCoordinate() else { return ([], []) } var isExisting = false @@ -339,17 +343,15 @@ extension APIChangesetUploadController { var version = "1" /// If feature is of type editable accessibility feature, then also add the calculated attribute values as a property var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] - if let editableFeature = feature as? 
EditableAccessibilityFeature { - calculatedAttributeValues = editableFeature.calculatedAttributeValues - } + calculatedAttributeValues = feature.calculatedAttributeValues /// Add location as additional tags as well var additionalTags = additionalTags additionalTags[APIConstants.TagKeys.calculatedLatitudeKey] = String(featureLocation.latitude) additionalTags[APIConstants.TagKeys.calculatedLongitudeKey] = String(featureLocation.longitude) /// If feature is of type editable accessibility feature and is existing, then use the existing id and version for the point /// to update the existing point in OSM instead of creating a new one - if let editableFeature = feature as? EditableAccessibilityFeature, editableFeature.isExisting { - guard let existingPoint = editableFeature.oswElement as? OSWPoint else { return ([], []) } + if feature.isExisting { + guard let existingPoint = feature.oswElement as? OSWPoint else { return ([], []) } isExisting = true id = existingPoint.id version = existingPoint.version @@ -362,7 +364,7 @@ extension APIChangesetUploadController { let oswPoint = OSWPoint( id: id, version: version, - oswElementClass: feature.accessibilityFeatureClass.oswPolicy.oswElementClass, + oswElementClass: feature.accessibilityFeatureClass.kind.oswPolicy.oswElementClass, latitude: featureLocation.latitude, longitude: featureLocation.longitude, attributeValues: feature.attributeValues, @@ -375,10 +377,10 @@ extension APIChangesetUploadController { } private func getDiffOperationsFromLinestringFeature( - _ feature: any AccessibilityFeatureProtocol, + _ feature: MappedEditableAccessibilityFeature, additionalTags: [String: String] = [:] ) -> (mainOperations: [ChangesetDiffOperation], auxOperations: [ChangesetDiffOperation]) { - let oswElementClass = feature.accessibilityFeatureClass.oswPolicy.oswElementClass + let oswElementClass = feature.accessibilityFeatureClass.kind.oswPolicy.oswElementClass guard oswElementClass.geometry == .linestring else { return ([], []) } guard 
let featureLocationElement: LocationElement = feature.locationDetails?.locations.first, featureLocationElement.isWay, !featureLocationElement.isClosed else { @@ -389,14 +391,12 @@ extension APIChangesetUploadController { var version = "1" /// If feature is of type editable accessibility feature, then also add the calculated attribute values as a property to the linestring var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] - if let editableFeature = feature as? EditableAccessibilityFeature { - calculatedAttributeValues = editableFeature.calculatedAttributeValues - } + calculatedAttributeValues = feature.calculatedAttributeValues var additionalTags = additionalTags var pointDiffOperations: [ChangesetDiffOperation] = [] var pointRefs: [String] = [] - if let editableFeature = feature as? EditableAccessibilityFeature, editableFeature.isExisting { - guard let existingLineString = editableFeature.oswElement as? OSWLineString else { + if feature.isExisting { + guard let existingLineString = feature.oswElement as? OSWLineString else { /// If the feature is deemed to exist, but we cannot get an existing linestring for it, then we should not attempt to upload it. 
return ([], []) } @@ -442,10 +442,10 @@ extension APIChangesetUploadController { } private func getDiffOperationsFromPolygons( - _ feature: any AccessibilityFeatureProtocol, + _ feature: MappedEditableAccessibilityFeature, additionalTags: [String: String] = [:] ) -> (mainOperations: [ChangesetDiffOperation], auxOperations: [ChangesetDiffOperation]) { - let oswElementClass = feature.accessibilityFeatureClass.oswPolicy.oswElementClass + let oswElementClass = feature.accessibilityFeatureClass.kind.oswPolicy.oswElementClass guard oswElementClass.geometry == .polygon else { return ([], []) } guard let featureLocationElement: LocationElement = feature.locationDetails?.locations.first, featureLocationElement.isWay, featureLocationElement.isClosed else { @@ -456,14 +456,12 @@ extension APIChangesetUploadController { var version = "1" /// If feature is of type editable accessibility feature, then also add the calculated attribute values as a property to the polygon var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] - if let editableFeature = feature as? EditableAccessibilityFeature { - calculatedAttributeValues = editableFeature.calculatedAttributeValues - } + calculatedAttributeValues = feature.calculatedAttributeValues var additionalTags = additionalTags var pointDiffOperations: [ChangesetDiffOperation] = [] var pointRefs: [String] = [] - if let editableFeature = feature as? EditableAccessibilityFeature, editableFeature.isExisting { - guard let existingPolygon = editableFeature.oswElement as? OSWPolygon else { + if feature.isExisting { + guard let existingPolygon = feature.oswElement as? OSWPolygon else { /// If the feature is deemed to exist, but we cannot get an existing polygon for it, then we should not attempt to upload it. 
return ([], []) } diff --git a/IOSAccessAssessment/TDEI/Transmission/APITransmissionHelpers.swift b/IOSAccessAssessment/TDEI/Transmission/APITransmissionHelpers.swift index b1eab343..74f5e33b 100644 --- a/IOSAccessAssessment/TDEI/Transmission/APITransmissionHelpers.swift +++ b/IOSAccessAssessment/TDEI/Transmission/APITransmissionHelpers.swift @@ -7,6 +7,7 @@ import SwiftUI import CoreLocation +import PointNMapShared struct APIChangesetUploadCacheEntry: @unchecked Sendable { let osmOldId: String diff --git a/IOSAccessAssessment/View/AnnotationView.swift b/IOSAccessAssessment/View/AnnotationView.swift index 2ab69c30..46e4db32 100644 --- a/IOSAccessAssessment/View/AnnotationView.swift +++ b/IOSAccessAssessment/View/AnnotationView.swift @@ -9,6 +9,7 @@ import SwiftUI import TipKit import CoreLocation import simd +import PointNMapShared enum AnnotationViewConstants { enum Texts { @@ -140,14 +141,14 @@ class AnnotationFeatureClassSelectionViewModel: ObservableObject { } class AnnotationFeatureSelectionViewModel: ObservableObject { - @Published var instances: [EditableAccessibilityFeature] = [] + @Published var instances: [MappedEditableAccessibilityFeature] = [] @Published var currentIndex: Int? = nil - @Published var currentFeature: EditableAccessibilityFeature? = nil + @Published var currentFeature: MappedEditableAccessibilityFeature? = nil - func setInstances(_ instances: [EditableAccessibilityFeature], currentClass: AccessibilityFeatureClass) throws { + func setInstances(_ instances: [MappedEditableAccessibilityFeature], currentClass: AccessibilityFeatureClass) throws { self.instances = instances /// If the class is sidewalk, we always select the first instance, as there should be only one sidewalk instance. 
- if (currentClass.oswPolicy.oswElementClass == .Sidewalk) { + if (currentClass.kind.oswPolicy.oswElementClass == .Sidewalk) { try setIndex(index: 0) } else { try setIndex(index: nil) @@ -167,7 +168,7 @@ class AnnotationFeatureSelectionViewModel: ObservableObject { self.currentFeature = instances[index] } - func setCurrent(index: Int?, instances: [EditableAccessibilityFeature], currentClass: AccessibilityFeatureClass) throws { + func setCurrent(index: Int?, instances: [MappedEditableAccessibilityFeature], currentClass: AccessibilityFeatureClass) throws { try setInstances(instances, currentClass: currentClass) try setIndex(index: index) } @@ -232,7 +233,7 @@ struct AnnotationView: View { @EnvironmentObject var sharedAppData: SharedAppData @Environment(\.dismiss) var dismiss - @StateObject var manager: AnnotationImageManager = AnnotationImageManager() + @StateObject var manager: AnnotationImageManager = AnnotationImageManager() @StateObject var segmentationAnnontationPipeline: SegmentationAnnotationPipeline = SegmentationAnnotationPipeline() @StateObject var attributeEstimationPipeline: AttributeEstimationPipeline = AttributeEstimationPipeline() @@ -353,7 +354,7 @@ struct AnnotationView: View { private func mainContent(currentClass: AccessibilityFeatureClass) -> some View { let isDisabledFeatureDetailButton = featureSelectionViewModel.currentFeature == nil orientationStack { - HostedAnnotationImageViewController(annotationImageManager: manager) + HostedAnnotationImageViewController(annotationImageManager: manager) VStack { HStack { @@ -367,7 +368,7 @@ struct AnnotationView: View { CustomPicker ( label: AnnotationViewConstants.Texts.selectObjectText, selection: $featureSelectionViewModel.currentIndex, - isContainsAll: currentClass.oswPolicy.oswElementClass != .Sidewalk + isContainsAll: currentClass.kind.oswPolicy.oswElementClass != .Sidewalk ) { ForEach(featureSelectionViewModel.instances.indices, id: \.self) { featureIndex in Text("\(currentClass.name.capitalized): 
\(featureIndex)") @@ -561,7 +562,7 @@ struct AnnotationView: View { guard let currentClass = featureClassSelectionViewModel.currentClass else { throw AnnotationViewError.invalidCaptureDataRecord } - var accessibilityFeatures: [EditableAccessibilityFeature] + var accessibilityFeatures: [MappedEditableAccessibilityFeature] var featureSelectedStatus: [UUID: Bool] = [:] var updateFeatureResults: AnnotationImageFeatureUpdateResults? = nil if let currentFeature = featureSelectionViewModel.currentFeature { @@ -574,7 +575,7 @@ struct AnnotationView: View { featureSelectedStatus[oldFeature.id] = false /// Selected, but not highlighted } /// MARK: Temporary code for visualization. Incurs significant performance overhead. - if currentClass.attributes.contains(where: { + if currentClass.kind.attributes.contains(where: { $0 == .width || $0 == .runningSlope || $0 == .crossSlope || $0 == .surfaceIntegrity }) { let plane = try attributeEstimationPipeline.calculateAlignedPlane( @@ -684,7 +685,7 @@ struct AnnotationView: View { guard featureClassSelectionViewModel.selectedAnnotationOption != .classOption(.discard) else { return nil } - let featuresToUpload: [any AccessibilityFeatureProtocol] = featureSelectionViewModel.instances.filter { feature in + let featuresToUpload: [MappedEditableAccessibilityFeature] = featureSelectionViewModel.instances.filter { feature in feature.selectedAnnotationOption != .individualOption(.discard) && feature.accessibilityFeatureClass == accessibilityFeatureClass } diff --git a/IOSAccessAssessment/View/SetupView.swift b/IOSAccessAssessment/View/SetupView.swift index f4299c25..3f9139ea 100644 --- a/IOSAccessAssessment/View/SetupView.swift +++ b/IOSAccessAssessment/View/SetupView.swift @@ -7,6 +7,7 @@ import SwiftUI import TipKit +import PointNMapShared enum SetupViewConstants { enum Texts { diff --git a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift index 
2a374a4b..a19e2039 100644 --- a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift +++ b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift @@ -8,422 +8,84 @@ import SwiftUI import PointNMapShared -/** - A view that displays detailed information about an accessibility feature annotation. - Sub-view of the `AnnotationView`. - */ -struct AnnotationFeatureDetailView: View { - enum SharedAppConstants { - enum Texts { - /// Alert texts - static let statusAlertTitleKey: String = "Error" - static let statusAlertDismissAlertSuffixKey: String = "Press OK to dismiss this alert." - static let statusAlertDismissButtonKey: String = "OK" - - /// Is Existing - static let isExistingTitle: String = "Is this an existing feature?" - - /// Invalid - static let invalidTextKey: String = "Invalid" - } - - enum Images { - /// Alert images - static let statusAlertImageNameKey: String = "exclamationmark.triangle.fill" - } - } - - enum AnnotationFeatureDetailViewError: Error, LocalizedError { - case invalidAttributeValue(attribute: AccessibilityFeatureAttribute, message: String) - - var errorDescription: String? { - switch self { - case .invalidAttributeValue(let attribute, let message): - return "Invalid value for \(attribute.displayName): \(message)" - } - } - } - - struct AttributeErrorStatus { - var isError: Bool - var errorMessage: String - - init(isError: Bool, errorMessage: String) { - self.isError = isError - self.errorMessage = errorMessage - } - } - - class StatusViewModel: ObservableObject { - @Published var attributeStatusMap: [AccessibilityFeatureAttribute: AttributeErrorStatus] = [:] +enum AnnotationMappedFeatureDetailViewConstants { + enum Texts { + /// Is Existing + static let isExistingTitle: String = "Is this an existing feature?" 
- func configure(accessibilityFeature: EditableAccessibilityFeature) { - let attributes = accessibilityFeature.accessibilityFeatureClass.attributes - var attributeStatusMap: [AccessibilityFeatureAttribute: AttributeErrorStatus] = [:] - attributes.forEach { - let initialStatus = AttributeErrorStatus(isError: false, errorMessage: "") - attributeStatusMap[$0] = initialStatus - } - self.attributeStatusMap = attributeStatusMap - } - - func updateAttributeStatus( - for attribute: AccessibilityFeatureAttribute, - isError: Bool, - errorMessage: String - ) { - if let _ = attributeStatusMap[attribute] { - attributeStatusMap[attribute]?.isError = isError - attributeStatusMap[attribute]?.errorMessage = errorMessage - } - } + /// Invalid + static let invalidTextKey: String = "Invalid" } - var accessibilityFeature: EditableAccessibilityFeature - let title: String - - @StateObject private var statusViewModel = AnnotationFeatureDetailView.StatusViewModel() - @FocusState private var focusedField: AccessibilityFeatureAttribute? - /// Note: Fields such as pickers don't have built-in ways to update their UI based on user input. Hence we need to trigger a refresh manually when their value changes. 
- @State private var refreshTrigger: Int = 0 - - var locationFormatter: NumberFormatter = { - var nf = NumberFormatter() - nf.numberStyle = .decimal - nf.maximumFractionDigits = 7 - nf.minimumFractionDigits = 7 - return nf - }() - - var body: some View { - VStack { - Text(title) - .font(.headline) - .padding() - - Form { - Section(header: Text(AnnotationViewConstants.Texts.featureDetailViewIdKey)) { - Text(accessibilityFeature.id.uuidString) - .foregroundStyle(.secondary) - } - - /** - Location Section - */ - - Section(header: Text(AnnotationViewConstants.Texts.featureDetailViewLocationKey)) { - if let featureLocation = accessibilityFeature.getLastLocationCoordinate() { - VStack { - HStack { - Spacer() - Text( - locationFormatter.string( - from: NSNumber(value: featureLocation.latitude) - ) ?? AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey - ) - .padding(.horizontal) - Text( - locationFormatter.string( - from: NSNumber(value: featureLocation.longitude) - ) ?? AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey - ) - .padding(.horizontal) - Spacer() - } - Divider() - HStack { - Spacer() - Toggle(isOn: Binding( - get: { accessibilityFeature.isExisting && accessibilityFeature.oswElement != nil }, - set: { newValue in - accessibilityFeature.setIsExisting(newValue) - } - )) { - Text(AnnotationFeatureDetailView.SharedAppConstants.Texts.isExistingTitle) - } - .disabled(accessibilityFeature.oswElement == nil) - .foregroundStyle(accessibilityFeature.oswElement == nil ? 
.secondary : .primary) - .strikethrough(accessibilityFeature.oswElement == nil, pattern: .solid) - Spacer() - } - if let oswElement = accessibilityFeature.oswElement { - HStack { - Spacer() - Text("TDEI Element ID: \(oswElement.id)") - .foregroundStyle(.secondary) - Spacer() - } - .padding(.horizontal) - .padding(.top, 4) - .padding(.bottom, 4) - } - } - } else { - Text(AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey) - .foregroundStyle(.secondary) - } - } - - /** - The Attributes Section - Instead of using a ForEach loop, we manually list out each attribute to have more control over the layout and presentation. - This allows us to customize the display for each attribute type as needed. - There isn't a large number of attributes, so this approach is manageable and provides better clarity. - */ - - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.width)) - { - Section(header: Text(AccessibilityFeatureAttribute.width.displayName)) { - numberTextFieldView(attribute: .width) - .focused($focusedField, equals: .width) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.runningSlope)) - { - Section(header: Text(AccessibilityFeatureAttribute.runningSlope.displayName)) { - numberTextFieldView(attribute: .runningSlope) - .focused($focusedField, equals: .runningSlope) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.crossSlope)) - { - Section(header: Text(AccessibilityFeatureAttribute.crossSlope.displayName)) { - numberTextFieldView(attribute: .crossSlope) - .focused($focusedField, equals: .crossSlope) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.surfaceIntegrity)) - { - Section(header: Text(AccessibilityFeatureAttribute.surfaceIntegrity.displayName)) { - pickerView(attribute: .surfaceIntegrity) - .focused($focusedField, equals: .surfaceIntegrity) - .id(refreshTrigger) // Refresh the Picker view when refreshTrigger changes - } - 
} - - /// Experimental Attributes Section - if (accessibilityFeature.accessibilityFeatureClass.experimentalAttributes.contains(.lidarDepth)) { - Section(header: Text(AccessibilityFeatureAttribute.lidarDepth.displayName)) { - numberTextView(attribute: .lidarDepth) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.experimentalAttributes.contains(.latitudeDelta)) { - Section(header: Text(AccessibilityFeatureAttribute.latitudeDelta.displayName)) { - numberTextView(attribute: .latitudeDelta) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.experimentalAttributes.contains(.longitudeDelta)) { - Section(header: Text(AccessibilityFeatureAttribute.longitudeDelta.displayName)) { - numberTextView(attribute: .longitudeDelta) - } - } - - /// Legacy Attributes Section - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.widthLegacy)) - { - Section(header: Text(AccessibilityFeatureAttribute.widthLegacy.displayName)) { - numberTextFieldView(attribute: .widthLegacy) - .focused($focusedField, equals: .widthLegacy) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.runningSlopeLegacy)) - { - Section(header: Text(AccessibilityFeatureAttribute.runningSlopeLegacy.displayName)) { - numberTextFieldView(attribute: .runningSlopeLegacy) - .focused($focusedField, equals: .runningSlopeLegacy) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.crossSlopeLegacy)) - { - Section(header: Text(AccessibilityFeatureAttribute.crossSlopeLegacy.displayName)) { - numberTextFieldView(attribute: .crossSlopeLegacy) - .focused($focusedField, equals: .crossSlopeLegacy) - } - } - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.widthFromImage)) - { - Section(header: Text(AccessibilityFeatureAttribute.widthFromImage.displayName)) { - numberTextFieldView(attribute: .widthFromImage) - .focused($focusedField, equals: .widthFromImage) - } - } - - if 
(accessibilityFeature.accessibilityFeatureClass.attributes.contains(.runningSlopeFromImage)) - { - Section(header: Text(AccessibilityFeatureAttribute.runningSlopeFromImage.displayName)) { - numberTextFieldView(attribute: .runningSlopeFromImage) - .focused($focusedField, equals: .runningSlopeFromImage) - } - } - - if (accessibilityFeature.accessibilityFeatureClass.attributes.contains(.crossSlopeFromImage)) - { - Section(header: Text(AccessibilityFeatureAttribute.crossSlopeFromImage.displayName)) { - numberTextFieldView(attribute: .crossSlopeFromImage) - .focused($focusedField, equals: .crossSlopeFromImage) - } - } - } - } - .onAppear { - self.statusViewModel.configure(accessibilityFeature: accessibilityFeature) - focusedField = nil - } - .onTapGesture { - // Dismiss the keyboard when tapping outside of a TextField - focusedField = nil - } + enum Images { + /// Alert images + static let statusAlertImageNameKey: String = "exclamationmark.triangle.fill" } - - @ViewBuilder - private func numberTextFieldView(attribute: AccessibilityFeatureAttribute) -> some View { - let attributeStatus = statusViewModel.attributeStatusMap[attribute] ?? 
.init(isError: false, errorMessage: "") - VStack { - if (attributeStatus.isError) { - /// A red colored error message - HStack { - Label( - attributeStatus.errorMessage, - systemImage: AnnotationFeatureDetailView.SharedAppConstants.Images.statusAlertImageNameKey - ) - .foregroundStyle(.red) - .font(.caption) - Spacer() - } - } - TextField( - attribute.displayName, - value: Binding( - get: { - guard let attributeValue = accessibilityFeature.attributeValues[attribute], - let attributeValue, - let attributeBindableValue = attributeValue.toDouble() else { - return 0.0 - } - return attributeBindableValue - }, - set: { newValue in - do { - let newDoubleValue = Double(newValue) - guard let newAttributeValue = attribute.value(from: newDoubleValue) else { - return +} + +/** + A view that displays detailed information about an accessibility feature annotation. + Sub-view of the `AnnotationView`. + */ +@ViewBuilder +func AnnotationFeatureDetailView( + accessibilityFeature: MappedEditableAccessibilityFeature, + title: String +) -> some View { + AnnotationFeatureDetailViewBase( + accessibilityFeature: accessibilityFeature, title: title + ) { feature in + let locationFormatter = AnnotationFeatureDetailLocationFormatter() + Section(header: Text(AnnotationViewConstants.Texts.featureDetailViewLocationKey)) { + if let featureLocation = accessibilityFeature.getLastLocationCoordinate() { + VStack { + HStack { + Spacer() + Text( + locationFormatter.string( + from: NSNumber(value: featureLocation.latitude) + ) ?? AnnotationMappedFeatureDetailViewConstants.Texts.invalidTextKey + ) + .padding(.horizontal) + Text( + locationFormatter.string( + from: NSNumber(value: featureLocation.longitude) + ) ?? 
AnnotationMappedFeatureDetailViewConstants.Texts.invalidTextKey + ) + .padding(.horizontal) + Spacer() + } + Divider() + HStack { + Spacer() + Toggle(isOn: Binding( + get: { accessibilityFeature.isExisting && accessibilityFeature.oswElement != nil }, + set: { newValue in + accessibilityFeature.setIsExisting(newValue) } - try accessibilityFeature.setAttributeValue(newAttributeValue, for: attribute) - } catch { - setAttributeStatusErrorText(for: attribute, message: "\(error.localizedDescription)") + )) { + Text(AnnotationMappedFeatureDetailViewConstants.Texts.isExistingTitle) } - } - ), - format: .number - ) - .textFieldStyle(.roundedBorder) - .keyboardType(.decimalPad) - } - } - - @ViewBuilder - private func numberTextView(attribute: AccessibilityFeatureAttribute) -> some View { - let attributeStatus = statusViewModel.attributeStatusMap[attribute] ?? .init(isError: false, errorMessage: "") - let valueToDisplay: String = { - guard let attributeValue = accessibilityFeature.experimentalAttributeValues[attribute], - let attributeValue, - let attributeBindableValue = attributeValue.toDouble() else { - return AnnotationFeatureDetailView.SharedAppConstants.Texts.invalidTextKey - } - return String(attributeBindableValue) - }() - VStack { - if (attributeStatus.isError) { - /// A red colored error message - HStack { - Label( - attributeStatus.errorMessage, - systemImage: AnnotationFeatureDetailView.SharedAppConstants.Images.statusAlertImageNameKey - ) - .foregroundStyle(.red) - .font(.caption) - Spacer() - } - } - Text(valueToDisplay) - } - } - - @ViewBuilder - private func toggleView(attribute: AccessibilityFeatureAttribute) -> some View { - Toggle( - isOn: Binding( - get: { - guard let attributeValue = accessibilityFeature.attributeValues[attribute], - let attributeValue, - let attributeBindableValue = attributeValue.toBool() else { - return false - } - return attributeBindableValue - }, - set: { newValue in - do { - let newBoolValue = Bool(newValue) - guard let 
newAttributeValue = attribute.value(from: newBoolValue) else { - return + .disabled(accessibilityFeature.oswElement == nil) + .foregroundStyle(accessibilityFeature.oswElement == nil ? .secondary : .primary) + .strikethrough(accessibilityFeature.oswElement == nil, pattern: .solid) + Spacer() + } + if let oswElement = accessibilityFeature.oswElement { + HStack { + Spacer() + Text("TDEI Element ID: \(oswElement.id)") + .foregroundStyle(.secondary) + Spacer() } - try accessibilityFeature.setAttributeValue(newAttributeValue, for: attribute) - } catch { - setAttributeStatusErrorText(for: attribute, message: "\(error.localizedDescription)") + .padding(.horizontal) + .padding(.top, 4) + .padding(.bottom, 4) } } - ) - ) { - Text(attribute.displayName) - } - } - - @ViewBuilder - private func pickerView(attribute: AccessibilityFeatureAttribute) -> some View { - Picker( - attribute.displayName, - selection: Binding( - get: { - guard case .categorical(let category) = accessibilityFeature.attributeValues[attribute] else { - return attribute.categoricalOptions().first - } - return category - }, - set: { newValue in - guard let newValue else { return } - do { - let newCategoricalValue: AccessibilityFeatureAttribute.Value = .categorical(newValue) - try accessibilityFeature.setAttributeValue(newCategoricalValue, for: attribute) - refreshTrigger += 1 // Trigger a refresh to update the Picker's displayed value - } catch { - setAttributeStatusErrorText(for: attribute, message: "\(error.localizedDescription)") - } - } - )) { - ForEach(attribute.categoricalOptions(), id: \.self) { option in - Text(option.rawValue).tag(option) - } - } - .pickerStyle(.menu) - } - - private func setAttributeStatusErrorText( - for attribute: AccessibilityFeatureAttribute, message: String - ) { - statusViewModel.updateAttributeStatus(for: attribute, isError: true, errorMessage: message) - Task { - do { - try await Task.sleep(for: .seconds(2)) - statusViewModel.updateAttributeStatus(for: attribute, isError: 
false, errorMessage: "") - } catch { - print("Failed to reset attribute error status: \(error.localizedDescription)") + } else { + Text(AnnotationMappedFeatureDetailViewConstants.Texts.invalidTextKey) + .foregroundStyle(.secondary) } } } diff --git a/IOSAccessAssessment/View/TestMode/TestListView.swift b/IOSAccessAssessment/View/TestMode/TestListView.swift index dc1cbc44..bdf258f6 100644 --- a/IOSAccessAssessment/View/TestMode/TestListView.swift +++ b/IOSAccessAssessment/View/TestMode/TestListView.swift @@ -7,6 +7,7 @@ import SwiftUI import TipKit +import PointNMapShared enum TestListViewError: Error, LocalizedError { case workspacesUnavailable diff --git a/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraManager.swift b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraManager.swift index 20e6bf9b..131394e2 100644 --- a/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraManager.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraManager.swift @@ -9,7 +9,7 @@ import RealityKit import Combine import simd -enum ARCameraManagerError: Error, LocalizedError { +public enum ARCameraManagerError: Error, LocalizedError { case sessionConfigurationFailed case pixelBufferPoolCreationFailed case pixelBufferCreationFailed @@ -28,7 +28,7 @@ enum ARCameraManagerError: Error, LocalizedError { case finalSessionNoSegmentationClass case finalSessionNoSegmentationMesh - var errorDescription: String? { + public var errorDescription: String? { switch self { case .sessionConfigurationFailed: return "AR session configuration failed." 
@@ -68,42 +68,42 @@ enum ARCameraManagerError: Error, LocalizedError { } } -enum ARCameraManagerConstants { - enum MeshResults { - static let meshAnchorEntityPlaceholderName = "ARCameraManager_MeshAnchorEntity" +public enum ARCameraManagerConstants { + public enum MeshResults { + public static let meshAnchorEntityPlaceholderName = "ARCameraManager_MeshAnchorEntity" } - enum Payload { - static let isCameraStopped = "isStopped" - static let cameraTransform = "cameraTransform" - static let cameraIntrinsics = "cameraIntrinsics" - static let originalImageSize = "originalImageSize" + public enum Payload { + public static let isCameraStopped = "isStopped" + public static let cameraTransform = "cameraTransform" + public static let cameraIntrinsics = "cameraIntrinsics" + public static let originalImageSize = "originalImageSize" } } -enum MeshUpdateType { +public enum MeshUpdateType { case add case update case remove } -struct ARCameraImageResults { - let cameraImage: CIImage - var depthImage: CIImage? = nil - var confidenceImage: CIImage? = nil - - let segmentationLabelImage: CIImage - let segmentedClasses: [AccessibilityFeatureClass] - let detectedFeatureMap: [UUID: DetectedAccessibilityFeature] - let cameraTransform: simd_float4x4 - let cameraIntrinsics: simd_float3x3 - let interfaceOrientation: UIInterfaceOrientation - let originalImageSize: CGSize - - var segmentationColorImage: CIImage? = nil - var segmentationBoundingFrameImage: CIImage? = nil - - init( +public struct ARCameraImageResults { + public let cameraImage: CIImage + public var depthImage: CIImage? = nil + public var confidenceImage: CIImage? 
= nil + + public let segmentationLabelImage: CIImage + public let segmentedClasses: [AccessibilityFeatureClass] + public let detectedFeatureMap: [UUID: DetectedAccessibilityFeature] + public let cameraTransform: simd_float4x4 + public let cameraIntrinsics: simd_float3x3 + public let interfaceOrientation: UIInterfaceOrientation + public let originalImageSize: CGSize + + public var segmentationColorImage: CIImage? = nil + public var segmentationBoundingFrameImage: CIImage? = nil + + public init( cameraImage: CIImage, depthImage: CIImage? = nil, confidenceImage: CIImage? = nil, segmentationLabelImage: CIImage, segmentedClasses: [AccessibilityFeatureClass], detectedFeatureMap: [UUID: DetectedAccessibilityFeature], @@ -131,17 +131,17 @@ struct ARCameraImageResults { } } -struct ARCameraMeshResults { - let meshGPUSnapshot: MeshGPUSnapshot +public struct ARCameraMeshResults { + public let meshGPUSnapshot: MeshGPUSnapshot - let meshAnchors: [ARMeshAnchor] - let segmentationLabelImage: CIImage - let cameraTransform: simd_float4x4 - let cameraIntrinsics: simd_float3x3 + public let meshAnchors: [ARMeshAnchor] + public let segmentationLabelImage: CIImage + public let cameraTransform: simd_float4x4 + public let cameraIntrinsics: simd_float3x3 - let lastUpdated: TimeInterval + public let lastUpdated: TimeInterval - init( + public init( meshGPUSnapshot: MeshGPUSnapshot, meshAnchors: [ARMeshAnchor], segmentationLabelImage: CIImage, @@ -161,11 +161,11 @@ struct ARCameraMeshResults { /** A struct to cache camera properties for optimization. */ -struct ARCameraCache { - var cameraImageSize: CGSize? - var interfaceOrientation: UIInterfaceOrientation +public struct ARCameraCache { + public var cameraImageSize: CGSize? + public var interfaceOrientation: UIInterfaceOrientation - init(cameraImageSize: CGSize? = nil, interfaceOrientation: UIInterfaceOrientation = .portrait) { + public init(cameraImageSize: CGSize? 
= nil, interfaceOrientation: UIInterfaceOrientation = .portrait) { self.cameraImageSize = cameraImageSize self.interfaceOrientation = interfaceOrientation } @@ -178,61 +178,61 @@ struct ARCameraCache { - First, initialize with local properties (e.g. pixel buffer pools). - Accept configuration of the SegmentationARPipeline through a separate `configure()` method. */ -final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessingDelegate { - var selectedClasses: [AccessibilityFeatureClass] = [] - var segmentationPipeline: SegmentationARPipeline? = nil +public final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessingDelegate { + public var selectedClasses: [AccessibilityFeatureClass] = [] + public var segmentationPipeline: SegmentationARPipeline? = nil /// Whether to enable enhanced analysis (e.g., anchor-based analysis) or not. - var isEnhancedAnalysisEnabled: Bool = false - var metalContext: MetalContext? = nil - var cameraOutputImageCallback: ((any CaptureImageDataProtocol) -> Void)? = nil + public var isEnhancedAnalysisEnabled: Bool = false + public var metalContext: MetalContext? = nil + public var cameraOutputImageCallback: ((any CaptureImageDataProtocol) -> Void)? = nil /// Mesh update callbacks not in use for now, as creating snapshots in real-time is expensive. // var cameraOutputMeshCallback: ((any CaptureMeshDataProtocol) -> Void)? = nil // Consumer that will receive processed overlays (weak to avoid retain cycles) - weak var outputConsumer: ARSessionCameraProcessingOutputConsumer? = nil - var imageResolution: CGSize = .zero - @Published var interfaceOrientation: UIInterfaceOrientation = .portrait + public weak var outputConsumer: ARSessionCameraProcessingOutputConsumer? = nil + public var imageResolution: CGSize = .zero + @Published public var interfaceOrientation: UIInterfaceOrientation = .portrait - var meshGPUSnapshotGenerator: MeshGPUSnapshotGenerator? 
= nil - var capturedMeshSnapshotGenerator: CapturedMeshSnapshotGenerator? = nil + public var meshGPUSnapshotGenerator: MeshGPUSnapshotGenerator? = nil + public var capturedMeshSnapshotGenerator: CapturedMeshSnapshotGenerator? = nil - var frameRate: Int = 15 - var lastFrameTime: TimeInterval = 0 - var meshFrameRate: Int = 15 - var lastMeshFrameTime: TimeInterval = 0 + public var frameRate: Int = 15 + public var lastFrameTime: TimeInterval = 0 + public var meshFrameRate: Int = 15 + public var lastMeshFrameTime: TimeInterval = 0 // Contexts depending on type of color space processing required - let colorContext = CIContext(options: nil) - let rawContext = CIContext(options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) + public let colorContext = CIContext(options: nil) + public let rawContext = CIContext(options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) // Properties for processing camera and depth frames // Pixel buffer pools for rendering camera frames to fixed size as segmentation model input (pre-defined size) - var cameraCroppedPixelBufferPool: CVPixelBufferPool? = nil - var cameraColorSpace: CGColorSpace? = CGColorSpaceCreateDeviceRGB() - var cameraPixelFormatType: OSType = kCVPixelFormatType_32BGRA - var segmentationBoundingFrameColorSpace: CGColorSpace? = CGColorSpaceCreateDeviceRGB() - var depthPixelBufferPool: CVPixelBufferPool? = nil - var depthPixelFormatType: OSType = kCVPixelFormatType_DepthFloat32 - var depthColorSpace: CGColorSpace? = nil + public var cameraCroppedPixelBufferPool: CVPixelBufferPool? = nil + public var cameraColorSpace: CGColorSpace? = CGColorSpaceCreateDeviceRGB() + public var cameraPixelFormatType: OSType = kCVPixelFormatType_32BGRA + public var segmentationBoundingFrameColorSpace: CGColorSpace? = CGColorSpaceCreateDeviceRGB() + public var depthPixelBufferPool: CVPixelBufferPool? 
= nil + public var depthPixelFormatType: OSType = kCVPixelFormatType_DepthFloat32 + public var depthColorSpace: CGColorSpace? = nil // Pixel buffer pools for backing segmentation images to pixel buffer of camera frame size - var segmentationMaskPixelBufferPool: CVPixelBufferPool? = nil - var segmentationMaskPixelFormatType: OSType = kCVPixelFormatType_OneComponent8 + public var segmentationMaskPixelBufferPool: CVPixelBufferPool? = nil + public var segmentationMaskPixelFormatType: OSType = kCVPixelFormatType_OneComponent8 /// TODO: While the segmentation color space is hard-coded for now, add it as part of the AccessibilityFeatureConfig later. - var segmentationMaskColorSpace: CGColorSpace? = nil - var segmentationColorPixelFormatType: OSType = kCVPixelFormatType_32BGRA - var segmentationColorColorSpace: CGColorSpace? = CGColorSpaceCreateDeviceRGB() + public var segmentationMaskColorSpace: CGColorSpace? = nil + public var segmentationColorPixelFormatType: OSType = kCVPixelFormatType_32BGRA + public var segmentationColorColorSpace: CGColorSpace? = CGColorSpaceCreateDeviceRGB() - @Published var isConfigured: Bool = false + @Published public var isConfigured: Bool = false // Latest processed results - var cameraImageResults: ARCameraImageResults? - var cameraMeshResults: ARCameraMeshResults? - var cameraCache: ARCameraCache = ARCameraCache() + public var cameraImageResults: ARCameraImageResults? + public var cameraMeshResults: ARCameraMeshResults? 
+ public var cameraCache: ARCameraCache = ARCameraCache() - override init() { + public override init() { super.init() } - func configure( + public func configure( selectedClasses: [AccessibilityFeatureClass], segmentationPipeline: SegmentationARPipeline, metalContext: MetalContext?, isEnhancedAnalysisEnabled: Bool, @@ -257,7 +257,7 @@ final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessi } } - func setVideoFormatImageResolution(_ imageResolution: CGSize) { + public func setVideoFormatImageResolution(_ imageResolution: CGSize) { self.imageResolution = imageResolution do { try setupSegmentationPixelBufferPool(size: imageResolution) @@ -266,13 +266,13 @@ final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessi } } - func setOrientation(_ orientation: UIInterfaceOrientation) { + public func setOrientation(_ orientation: UIInterfaceOrientation) { Task { @MainActor in self.interfaceOrientation = orientation } } - func setFrameRate(_ frameRate: Int) { + public func setFrameRate(_ frameRate: Int) { self.frameRate = frameRate } @@ -280,26 +280,26 @@ final class ARCameraManager: NSObject, ObservableObject, ARSessionCameraProcessi self.meshFrameRate = meshFrameRate } - func session(_ session: ARSession, didUpdate frame: ARFrame) { + public func session(_ session: ARSession, didUpdate frame: ARFrame) { handleSessionFrameUpdate(frame: frame) } /// TODO: MESH PROCESSING: Use the mesh processing as well in future. 
- func session(_ session: ARSession, didAdd anchors: [ARAnchor]) { + public func session(_ session: ARSession, didAdd anchors: [ARAnchor]) { handleSessionMeshUpdate(anchors, updateType: .add) } - func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { + public func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { handleSessionMeshUpdate(anchors, updateType: .update) } - func session(_ session: ARSession, didRemove anchors: [ARAnchor]) { + public func session(_ session: ARSession, didRemove anchors: [ARAnchor]) { handleSessionMeshUpdate(anchors, updateType: .remove) } } // Functions to handle the image processing pipeline -extension ARCameraManager { +public extension ARCameraManager { private func handleSessionFrameUpdate(frame: ARFrame) { guard isConfigured else { return @@ -375,7 +375,8 @@ extension ARCameraManager { croppedSize: CGSize = PointNMapConstants.SelectedAccessibilityFeatureConfig.inputSize ) async throws -> ARCameraImageResults { guard let cameraCroppedPixelBufferPool = cameraCroppedPixelBufferPool, - let segmentationPixelBufferPool = segmentationMaskPixelBufferPool else { + let segmentationPixelBufferPool = segmentationMaskPixelBufferPool + else { throw ARCameraManagerError.pixelBufferPoolCreationFailed } guard let segmentationPipeline = segmentationPipeline else { @@ -531,7 +532,7 @@ extension ARCameraManager { } // Functions to handle the mesh processing pipeline -extension ARCameraManager { +public extension ARCameraManager { private func handleSessionMeshUpdate(_ anchors: [ARAnchor], updateType: MeshUpdateType) { guard isEnhancedAnalysisEnabled else { return @@ -613,7 +614,7 @@ extension ARCameraManager { } // Functions to orient and fix the camera and depth frames -extension ARCameraManager { +public extension ARCameraManager { private func setUpPreAllocatedPixelBufferPools(size: CGSize) throws { // Set up the pixel buffer pool for future flattening of camera images let cameraPixelBufferPoolAttributes: [String: Any] = [ 
@@ -730,14 +731,20 @@ extension ARCameraManager { } } -struct CapturedMeshDependencies { - let capturedMeshSnapshotGenerator: CapturedMeshSnapshotGenerator - let metalContext: MetalContext - let meshGPUSnapshot: MeshGPUSnapshot +public struct CapturedMeshDependencies { + public let capturedMeshSnapshotGenerator: CapturedMeshSnapshotGenerator + public let metalContext: MetalContext + public let meshGPUSnapshot: MeshGPUSnapshot + + public init(capturedMeshSnapshotGenerator: CapturedMeshSnapshotGenerator, metalContext: MetalContext, meshGPUSnapshot: MeshGPUSnapshot) { + self.capturedMeshSnapshotGenerator = capturedMeshSnapshotGenerator + self.metalContext = metalContext + self.meshGPUSnapshot = meshGPUSnapshot + } } // Functions to perform final session update -extension ARCameraManager { +public extension ARCameraManager { /** Combines the two methods below for convenience. */ diff --git a/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift index 8ee1fffe..d38da668 100644 --- a/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift @@ -14,7 +14,7 @@ import simd /// The consumer of post-processed camera outputs (e.g., overlay images). @MainActor -protocol ARSessionCameraProcessingOutputConsumer: AnyObject { +public protocol ARSessionCameraProcessingOutputConsumer: AnyObject { func cameraOutputImage( _ delegate: ARSessionCameraProcessingDelegate, metalContext: MetalContext, @@ -40,7 +40,7 @@ protocol ARSessionCameraProcessingOutputConsumer: AnyObject { func pauseSession() } -protocol ARSessionCameraProcessingDelegate: ARSessionDelegate { +public protocol ARSessionCameraProcessingDelegate: ARSessionDelegate { /// Set by the host (e.g., ARCameraViewController) to receive processed overlays. @MainActor var outputConsumer: ARSessionCameraProcessingOutputConsumer? 
{ get set } @@ -54,13 +54,21 @@ protocol ARSessionCameraProcessingDelegate: ARSessionDelegate { /** A small struct to save other important attributes of the mesh to maintain sync. */ -struct MeshOtherDetails: Sendable { - let vertexStride: Int - let vertexOffset: Int - let indexStride: Int - let classificationStride: Int +public struct MeshOtherDetails: Sendable { + public let vertexStride: Int + public let vertexOffset: Int + public let indexStride: Int + public let classificationStride: Int - let totalVertexCount: Int + public let totalVertexCount: Int + + public init(vertexStride: Int, vertexOffset: Int, indexStride: Int, classificationStride: Int, totalVertexCount: Int) { + self.vertexStride = vertexStride + self.vertexOffset = vertexOffset + self.indexStride = indexStride + self.classificationStride = classificationStride + self.totalVertexCount = totalVertexCount + } } /** @@ -71,8 +79,8 @@ struct MeshOtherDetails: Sendable { Also processes the mesh data and (optionally) maintains the mesh entities in the ARView scene. */ @MainActor -final class ARCameraViewController: UIViewController, ARSessionCameraProcessingOutputConsumer { - var arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate +public final class ARCameraViewController: UIViewController, ARSessionCameraProcessingOutputConsumer { + public var arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate /** Sub-view containing the other views @@ -125,16 +133,16 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO private var meshRecords: [AccessibilityFeatureClass: SegmentationMeshRecord] = [:] private var meshOtherDetails: MeshOtherDetails? 
= nil - init(arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate) { + public init(arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate) { self.arSessionCameraProcessingDelegate = arSessionCameraProcessingDelegate super.init(nibName: nil, bundle: nil) } - required init?(coder: NSCoder) { + public required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") } - override func viewDidLoad() { + public override func viewDidLoad() { super.viewDidLoad() view.clipsToBounds = true @@ -236,7 +244,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO } } - func getOrientation() -> UIInterfaceOrientation { + public func getOrientation() -> UIInterfaceOrientation { // TODO: While we are requested to replace usage with effectiveGeometry.interfaceOrientation, // it seems to cause issues with getting the correct orientation. // Need to investigate further. @@ -254,23 +262,23 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO return getOrientation().isPortrait } - func applyDebugIfNeeded() { + public func applyDebugIfNeeded() { arView.environment.sceneUnderstanding.options.insert(.occlusion) /// TODO: MESH PROCESSING: Use the mesh processing visualization // arView.debugOptions.insert(.showSceneUnderstanding) } - override func viewWillAppear(_ animated: Bool) { + public override func viewWillAppear(_ animated: Bool) { super.viewWillAppear(animated) runSessionIfNeeded() } - override func viewWillDisappear(_ animated: Bool) { + public override func viewWillDisappear(_ animated: Bool) { super.viewWillDisappear(animated) pauseSession() } - override func viewDidLayoutSubviews() { + public override func viewDidLayoutSubviews() { super.viewDidLayoutSubviews() updateFitConstraints() updateAlignConstraints() @@ -278,7 +286,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO 
arSessionCameraProcessingDelegate.setOrientation(getOrientation()) } - override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) { + public override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) { super.viewWillTransition(to: size, with: coordinator) coordinator.animate(alongsideTransition: { _ in self.updateFitConstraints() @@ -293,7 +301,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO // pauseSession() // } - func runSessionIfNeeded() { + public func runSessionIfNeeded() { let config = ARWorldTrackingConfiguration() config.worldAlignment = .gravityAndHeading if ARWorldTrackingConfiguration.supportsSceneReconstruction(.meshWithClassification) { @@ -318,7 +326,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO /** Resumes the AR session and sets its delegate. */ - func resumeSession() { + public func resumeSession() { arView.session.delegate = arSessionCameraProcessingDelegate runSessionIfNeeded() } @@ -326,7 +334,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO /** Pauses the AR session and removes its delegate. 
*/ - func pauseSession() { + public func pauseSession() { arView.session.delegate = nil arView.session.pause() self.anchorEntity.removeFromParent() @@ -335,7 +343,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO self.meshOtherDetails = nil } - func cameraOutputImage( + public func cameraOutputImage( _ delegate: ARSessionCameraProcessingDelegate, metalContext: MetalContext, segmentationImage: CIImage?, segmentationBoundingFrameImage: CIImage?, @@ -352,7 +360,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO } } - func cameraOutputMesh( + public func cameraOutputMesh( _ delegate: ARSessionCameraProcessingDelegate, metalContext: MetalContext, meshGPUSnapshot: MeshGPUSnapshot, @@ -410,7 +418,7 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO ) } - func getMeshRecordDetails() -> ( + public func getMeshRecordDetails() -> ( records: [AccessibilityFeatureClass: SegmentationMeshRecord], otherDetails: MeshOtherDetails? 
) { @@ -418,17 +426,17 @@ final class ARCameraViewController: UIViewController, ARSessionCameraProcessingO } } -struct HostedARCameraViewContainer: UIViewControllerRepresentable { - var arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate +public struct HostedARCameraViewContainer: UIViewControllerRepresentable { + public var arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate - func makeUIViewController(context: Context) -> ARCameraViewController { + public func makeUIViewController(context: Context) -> ARCameraViewController { let vc = ARCameraViewController(arSessionCameraProcessingDelegate: arSessionCameraProcessingDelegate) return vc } - func updateUIViewController(_ uiViewController: ARCameraViewController, context: Context) { + public func updateUIViewController(_ uiViewController: ARCameraViewController, context: Context) { } - static func dismantleUIViewController(_ uiViewController: ARCameraViewController, coordinator: ()) { + public static func dismantleUIViewController(_ uiViewController: ARCameraViewController, coordinator: ()) { } } diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift index a4050827..c8c052fe 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/AttributeEstimationPipeline.swift @@ -83,6 +83,8 @@ public class AttributeEstimationPipeline: ObservableObject { public var prerequisiteCache = PrerequisiteCache() + public init() {} + /// TODO: MESH PROCESSING: Add mesh data processing components when needed. 
public func configure( captureImageData: (any CaptureImageDataProtocol), @@ -108,7 +110,7 @@ public class AttributeEstimationPipeline: ObservableObject { } public func setPrerequisites( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws { let accessibilityFeatureKind = accessibilityFeature.accessibilityFeatureClass.kind let isMeshEnabled: Bool = captureMeshData != nil @@ -171,7 +173,7 @@ public class AttributeEstimationPipeline: ObservableObject { public func processLocationRequest( deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws { let locationRequestResult = try self.calculateLocation( deviceLocation: deviceLocation, @@ -212,11 +214,11 @@ public class AttributeEstimationPipeline: ObservableObject { } public func processAttributeRequest( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws { var attributeAssignmentFlagError = false - for attribute in accessibilityFeature.accessibilityFeatureClass.attributes { + for attribute in accessibilityFeature.accessibilityFeatureClass.kind.attributes { do { switch attribute { case .width: diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift index a7690e0f..0417d21b 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Archive/OtherAttributeExtensionLegacy.swift @@ -15,7 +15,7 @@ import CoreLocation */ public extension AttributeEstimationPipeline { func 
calculateWidthLegacy( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { guard let depthMapProcessor = self.depthMapProcessor else { throw AttributeEstimationPipelineError.configurationError(AttributeEstimationPipelineConstants.Texts.depthMapProcessorKey) @@ -52,7 +52,7 @@ public extension AttributeEstimationPipeline { } func calculateRunningSlopeLegacy( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { guard let depthMapProcessor = self.depthMapProcessor else { throw AttributeEstimationPipelineError.configurationError(AttributeEstimationPipelineConstants.Texts.depthMapProcessorKey) @@ -91,7 +91,7 @@ public extension AttributeEstimationPipeline { } func calculateCrossSlopeLegacy( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { guard let depthMapProcessor = self.depthMapProcessor else { throw AttributeEstimationPipelineError.configurationError(AttributeEstimationPipelineConstants.Texts.depthMapProcessorKey) diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift index bb3c885d..84dcd043 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationExtension.swift @@ -10,11 +10,11 @@ import CoreLocation public extension AttributeEstimationPipeline { func calculateLocation( deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: 
EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { let isMeshEnabled: Bool = self.captureMeshData != nil - let oswGeometry = accessibilityFeature.accessibilityFeatureClass.kind?.geometry ?? FeatureGeometry.default - switch(oswGeometry) { + let geometry = accessibilityFeature.accessibilityFeatureClass.kind.geometry + switch(geometry) { case .linestring: if isMeshEnabled { return try self.calculateLocationFromMeshForLineString( @@ -46,7 +46,7 @@ public extension AttributeEstimationPipeline { public extension AttributeEstimationPipeline { func calculateLocationFromImageForPoint( deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { guard let depthMapProcessor = self.depthMapProcessor else { throw AttributeEstimationPipelineError.configurationError(AttributeEstimationPipelineConstants.Texts.depthMapProcessorKey) @@ -69,7 +69,7 @@ public extension AttributeEstimationPipeline { func calculateLocationFromImageForLineString( deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { guard let depthMapProcessor = self.depthMapProcessor else { throw AttributeEstimationPipelineError.configurationError(AttributeEstimationPipelineConstants.Texts.depthMapProcessorKey) @@ -109,7 +109,7 @@ public extension AttributeEstimationPipeline { func calculateLocationFromImageForPolygon( deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { guard let depthMapProcessor = self.depthMapProcessor else { throw 
AttributeEstimationPipelineError.configurationError(AttributeEstimationPipelineConstants.Texts.depthMapProcessorKey) @@ -144,7 +144,7 @@ public extension AttributeEstimationPipeline { public extension AttributeEstimationPipeline { func calculateLocationFromMeshForLineString( deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { guard let depthMapProcessor = self.depthMapProcessor else { throw AttributeEstimationPipelineError.configurationError(AttributeEstimationPipelineConstants.Texts.depthMapProcessorKey) diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift index 10e94187..b5513cfd 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromImageExtension.swift @@ -14,7 +14,7 @@ public extension AttributeEstimationPipeline { localizationProcessor: LocalizationProcessor, captureImageData: CaptureImageData, deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { let featureDepthValue = try depthMapProcessor.getFeatureDepthAtCentroidInRadius( detectedFeature: accessibilityFeature, radius: 3 @@ -45,7 +45,7 @@ public extension AttributeEstimationPipeline { localizationProcessor: LocalizationProcessor, captureImageData: CaptureImageData, deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature, + accessibilityFeature: any 
EditableAccessibilityFeatureProtocol, plane: Plane, worldPoints: [WorldPoint] ) throws -> LocationRequestResult { guard let worldPointsProcessor = self.worldPointsProcessor else { @@ -98,7 +98,7 @@ public extension AttributeEstimationPipeline { localizationProcessor: LocalizationProcessor, captureImageData: CaptureImageData, deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { let polygonPoints = accessibilityFeature.contourDetails.normalizedPoints let leftMostPoint = polygonPoints.min { $0.x < $1.x } @@ -147,7 +147,7 @@ public extension AttributeEstimationPipeline { localizationProcessor: LocalizationProcessor, captureImageData: CaptureImageData, deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> LocationRequestResult { guard let trapezoidBoundPoints = accessibilityFeature.contourDetails.trapezoidPoints, trapezoidBoundPoints.count == 4 else { diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift index 4041c608..ed718a71 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/Location/LocationFromMeshExtension.swift @@ -15,7 +15,7 @@ public extension AttributeEstimationPipeline { localizationProcessor: LocalizationProcessor, captureImageData: CaptureImageData, deviceLocation: CLLocationCoordinate2D, - accessibilityFeature: EditableAccessibilityFeature, + accessibilityFeature: any EditableAccessibilityFeatureProtocol, plane: 
Plane, meshContents: MeshContents ) throws -> LocationRequestResult { guard let worldPointsProcessor = self.worldPointsProcessor else { diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift index 92199a25..d915fe6c 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/CrossSlopeExtension.swift @@ -11,7 +11,7 @@ import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateCrossSlope( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { let isMeshEnabled: Bool = self.captureMeshData != nil if isMeshEnabled { @@ -21,7 +21,7 @@ public extension AttributeEstimationPipeline { } func calculateCrossSlopeFromImage( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { let worldPoints: [WorldPoint] = try self.prerequisiteCache.worldPoints ?? self.getWorldPoints( accessibilityFeature: accessibilityFeature @@ -44,7 +44,7 @@ public extension AttributeEstimationPipeline { } func calculateCrossSlopeFromMesh( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { /// TODO: For optimization, replace the usage of meshPolygons with meshTriangles (GPU-based) let meshPolygons: [MeshPolygon] = try self.prerequisiteCache.meshPolygons ?? 
self.getMeshContents( diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift index 5ac20098..b079e6d8 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/RunninSlopeExtension.swift @@ -11,7 +11,7 @@ import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateRunningSlope( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { let isMeshEnabled: Bool = self.captureMeshData != nil if isMeshEnabled { @@ -26,7 +26,7 @@ public extension AttributeEstimationPipeline { Assumes that the plane being calculated has its first vector aligned with the direction of travel. */ func calculateRunningSlopeFromImage( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { let worldPoints: [WorldPoint] = try self.prerequisiteCache.worldPoints ?? self.getWorldPoints( accessibilityFeature: accessibilityFeature @@ -49,7 +49,7 @@ public extension AttributeEstimationPipeline { } func calculateRunningSlopeFromMesh( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { /// TODO: For optimization, replace the usage of meshPolygons with meshTriangles (GPU-based) let meshPolygons: [MeshPolygon] = try self.prerequisiteCache.meshPolygons ?? 
self.getMeshContents( diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift index dbc8b391..0b75af52 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/SurfaceIntegrityExtension.swift @@ -11,7 +11,7 @@ import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateSurfaceIntegrity( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { let isMeshEnabled: Bool = self.captureMeshData != nil if isMeshEnabled { @@ -21,7 +21,7 @@ public extension AttributeEstimationPipeline { } func calculateSurfaceIntegrityFromImage( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { guard let captureImageData = self.captureImageData else { throw AttributeEstimationPipelineError.missingCaptureData @@ -61,7 +61,7 @@ public extension AttributeEstimationPipeline { } func calculateSurfaceIntegrityFromMesh( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { guard let captureMeshData = self.captureMeshData else { throw AttributeEstimationPipelineError.missingCaptureData @@ -95,7 +95,7 @@ public extension AttributeEstimationPipeline { return surfaceIntegrityAttributeValue } - func getDamageDetectionResults(accessibilityFeature: EditableAccessibilityFeature) throws -> [DamageDetectionResult] { + func 
getDamageDetectionResults(accessibilityFeature: any EditableAccessibilityFeatureProtocol) throws -> [DamageDetectionResult] { guard let captureImageData = self.captureImageData else { throw AttributeEstimationPipelineError.missingCaptureData } diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift index d96f220d..04caf5eb 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/OtherAttributes/WidthExtension.swift @@ -11,7 +11,7 @@ import PointNMapShaderTypes public extension AttributeEstimationPipeline { func calculateWidth( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { let isMeshEnabled: Bool = self.captureMeshData != nil if isMeshEnabled { @@ -21,7 +21,7 @@ public extension AttributeEstimationPipeline { } func calculateWidthFromImage( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { guard let worldPointsProcessor = self.worldPointsProcessor else { throw AttributeEstimationPipelineError.configurationError( @@ -61,7 +61,7 @@ public extension AttributeEstimationPipeline { } func calculateWidthFromMesh( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> AccessibilityFeatureAttribute.Value { guard let worldPointsProcessor = self.worldPointsProcessor else { throw AttributeEstimationPipelineError.configurationError( diff --git 
a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift index 0305ecbf..e0994b5f 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/AttributeEstimation/Extensions/UtilityExtension.swift @@ -16,7 +16,7 @@ public extension AttributeEstimationPipeline { Get world points corresponding to the feature based on the segmentation label image and depth map, using the world points processor. */ func getWorldPoints( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> [WorldPoint] { guard let captureImageData = self.captureImageData else { throw AttributeEstimationPipelineError.missingCaptureData @@ -39,7 +39,7 @@ public extension AttributeEstimationPipeline { /** Restructure world points into a 2D grid based on their projected pixel coordinates, for more efficient spatial queries. */ - func getWorldPointsGrid(accessibilityFeature: EditableAccessibilityFeature) throws -> WorldPointsGrid { + func getWorldPointsGrid(accessibilityFeature: any EditableAccessibilityFeatureProtocol) throws -> WorldPointsGrid { guard let captureImageData = self.captureImageData else { throw AttributeEstimationPipelineError.missingCaptureData } @@ -64,7 +64,7 @@ public extension AttributeEstimationPipeline { Intermediary method to calculate the plane of the feature given the accessibility feature. */ func calculateAlignedPlane( - accessibilityFeature: EditableAccessibilityFeature, + accessibilityFeature: any EditableAccessibilityFeatureProtocol, worldPoints: [WorldPoint]? 
= nil ) throws -> Plane { guard let planeProcessorLocal = self.planeProcessor else { @@ -88,7 +88,7 @@ public extension AttributeEstimationPipeline { } func calculateProjectedPlane( - accessibilityFeature: EditableAccessibilityFeature, + accessibilityFeature: any EditableAccessibilityFeatureProtocol, plane: Plane ) throws -> ProjectedPlane { guard let planeProcessor = self.planeProcessor else { @@ -113,7 +113,7 @@ public extension AttributeEstimationPipeline { */ public extension AttributeEstimationPipeline { func getMeshContents( - accessibilityFeature: EditableAccessibilityFeature + accessibilityFeature: any EditableAccessibilityFeatureProtocol ) throws -> MeshContents { guard let captureMeshData = self.captureMeshData else { throw AttributeEstimationPipelineError.missingCaptureData @@ -129,7 +129,7 @@ public extension AttributeEstimationPipeline { } func calculateAlignedPlane( - accessibilityFeature: EditableAccessibilityFeature, + accessibilityFeature: any EditableAccessibilityFeatureProtocol, meshPolygons: [MeshPolygon]? = nil ) throws -> Plane { guard let planeProcessorLocal = self.planeProcessor else { diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift index 8fedcce7..cbce8a3f 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureConfig.swift @@ -11,7 +11,7 @@ import ARKit public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Comparable, CustomStringConvertible { public let id: String public let name: String - public let kind: AccessibilityFeatureKind? 
+ public let kind: AccessibilityFeatureKind /** Segmentation-related constants @@ -45,7 +45,7 @@ public struct AccessibilityFeatureClass: Identifiable, Hashable, Sendable, Compa // public let oswPolicy: OSWPolicy public init( - id: String, name: String, kind: AccessibilityFeatureKind? = nil, + id: String, name: String, kind: AccessibilityFeatureKind = .default, grayscaleValue: Float, labelValue: UInt8, color: CIColor, bounds: CGRect? = nil, unionOfMasksPolicy: UnionOfMasksPolicy = .default, meshClassification: Set = [], diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift index 3b8a7855..a5c91736 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/AccessibilityFeatureKind.swift @@ -15,12 +15,13 @@ public enum AccessibilityFeatureKind: String, Identifiable, Codable, CaseIterabl case trafficLight = "traffic_light" case trafficSign = "traffic_sign" case vegetation = "vegetation" + case unknown = "unknown" public var id: String { return self.rawValue } - public var geometry: FeatureGeometry { + public var geometry: MappingGeometry { switch self { case .sidewalk: return .linestring case .building: return .polygon @@ -49,3 +50,7 @@ public enum AccessibilityFeatureKind: String, Identifiable, Codable, CaseIterabl } } } + +extension AccessibilityFeatureKind { + public static let `default`: AccessibilityFeatureKind = .unknown +} diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift index 36a6105a..440bf707 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift +++ 
b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Definitions/EditableAccessibilityFeature.swift @@ -7,7 +7,43 @@ import Foundation import CoreLocation -public class EditableAccessibilityFeature: Identifiable, Equatable, AccessibilityFeatureProtocol, DetectedFeatureProtocol { +public protocol EditableAccessibilityFeatureProtocol: AccessibilityFeatureProtocol, DetectedFeatureProtocol { + var id: UUID { get } + var selectedAnnotationOption: AnnotationOption { get set } + var locationDetails: LocationDetails? { get set } + var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] { get set } + var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] { get set } + var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] { get set } + + init( + id: UUID, + detectedAccessibilityFeature: DetectedAccessibilityFeature + ) + func setAnnotationOption(_ option: AnnotationOption) + func getLastLocationCoordinate() -> CLLocationCoordinate2D? 
+ func setLocationDetails(locationDetails: LocationDetails) + func setAttributeValue( + _ value: AccessibilityFeatureAttribute.Value, + for attribute: AccessibilityFeatureAttribute, + isCalculated: Bool, + isFinal: Bool + ) throws + func setAttributeValue( + _ value: AccessibilityFeatureAttribute.Value, + for attribute: AccessibilityFeatureAttribute, + isCalculated: Bool + ) throws + func setAttributeValue( + _ value: AccessibilityFeatureAttribute.Value, + for attribute: AccessibilityFeatureAttribute + ) throws + func setExperimentalAttributeValue( + _ value: AccessibilityFeatureAttribute.Value, + for attribute: AccessibilityFeatureAttribute + ) throws +} + +open class EditableAccessibilityFeature: EditableAccessibilityFeatureProtocol { public let id: UUID public let accessibilityFeatureClass: AccessibilityFeatureClass @@ -17,16 +53,12 @@ public class EditableAccessibilityFeature: Identifiable, Equatable, Accessibilit public var selectedAnnotationOption: AnnotationOption = .individualOption(.default) public var locationDetails: LocationDetails? - /// If isExisting is false, even if an osw element is associated, it means the feature is new. - /// If isExisting is true, it means the feature corresponds to an existing real-world feature, and the oswElement (if present) represents that existing feature in OSW. - public var isExisting: Bool = false - public var oswElement: (any OSWElement)? public var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] public var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] = [:] public var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
= [:] - public init( + public required init( id: UUID = UUID(), detectedAccessibilityFeature: DetectedAccessibilityFeature ) { @@ -36,13 +68,13 @@ public class EditableAccessibilityFeature: Identifiable, Equatable, Accessibilit self.locationDetails = nil - calculatedAttributeValues = Dictionary(uniqueKeysWithValues: accessibilityFeatureClass.attributes.map { attribute in + calculatedAttributeValues = Dictionary(uniqueKeysWithValues: accessibilityFeatureClass.kind.attributes.map { attribute in return (attribute, nil) }) - attributeValues = Dictionary(uniqueKeysWithValues: accessibilityFeatureClass.attributes.map { attribute in + attributeValues = Dictionary(uniqueKeysWithValues: accessibilityFeatureClass.kind.attributes.map { attribute in return (attribute, nil) }) - experimentalAttributeValues = Dictionary(uniqueKeysWithValues: accessibilityFeatureClass.experimentalAttributes.map { attribute in + experimentalAttributeValues = Dictionary(uniqueKeysWithValues: accessibilityFeatureClass.kind.experimentalAttributes.map { attribute in return (attribute, nil) }) } @@ -52,8 +84,6 @@ public class EditableAccessibilityFeature: Identifiable, Equatable, Accessibilit accessibilityFeatureClass: AccessibilityFeatureClass, contourDetails: ContourDetails, locationDetails: LocationDetails?, - isExisting: Bool = false, - oswElement: (any OSWElement)? = nil, calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?], attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?], experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] 
@@ -62,8 +92,6 @@ public class EditableAccessibilityFeature: Identifiable, Equatable, Accessibilit self.contourDetails = contourDetails self.accessibilityFeatureClass = accessibilityFeatureClass self.locationDetails = locationDetails - self.isExisting = isExisting - self.oswElement = oswElement self.calculatedAttributeValues = calculatedAttributeValues self.attributeValues = attributeValues self.experimentalAttributeValues = experimentalAttributeValues @@ -83,14 +111,6 @@ public class EditableAccessibilityFeature: Identifiable, Equatable, Accessibilit self.locationDetails = locationDetails } - public func setIsExisting(_ isExisting: Bool) { - self.isExisting = isExisting - } - - public func setOSWElement(oswElement: any OSWElement) { - self.oswElement = oswElement - } - public func setAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute, @@ -108,6 +128,14 @@ public class EditableAccessibilityFeature: Identifiable, Equatable, Accessibilit } } + public func setAttributeValue( + _ value: AccessibilityFeatureAttribute.Value, + for attribute: AccessibilityFeatureAttribute, + isCalculated: Bool = false + ) throws { + try setAttributeValue(value, for: attribute, isCalculated: isCalculated, isFinal: true) + } + public func setAttributeValue( _ value: AccessibilityFeatureAttribute.Value, for attribute: AccessibilityFeatureAttribute diff --git a/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageManager.swift b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageManager.swift index 0ff14b0c..35106b31 100644 --- a/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageManager.swift +++ b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageManager.swift @@ -8,7 +8,7 @@ import SwiftUI import Combine import simd -enum AnnotationImageManagerError: Error, LocalizedError { +public enum AnnotationImageManagerError: Error, LocalizedError { case notConfigured case segmentationNotConfigured case 
captureDataNotAvailable @@ -21,7 +21,7 @@ enum AnnotationImageManagerError: Error, LocalizedError { case invalidMeshData case meshRasterizationFailed - var errorDescription: String? { + public var errorDescription: String? { switch self { case .notConfigured: return "AnnotationImageManager is not configured." @@ -49,29 +49,37 @@ enum AnnotationImageManagerError: Error, LocalizedError { } } -struct AnnotationImageResults { - let cameraImage: CIImage - let segmentationLabelImage: CIImage +public struct AnnotationImageResults { + public let cameraImage: CIImage + public let segmentationLabelImage: CIImage - var alignedSegmentationLabelImages: [CIImage]? - var processedSegmentationLabelImage: CIImage? = nil - var featuresSourceCGImage: CGImage? = nil + public var alignedSegmentationLabelImages: [CIImage]? + public var processedSegmentationLabelImage: CIImage? = nil + public var featuresSourceCGImage: CGImage? = nil - var cameraOutputImage: CIImage? = nil - var segmentationOverlayOutputImage: CIImage? = nil - var featuresOverlayOutputImage: CIImage? = nil + public var cameraOutputImage: CIImage? = nil + public var segmentationOverlayOutputImage: CIImage? = nil + public var featuresOverlayOutputImage: CIImage? = nil } -struct AnnotationImageFeatureUpdateResults: Sendable { - let plane: Plane - let projectedPlane: ProjectedPlane - let damageDetectionResults: [DamageDetectionResult] +public struct AnnotationImageFeatureUpdateResults: Sendable { + public let plane: Plane + public let projectedPlane: ProjectedPlane + public let damageDetectionResults: [DamageDetectionResult] + + public init(plane: Plane, projectedPlane: ProjectedPlane, damageDetectionResults: [DamageDetectionResult]) { + self.plane = plane + self.projectedPlane = projectedPlane + self.damageDetectionResults = damageDetectionResults + } } /** A class to manage annotation image processing including segmentation mask post-processing and feature detection. 
*/ -final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageProcessingDelegate { +public final class AnnotationImageManager< + Feature: EditableAccessibilityFeature + >: NSObject, ObservableObject, AnnotationImageProcessingDelegate { private var selectedClasses: [AccessibilityFeatureClass] = [] private var segmentationAnnotationPipeline: SegmentationAnnotationPipeline? = nil private var grayscaleToColorFilter: GrayscaleToColorFilter? = nil @@ -81,18 +89,18 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP private var captureMeshData: (any CaptureMeshDataProtocol)? = nil var isEnhancedAnalysisEnabled: Bool = false - weak var outputConsumer: AnnotationImageProcessingOutputConsumer? = nil - @Published var interfaceOrientation: UIInterfaceOrientation = .portrait + public weak var outputConsumer: AnnotationImageProcessingOutputConsumer? = nil + @Published public var interfaceOrientation: UIInterfaceOrientation = .portrait private let context = CIContext(options: nil) - @Published var isConfigured: Bool = false + @Published public var isConfigured: Bool = false // Latest processed results - var annotationImageResults: AnnotationImageResults? + public var annotationImageResults: AnnotationImageResults? /// TODO: MESH PROCESSING: Integrate mesh data processing in the annotation image manager. - func configure( + public func configure( selectedClasses: [AccessibilityFeatureClass], segmentationAnnotationPipeline: SegmentationAnnotationPipeline, captureImageData: (any CaptureImageDataProtocol), captureMeshData: (any CaptureMeshDataProtocol)?, @@ -128,7 +136,7 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP Sets up aligned segmentation label images from the capture data history. MARK: Does not throw errors, since this is not critical to the annotation image processing. 
*/ - func setupAlignedSegmentationLabelImages( + public func setupAlignedSegmentationLabelImages( captureDataHistory: [CaptureImageData] ) { guard let _ = self.captureImageData, @@ -140,7 +148,7 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP self.annotationImageResults = annotationImageResults } - func setOrientation(_ orientation: UIInterfaceOrientation) { + public func setOrientation(_ orientation: UIInterfaceOrientation) { Task { await MainActor.run { self.interfaceOrientation = orientation @@ -151,9 +159,9 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP /** Updates the camera image, and recreates the overlay image. */ - func updateFeatureClass( + public func updateFeatureClass( accessibilityFeatureClass: AccessibilityFeatureClass - ) throws -> [EditableAccessibilityFeature] { + ) throws -> [Feature] { guard isConfigured else { throw AnnotationImageManagerError.notConfigured } @@ -227,9 +235,9 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP return accessibilityFeatures } - func updateFeature( + public func updateFeature( accessibilityFeatureClass: AccessibilityFeatureClass, - accessibilityFeatures: [EditableAccessibilityFeature], + accessibilityFeatures: [Feature], featureSelectedStatus: [UUID: Bool], updateFeatureResults: AnnotationImageFeatureUpdateResults? ) throws { @@ -295,7 +303,7 @@ final class AnnotationImageManager: NSObject, ObservableObject, AnnotationImageP /** Extension to handle camera image processing: orientation and cropping. */ -extension AnnotationImageManager { +public extension AnnotationImageManager { private func getCameraOutputImage() throws -> CIImage { guard let captureImageData = self.captureImageData else { throw AnnotationImageManagerError.captureDataNotAvailable @@ -321,7 +329,7 @@ extension AnnotationImageManager { /** Extension to handle segmentation mask post-processing. 
*/ -extension AnnotationImageManager { +public extension AnnotationImageManager { /** Aligns the segmentation label images from the capture data history to the current capture data. MARK: Does not throw errors, instead returns empty array on failure, since this is not critical to the annotation image processing. @@ -419,11 +427,11 @@ extension AnnotationImageManager { /** Extension to handle feature detection. */ -extension AnnotationImageManager { +public extension AnnotationImageManager { private func getAccessibilityFeatures( segmentationLabelImage: CIImage, accessibilityFeatureClass: AccessibilityFeatureClass - ) throws -> [EditableAccessibilityFeature] { + ) throws -> [Feature] { guard let segmentationAnnotationPipeline = self.segmentationAnnotationPipeline, let captureImageData = self.captureImageData else { throw AnnotationImageManagerError.segmentationNotConfigured @@ -437,14 +445,14 @@ extension AnnotationImageManager { orientation: imageOrientation ) let accessibilityFeatures = detectedFeatures.map { detectedFeature in - return EditableAccessibilityFeature( + return Feature( detectedAccessibilityFeature: detectedFeature ) } return accessibilityFeatures } - private func getFeaturesOverlayOutputImageWithSource(accessibilityFeatures: [EditableAccessibilityFeature]) + private func getFeaturesOverlayOutputImageWithSource(accessibilityFeatures: [Feature]) throws -> (sourceCGImage: CGImage, overlayImage: CIImage) { guard let captureImageData = self.captureImageData else { throw AnnotationImageManagerError.captureDataNotAvailable @@ -466,7 +474,7 @@ extension AnnotationImageManager { private func updateFeaturesOverlayOutputImageWithSource( sourceCGImage: CGImage, - accessibilityFeatures: [EditableAccessibilityFeature], + accessibilityFeatures: [Feature], featureSelectedStatus: [UUID: Bool] ) throws -> (sourceCGImage: CGImage, overlayImage: CIImage) { guard let captureImageData = self.captureImageData else { @@ -534,7 +542,7 @@ extension AnnotationImageManager { 
TODO: MESH PROCESSING: Integrate mesh data processing in the annotation image manager. */ -extension AnnotationImageManager { +public extension AnnotationImageManager { private func getMeshOverlayOutputImage( captureMeshData: (any CaptureMeshDataProtocol), polygonsNormalizedCoordinates: [(SIMD2, SIMD2, SIMD2)], @@ -610,7 +618,7 @@ extension AnnotationImageManager { /** Additional images for debugging */ -extension AnnotationImageManager { +public extension AnnotationImageManager { private func getPlaneImage( captureImageData: (any CaptureImageDataProtocol), size: CGSize, diff --git a/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageViewController.swift b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageViewController.swift index 1ddf517a..d213ab85 100644 --- a/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageViewController.swift +++ b/PointNMapShared/Sources/PointNMap/Annotation/AnnotationImageViewController.swift @@ -9,22 +9,22 @@ import SwiftUI import Combine @MainActor -protocol AnnotationImageProcessingOutputConsumer: AnyObject { +public protocol AnnotationImageProcessingOutputConsumer: AnyObject { func annotationOutputImage( _ delegate: AnnotationImageProcessingDelegate, image: CIImage?, overlayImage: CIImage?, overlay2Image: CIImage?, overlay3Image: CIImage? ) } -protocol AnnotationImageProcessingDelegate: AnyObject { +public protocol AnnotationImageProcessingDelegate: AnyObject { @MainActor var outputConsumer: AnnotationImageProcessingOutputConsumer? 
{ get set } @MainActor func setOrientation(_ orientation: UIInterfaceOrientation) } -class AnnotationImageViewController: UIViewController, AnnotationImageProcessingOutputConsumer { - var annotationImageManager: AnnotationImageManager +public class AnnotationImageViewController: UIViewController, AnnotationImageProcessingOutputConsumer { + public var annotationImageManager: AnnotationImageManager private let subView = UIView() @@ -69,16 +69,16 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing return iv }() - init(annotationImageManager: AnnotationImageManager) { + public init(annotationImageManager: AnnotationImageManager) { self.annotationImageManager = annotationImageManager super.init(nibName: nil, bundle: nil) } - required init?(coder: NSCoder) { + public required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") } - override func viewDidLoad() { + public override func viewDidLoad() { super.viewDidLoad() view.addSubview(subView) @@ -115,7 +115,7 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing ]) } - func getOrientation() -> UIInterfaceOrientation { + public func getOrientation() -> UIInterfaceOrientation { // TODO: While we are requested to replace usage with effectiveGeometry.interfaceOrientation, // it seems to cause issues with getting the correct orientation. // Need to investigate further. @@ -129,7 +129,7 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing return .landscapeLeft } - func annotationOutputImage( + public func annotationOutputImage( _ delegate: AnnotationImageProcessingDelegate, image: CIImage?, overlayImage: CIImage?, overlay2Image: CIImage?, overlay3Image: CIImage? 
) { @@ -153,12 +153,12 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing } } - override func viewDidLayoutSubviews() { + public override func viewDidLayoutSubviews() { super.viewDidLayoutSubviews() annotationImageManager.setOrientation(getOrientation()) } - override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) { + public override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) { super.viewWillTransition(to: size, with: coordinator) coordinator.animate(alongsideTransition: { _ in self.annotationImageManager.setOrientation(self.getOrientation()) @@ -167,17 +167,21 @@ class AnnotationImageViewController: UIViewController, AnnotationImageProcessing } } -struct HostedAnnotationImageViewController: UIViewControllerRepresentable{ - var annotationImageManager: AnnotationImageManager +public struct HostedAnnotationImageViewController: UIViewControllerRepresentable{ + public var annotationImageManager: AnnotationImageManager + + public init(annotationImageManager: AnnotationImageManager) { + self.annotationImageManager = annotationImageManager + } - func makeUIViewController(context: Context) -> AnnotationImageViewController { + public func makeUIViewController(context: Context) -> AnnotationImageViewController { let vc = AnnotationImageViewController(annotationImageManager: annotationImageManager) return vc } - func updateUIViewController(_ uiViewController: AnnotationImageViewController, context: Context) { + public func updateUIViewController(_ uiViewController: AnnotationImageViewController, context: Context) { } - static func dismantleUIViewController(_ uiViewController: AnnotationImageViewController, coordinator: ()) { + public static func dismantleUIViewController(_ uiViewController: AnnotationImageViewController, coordinator: ()) { } } diff --git 
a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift index 1adcc524..0b97a2e0 100644 --- a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationARPipeline.swift @@ -86,6 +86,8 @@ public final class SegmentationARPipeline: ObservableObject { private var segmentationModelRequestProcessor: SegmentationModelRequestProcessor? private var contourRequestProcessor: ContourRequestProcessor? + public init() {} + public func configure() throws { self.segmentationModelRequestProcessor = try SegmentationModelRequestProcessor( selectedClasses: self.selectedClasses) diff --git a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift index defc2760..3bdbcdc1 100644 --- a/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift +++ b/PointNMapShared/Sources/PointNMap/MachineLearning/Segmentation/SegmentationAnnotationPipeline.swift @@ -74,6 +74,8 @@ public final class SegmentationAnnotationPipeline: ObservableObject { /// TODO: Replace with the global Metal context private let context = CIContext() + public init() { } + public func configure() throws { self.contourRequestProcessor = try ContourRequestProcessor( contourEpsilon: self.contourEpsilon, @@ -225,8 +227,7 @@ public final class SegmentationAnnotationPipeline: ObservableObject { /// TODO: Handle sidewalk feature differently if needed, and improve the relevant trapezoid-creation logic. 
let largestFeature = detectedFeatures.sorted(by: {$0.contourDetails.area > $1.contourDetails.area}).first guard let largestFeature = largestFeature, - let largestFeatureClassKind = accessibilityFeatureClass.kind, - largestFeatureClassKind == .sidewalk else { + accessibilityFeatureClass.kind == .sidewalk else { self.isProcessing = false return detectedFeatures } diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/FeatureGeometry.swift b/PointNMapShared/Sources/PointNMap/Shared/Definitions/MappingGeometry.swift similarity index 69% rename from PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/FeatureGeometry.swift rename to PointNMapShared/Sources/PointNMap/Shared/Definitions/MappingGeometry.swift index 260db73f..d0bec559 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Components/FeatureGeometry.swift +++ b/PointNMapShared/Sources/PointNMap/Shared/Definitions/MappingGeometry.swift @@ -1,11 +1,11 @@ // -// FeatureGeometry.swift +// MappingGeometry.swift // IOSAccessAssessment // // Created by Himanshu on 4/30/26. 
// -public enum FeatureGeometry: String, CaseIterable, Hashable, Codable { +public enum MappingGeometry: String, CaseIterable, Hashable, Codable, Sendable { case point case linestring case polygon @@ -23,6 +23,6 @@ public enum FeatureGeometry: String, CaseIterable, Hashable, Codable { } } -public extension FeatureGeometry { - static let `default`: FeatureGeometry = .point +public extension MappingGeometry { + static let `default`: MappingGeometry = .point } diff --git a/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift b/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift index f35d4c59..846302a8 100644 --- a/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift +++ b/PointNMapShared/Sources/PointNMap/Shared/PointNMapConstants.swift @@ -8,7 +8,7 @@ import SwiftUI public struct PointNMapConstants { // Supported Classes - static let SelectedAccessibilityFeatureConfig: AccessibilityFeatureClassConfig = AccessibilityFeatureConfig.mapillaryCustom11Config + public static let SelectedAccessibilityFeatureConfig: AccessibilityFeatureClassConfig = AccessibilityFeatureConfig.mapillaryCustom11Config public struct DepthConstants { /// Model-specific SharedAppConstants diff --git a/PointNMapShared/Sources/PointNMap/Shared/SharedBaseContext.swift b/PointNMapShared/Sources/PointNMap/Shared/SharedBaseContext.swift new file mode 100644 index 00000000..3d879fd7 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/Shared/SharedBaseContext.swift @@ -0,0 +1,17 @@ +// +// SharedBaseContext.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/24/25. +// +import SwiftUI +import Combine + +final class SharedBaseContext: ObservableObject { + var metalContext: MetalContext? 
+ var isEnhancedAnalysisEnabled: Bool = false + + func configure() throws { + metalContext = try MetalContext() + } +} diff --git a/PointNMapShared/Sources/PointNMap/Shared/SharedBaseData.swift b/PointNMapShared/Sources/PointNMap/Shared/SharedBaseData.swift new file mode 100644 index 00000000..0a9ea1cf --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/Shared/SharedBaseData.swift @@ -0,0 +1,44 @@ +// +// SharedBaseData.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/9/25. +// + +import SwiftUI +import Combine +import simd + +@MainActor +final class SharedBaseData: ObservableObject { + @Published var isUploadReady: Bool = false + var isLidarAvailable: Bool = ARCameraUtils.checkDepthSupport() + + var currentCaptureDataRecord: CaptureData? + /// A queue to hold recent capture image data. + var captureDataQueue: SafeDeque + var captureDataCapacity: Int + + init(captureDataCapacity: Int = 5) { + self.captureDataCapacity = captureDataCapacity + self.captureDataQueue = SafeDeque(capacity: captureDataCapacity) + } + + func refreshQueue() async { + await self.captureDataQueue.removeAll() + } + + func refreshData() { + self.isUploadReady = false + self.currentCaptureDataRecord = nil + } + + func saveCaptureData(_ data: CaptureData) { + self.currentCaptureDataRecord = data + } + + func appendCaptureDataToQueue(_ data: (any CaptureImageDataProtocol)) async { + let captureImageData = CaptureImageData(data) + await self.captureDataQueue.appendBack(captureImageData) + } +} diff --git a/IOSAccessAssessment/Shared/Utils/SafeDeque.swift b/PointNMapShared/Sources/PointNMap/Shared/Utils/SafeDeque.swift similarity index 100% rename from IOSAccessAssessment/Shared/Utils/SafeDeque.swift rename to PointNMapShared/Sources/PointNMap/Shared/Utils/SafeDeque.swift diff --git a/PointNMapShared/Sources/PointNMap/View/ARCameraViewBase.swift b/PointNMapShared/Sources/PointNMap/View/ARCameraViewBase.swift new file mode 100644 index 00000000..f9d72314 --- /dev/null +++ 
b/PointNMapShared/Sources/PointNMap/View/ARCameraViewBase.swift @@ -0,0 +1,353 @@ +// +// ARCameraViewBase.swift +// IOSAccessAssessment +// +// Created by Kohei Matsushima on 2024/03/29. +// + +import SwiftUI +import Combine +import AVFoundation +import Vision +import Metal +import CoreImage +import MetalKit +import CoreLocation + +enum ARCameraViewBaseConstants { + enum Texts { + static let contentViewTitle = "Capture" + + static let cameraInProgressText = "Camera settings in progress" + + /// Camera Hint Texts + static let cameraHintPlaceholderText = "..." + static let cameraHintNoMeshText = "No Mesh Captured" + static let cameraHintNoSegmentationText = "No Features Detected" + static let cameraHintMeshNotProcessedText = "Features Not Processed" + static let cameraHintLocationErrorText = "Location Error" + static let cameraHintUnknownErrorText = "Unknown Error" + static let cameraHintMappingDataNotReadyText = "Mapping Data Not Ready" + + /// Manager Status Alert + static let managerStatusAlertTitleKey = "Error" + static let managerStatusAlertDismissButtonKey = "OK" + + /// Mapping Data Status Alert + static let mappingDataStatusAlertTitleKey = "Error" + static let mappingDataStatusAlertRetryButtonKey = "Retry" + static let mappingDataStatusAlertDismissButtonKey = "OK" + + /// Invalid Content View + static let invalidContentViewTitle = "Invalid Capture" + static let invalidContentViewMessage = "The captured data is invalid. Please try again." + + /// ARCameraLearnMoreSheetView + static let arCameraLearnMoreSheetTitle = "About Capture" + static let arCameraLearnMoreSheetMessage = """ + Use this screen to capture accessibility features in your environment. + + Point your device's camera at the area you want to capture, and press the Camera Button to take a snapshot. + + After capturing, you will be prompted to validate the annotated features. 
+ """ + } + + enum Images { + static let cameraIcon = "camera.circle.fill" + + /// InfoTio + static let infoIcon = "info.circle" + } + + enum Colors { + static let selectedClass = Color(red: 187/255, green: 134/255, blue: 252/255) + static let unselectedClass = Color.primary + } + + enum Constraints { + static let logoutIconSize: CGFloat = 20 + } +} + +enum ARCameraViewBaseError: Error, LocalizedError { + case captureNoSegmentationAccessibilityFeatures + case workspaceConfigurationFailed + case authenticationError + case mappingDataNotReady + + var errorDescription: String? { + switch self { + case .captureNoSegmentationAccessibilityFeatures: + return "No accessibility features were captured. Please try again." + case .workspaceConfigurationFailed: + return "Workspace configuration failed. Please check your workspace settings." + case .authenticationError: + return "Authentication error. Please log in again." + case .mappingDataNotReady: + return "Mapping data is not ready yet. Please wait a moment and try again." 
+ } + } +} + +class ARCameraBaseManagerStatusViewModel: ObservableObject { + @Published var isFailed: Bool = false + @Published var errorMessage: String = "" + + func update(isFailed: Bool, errorMessage: String) { + self.isFailed = isFailed + self.errorMessage = errorMessage + } +} + +public struct ARCameraView: View { + let selectedClasses: [AccessibilityFeatureClass] + + @EnvironmentObject var sharedAppData: SharedBaseData + @EnvironmentObject var sharedAppContext: SharedBaseContext + @EnvironmentObject var segmentationPipeline: SegmentationARPipeline + @Environment(\.dismiss) var dismiss + + @StateObject private var manager: ARCameraManager = ARCameraManager() + @StateObject private var managerConfigureStatusViewModel = ARCameraBaseManagerStatusViewModel() + @State private var cameraHintText: String = ARCameraViewBaseConstants.Texts.cameraHintPlaceholderText + + @StateObject private var locationManager: LocationManager = LocationManager() + + @State private var showARCameraLearnMoreSheet = false + + @State private var showAnnotationView = false + + public var body: some View { + Group { + // Show the camera view once manager is initialized, otherwise a loading indicator + if manager.isConfigured { + orientationStack { + HostedARCameraViewContainer(arSessionCameraProcessingDelegate: manager) + VStack { + /// Text for hinting user with status + Text(cameraHintText) + .padding() + .background(.regularMaterial, in: RoundedRectangle(cornerRadius: 8)) + .frame(maxWidth: 300) + .lineLimit(1) + .truncationMode(.tail) + + reverseOrientationStack { + Spacer() + Button { + cameraCapture() + } label: { + Image(systemName: ARCameraViewBaseConstants.Images.cameraIcon) + .resizable() + .frame(width: 60, height: 60) + } + .padding(.bottom, 20) + Spacer() + } + .overlay( + reverseOrientationStack { + Spacer() + Button(action: { + showARCameraLearnMoreSheet = true + }) { + Image(systemName: ARCameraViewBaseConstants.Images.infoIcon) + .resizable() + .frame(width: 20, height: 20) 
+ } + .padding(.horizontal, 20) + .padding(.bottom, 20) + } + ) + } + } + } else { + ProgressView(ARCameraViewBaseConstants.Texts.cameraInProgressText) + } + } + .navigationBarTitle(ARCameraViewBaseConstants.Texts.contentViewTitle, displayMode: .inline) + .onAppear { + locationManager.startLocationUpdates() + showAnnotationView = false + segmentationPipeline.setSelectedClasses(selectedClasses) + do { + try manager.configure( + selectedClasses: selectedClasses, segmentationPipeline: segmentationPipeline, + metalContext: sharedAppContext.metalContext, + isEnhancedAnalysisEnabled: sharedAppContext.isEnhancedAnalysisEnabled, + cameraOutputImageCallback: cameraOutputImageCallback + ) + } catch { + managerConfigureStatusViewModel.update(isFailed: true, errorMessage: error.localizedDescription) + } + } + .onDisappear { + Task { + do { + try manager.pause() + locationManager.stopLocationUpdates() + } catch { + print("Error pausing ARCameraManager: \(error)") + } + } + } + .alert(ARCameraViewBaseConstants.Texts.managerStatusAlertTitleKey, isPresented: $managerConfigureStatusViewModel.isFailed, actions: { + Button(ARCameraViewBaseConstants.Texts.managerStatusAlertDismissButtonKey) { + managerConfigureStatusViewModel.update(isFailed: false, errorMessage: "") + dismiss() + } + }, message: { + Text(managerConfigureStatusViewModel.errorMessage) + }) + .fullScreenCover(isPresented: $showAnnotationView) { + if let captureLocation = locationManager.currentLocation?.coordinate { + AnnotationViewBase( + selectedClasses: selectedClasses, captureLocation: captureLocation + ) + } else { + InvalidContentView( + title: ARCameraViewBaseConstants.Texts.invalidContentViewTitle, + message: ARCameraViewBaseConstants.Texts.invalidContentViewMessage + ) + } + } + .onChange(of: showAnnotationView, initial: false) { oldValue, newValue in + // If the AnnotationView is dismissed, clear capture history and reconfigure the manager for a new session + Task { + if (oldValue == true && newValue == false) 
{ + do { + locationManager.startLocationUpdates() + await sharedAppData.refreshQueue() + try manager.resume() + } catch { + managerConfigureStatusViewModel.update(isFailed: true, errorMessage: error.localizedDescription) + } + } + } + } + .onChange(of: manager.interfaceOrientation) { oldOrientation, newOrientation in + locationManager.updateOrientation(newOrientation) + } + .onChange(of: locationManager.currentLocation) { oldLocation, newLocation in + handleLocationUpdate(oldLocation: oldLocation, newLocation: newLocation) + } + .sheet(isPresented: $showARCameraLearnMoreSheet) { + ARCameraLearnMoreSheetView() + .presentationDetents([.medium, .large]) + } + } + + @ViewBuilder + private func orientationStack(@ViewBuilder content: () -> Content) -> some View { + manager.interfaceOrientation.isLandscape ? + AnyLayout(HStackLayout())(content) : + AnyLayout(VStackLayout())(content) + } + + @ViewBuilder + private func reverseOrientationStack(@ViewBuilder content: () -> Content) -> some View { + manager.interfaceOrientation.isLandscape ? + AnyLayout(VStackLayout())(content) : + AnyLayout(HStackLayout())(content) + } + + private func cameraOutputImageCallback(_ captureImageData: (any CaptureImageDataProtocol)) { + Task { + await sharedAppData.appendCaptureDataToQueue(captureImageData) + } + } + + private func cameraCapture() { + Task { + do { + let captureData: CaptureData = try await manager.performFinalSessionUpdateIfPossible() + switch captureData { + case .imageData(let data): + if (data.captureImageDataResults.segmentedClasses.isEmpty) + { + throw ARCameraViewBaseError.captureNoSegmentationAccessibilityFeatures + } + case .imageAndMeshData(let data): + if (data.captureImageDataResults.segmentedClasses.isEmpty) + || (data.captureMeshDataResults.segmentedMesh.totalVertexCount == 0) + { + throw ARCameraViewBaseError.captureNoSegmentationAccessibilityFeatures + } + } + try manager.pause() + locationManager.stopLocationUpdates() + /// Get location. 
Done after pausing the manager to avoid delays, despite being less accurate. + sharedAppData.saveCaptureData(captureData) + addCaptureDataToCurrentDataset( + captureImageData: captureData.imageData, captureMeshData: captureData.meshData, + location: locationManager.currentLocation?.coordinate, heading: locationManager.currentHeading?.trueHeading + ) + showAnnotationView = true + } catch ARCameraManagerError.finalSessionMeshUnavailable { + setHintText(ARCameraViewBaseConstants.Texts.cameraHintNoMeshText) + } catch ARCameraManagerError.finalSessionNoSegmentationClass, + ARCameraViewBaseError.captureNoSegmentationAccessibilityFeatures { + setHintText(ARCameraViewBaseConstants.Texts.cameraHintNoSegmentationText) + } catch ARCameraManagerError.finalSessionNoSegmentationMesh { + setHintText(ARCameraViewBaseConstants.Texts.cameraHintMeshNotProcessedText) + } catch ARCameraViewBaseError.mappingDataNotReady { + setHintText(ARCameraViewBaseConstants.Texts.cameraHintMappingDataNotReadyText) + } catch _ as LocationManagerError { + setHintText(ARCameraViewBaseConstants.Texts.cameraHintLocationErrorText) + } catch { + setHintText(ARCameraViewBaseConstants.Texts.cameraHintUnknownErrorText) + } + } + } + + private func addCaptureDataToCurrentDataset( + captureImageData: any CaptureImageDataProtocol, + captureMeshData: (any CaptureMeshDataProtocol)? = nil, + location: CLLocationCoordinate2D?, + heading: CLLocationDirection? + ) { + } + + private func handleLocationUpdate(oldLocation: CLLocation?, newLocation: CLLocation?) 
{ + var shouldUpdateMap = oldLocation == nil && newLocation != nil + if let oldLocation, let newLocation { + let distance = oldLocation.distance(from: newLocation) + shouldUpdateMap = distance > PointNMapConstants.WorkspaceConstants.fetchUpdateRadiusThresholdInMeters + } + if !shouldUpdateMap { + return + } + } + + /// Set text for 2 seconds, and then fall back to placeholder + private func setHintText(_ text: String) { + cameraHintText = text + Task { + try await Task.sleep(for: .seconds(2)) + cameraHintText = ARCameraViewBaseConstants.Texts.cameraHintPlaceholderText + } + } +} + +struct ARCameraLearnMoreSheetView: View { + @Environment(\.dismiss) + var dismiss + + var body: some View { + VStack(spacing: 20) { +// Image(systemName: "number") +// .resizable() +// .scaledToFit() +// .frame(width: 160) +// .foregroundStyle(.accentColor) + Text(ARCameraViewBaseConstants.Texts.arCameraLearnMoreSheetTitle) + .font(.headline) + Text(ARCameraViewBaseConstants.Texts.arCameraLearnMoreSheetMessage) + .foregroundStyle(.secondary) + Button("Dismiss") { + dismiss() + } + } + .padding(.horizontal, 40) + } +} diff --git a/PointNMapShared/Sources/PointNMap/View/AnnotationViewBase.swift b/PointNMapShared/Sources/PointNMap/View/AnnotationViewBase.swift new file mode 100644 index 00000000..b4fb620a --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/View/AnnotationViewBase.swift @@ -0,0 +1,623 @@ +// +// AnnotationViewBase.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/10/25. +// + +import SwiftUI +import Combine +import TipKit +import CoreLocation +import simd + +enum AnnotationViewBaseConstants { + enum Texts { + static let annotationViewTitle = "Annotation" + + static let currentClassPrefixText = "Selected class" + static let finishText = "Finish" + static let nextText = "Next" + + static let selectObjectText = "Select an object" + + static let loadingPageText = "Loading. Please wait..." 
+ + /// Feature Detail View Text + static let featureDetailViewTitle = "Feature Details" + static let featureDetailViewIdKey = "ID" + static let featureDetailViewLocationKey = "Location" + static let featureDetailNotAvailableText = "Not Available" + + /// Alert texts + static let managerStatusAlertTitleKey = "Error" + static let managerStatusAlertDismissButtonKey = "OK" + static let managerStatusAlertMessageDismissScreenSuffixKey = "Press OK to close this screen." + static let managerStatusAlertMessageDismissAlertSuffixKey = "Press OK to dismiss this alert." + static let apiChangesetUploadStatusAlertTitleKey = "Upload Error" + static let apiChangesetUploadStatusAlertDismissButtonKey = "OK" + static let apiChangesetUploadStatusAlertGenericMessageKey = "Failed to upload features. Press OK to dismiss this alert." + static let apiChangesetUploadStatusAlertMessageSuffixKey = " feature(s) failed to upload. Press OK to dismiss this alert." + + /// SelectObjectInfoTip + static let selectFeatureInfoTipTitle = "Select a Feature" + static let selectFeatureInfoTipMessage = "Please select the individual feature that you want to annotate" + static let selectFeatureInfoTipLearnMoreButtonTitle = "Learn More" + + /// SelectObjectInfoLearnMoreSheetView + static let selectFeatureInfoLearnMoreSheetTitle = "Annotating an Individual Feature" + static let selectFeatureInfoLearnMoreSheetMessage = """ + For each type of accessibility feature, the app can identify multiple feature instances within the same image. + + **Select All**: Default option. You can annotate all features of a particular type together. + + **Individual**: You can select a particular feature from the dropdown menu if you wish to provide specific annotations for an individual feature. + + **Ellipsis [...]**: For each feature, you can also view its details by tapping the ellipsis button next to the dropdown menu. 
+ """ + } + + enum Images { + static let checkIcon = "checkmark" + static let ellipsisIcon = "ellipsis" + static let infoIcon = "info.circle" + static let closeIcon = "xmark" + static let errorIcon = "exclamationmark.triangle" + } +} + +enum AnnotationViewBaseError: Error, LocalizedError { + case classIndexOutofBounds + case instanceIndexOutofBounds + case invalidCaptureDataRecord + case managerConfigurationFailed + case authenticationError + case workspaceConfigurationFailed + case attributeEstimationFailed(Error) + case uploadFailed + + var errorDescription: String? { + switch self { + case .classIndexOutofBounds: + return "The Current Class is not in the list." + case .instanceIndexOutofBounds: + return "Exceeded the number of instances for the current class." + case .invalidCaptureDataRecord: + return "The Current Capture is invalid." + case .managerConfigurationFailed: + return "Annotation Configuration failed" + case .authenticationError: + return "Authentication error. Please log in again." + case .workspaceConfigurationFailed: + return "Workspace configuration failed. Please check your workspace settings." + case .attributeEstimationFailed(let error): + return "Some Attribute Estimation calculations failed. They may be ignored. \nError: \(error.localizedDescription)" + case .uploadFailed: + return "Failed to upload annotations." + } + } +} + +struct SelectFeatureInfoTipBase: Tip { + + var title: Text { + Text(AnnotationViewBaseConstants.Texts.selectFeatureInfoTipTitle) + } + var message: Text? { + Text(AnnotationViewBaseConstants.Texts.selectFeatureInfoTipMessage) + } + var image: Image? { + Image(systemName: AnnotationViewBaseConstants.Images.infoIcon) + .resizable() + } + var actions: [Action] { + // Define a learn more button. 
+ Action( + id: AnnotationViewBaseConstants.Texts.selectFeatureInfoTipLearnMoreButtonTitle, + title: AnnotationViewBaseConstants.Texts.selectFeatureInfoTipLearnMoreButtonTitle + ) + } +} + +class AnnotationFeatureClassSelectionViewBaseModel: ObservableObject { + @Published var currentIndex: Int? = nil + @Published var currentClass: AccessibilityFeatureClass? = nil + @Published var selectedAnnotationOption: AnnotationOption = .classOption(.default) + + func setCurrent(index: Int, classes: [AccessibilityFeatureClass]) throws { + guard index < classes.count else { + throw AnnotationViewBaseError.classIndexOutofBounds + } + self.currentIndex = index + self.currentClass = classes[index] + } + + func setOption(option: AnnotationOption) { + self.selectedAnnotationOption = option + } +} + +class AnnotationFeatureSelectionViewBaseModel: ObservableObject { + @Published var instances: [EditableAccessibilityFeature] = [] + @Published var currentIndex: Int? = nil + @Published var currentFeature: EditableAccessibilityFeature? = nil + + func setInstances(_ instances: [EditableAccessibilityFeature], currentClass: AccessibilityFeatureClass) throws { + self.instances = instances + /// If the class is sidewalk, we always select the first instance, as there should be only one sidewalk instance. + if (currentClass.kind == .sidewalk) { + try setIndex(index: 0) + } else { + try setIndex(index: nil) + } + } + + func setIndex(index: Int?) 
throws { + guard let index = index else { + self.currentIndex = nil + self.currentFeature = nil + return + } + guard index < instances.count else { + throw AnnotationViewBaseError.instanceIndexOutofBounds + } + self.currentIndex = index + self.currentFeature = instances[index] + } + + func setCurrent(index: Int?, instances: [EditableAccessibilityFeature], currentClass: AccessibilityFeatureClass) throws { + try setInstances(instances, currentClass: currentClass) + try setIndex(index: index) + } + + func setOptionOnFeature(option: AnnotationOption) { + if let currentFeature = self.currentFeature { + objectWillChange.send() + currentFeature.setAnnotationOption(option) + } + } +} + +class AnnotationViewStatusViewBaseModel: ObservableObject { + @Published var isFailed: Bool = false + @Published var errorMessage: String = "" + @Published var shouldDismiss: Bool = true + + init(shouldDismiss: Bool = true) { + self.shouldDismiss = shouldDismiss + } + + func update(isFailed: Bool, errorMessage: String, shouldDismiss: Bool = true) { + self.isFailed = isFailed + self.errorMessage = errorMessage + self.shouldDismiss = shouldDismiss + } + + func update(isFailed: Bool, error: Error, shouldDismiss: Bool = true) { + self.isFailed = isFailed + let dismissKey = shouldDismiss ? 
+ AnnotationViewBaseConstants.Texts.managerStatusAlertMessageDismissScreenSuffixKey : + AnnotationViewBaseConstants.Texts.managerStatusAlertMessageDismissAlertSuffixKey + self.errorMessage = "\(error.localizedDescription) \(dismissKey)" + self.shouldDismiss = shouldDismiss + } +} + +public struct AnnotationViewBase: View { + let selectedClasses: [AccessibilityFeatureClass] + let captureLocation: CLLocationCoordinate2D + + @EnvironmentObject var sharedAppData: SharedBaseData + @EnvironmentObject var sharedBaseContext: SharedBaseContext + @Environment(\.dismiss) var dismiss + + @StateObject var manager: AnnotationImageManager = AnnotationImageManager() + + @StateObject var segmentationAnnontationPipeline: SegmentationAnnotationPipeline = SegmentationAnnotationPipeline() + @StateObject var attributeEstimationPipeline: AttributeEstimationPipeline = AttributeEstimationPipeline() + + @StateObject private var managerStatusViewModel = AnnotationViewStatusViewBaseModel() + @State private var interfaceOrientation: UIInterfaceOrientation = .portrait // To bind one-way with manager's orientation + + @StateObject var featureClassSelectionViewModel = AnnotationFeatureClassSelectionViewBaseModel() + @StateObject var featureSelectionViewModel = AnnotationFeatureSelectionViewBaseModel() + @State private var isShowingAnnotationFeatureDetailView: Bool = false + +// var selectFeatureInfoTipBase = SelectFeatureInfoTipBase() + @State private var showSelectFeatureLearnMoreSheet = false + + public var body: some View { + VStack { + HStack { + Spacer() + Text(AnnotationViewBaseConstants.Texts.annotationViewTitle) + .font(.headline) + .padding() + Spacer() + } + .overlay( + HStack { + Spacer() + Button(action: { + dismiss() + }) { + Image(systemName: AnnotationViewBaseConstants.Images.closeIcon) + .resizable() + .frame(width: 20, height: 20) + } + .padding() + } + ) + + if let currentClass = featureClassSelectionViewModel.currentClass { + mainContent(currentClass: currentClass) + } else { + 
loadingPageView() + } + } + .task { + await handleOnAppear() + } + .onChange(of: featureClassSelectionViewModel.currentClass) { oldClass, newClass in + handleOnClassChange() + } + /// We are using index to track change in instance, instead of the instance itself, because we want to use the index for naming the instance in the picker. + /// To use the instance directly would require AccessibilityFeature to conform to Hashable, which is possible, by just using id. + /// But while rendering the picker, we would need to create a new Array of enumerated instances, which would be less efficient. + .onChange(of: featureSelectionViewModel.currentIndex) { oldIndex, newIndex in + handleOnInstanceChange(oldIndex: oldIndex, newIndex: newIndex) + } + .sheet(isPresented: $isShowingAnnotationFeatureDetailView) { + if let currentFeature = featureSelectionViewModel.currentFeature, + let currentFeatureIndex = featureSelectionViewModel.currentIndex + { + AnnotationFeatureDetailViewBase( + accessibilityFeature: currentFeature, + title: "\(currentFeature.accessibilityFeatureClass.name.capitalized): \(currentFeatureIndex)" + ) { feature in + EmptyView() + } + .presentationDetents([.medium, .large]) + } else { + Text(AnnotationViewBaseConstants.Texts.featureDetailNotAvailableText) + .presentationDetents([.medium, .large]) + } + } + .sheet(isPresented: $showSelectFeatureLearnMoreSheet) { + SelectFeatureLearnMoreSheetView() + .presentationDetents([.medium, .large]) + } + .alert(AnnotationViewBaseConstants.Texts.managerStatusAlertTitleKey, isPresented: $managerStatusViewModel.isFailed, actions: { + Button(AnnotationViewBaseConstants.Texts.managerStatusAlertDismissButtonKey) { + let shouldDismiss = managerStatusViewModel.shouldDismiss + managerStatusViewModel.update(isFailed: false, errorMessage: "") + if shouldDismiss { + dismiss() + } + } + }, message: { + Text(managerStatusViewModel.errorMessage) + }) + } + + private func loadingPageView() -> some View { + VStack { + Spacer() + 
Text(AnnotationViewBaseConstants.Texts.loadingPageText) + SpinnerView() + Spacer() + } + } + + @ViewBuilder + private func orientationStack(@ViewBuilder content: () -> Content) -> some View { + manager.interfaceOrientation.isLandscape ? + AnyLayout(HStackLayout())(content) : + AnyLayout(VStackLayout())(content) + } + + @ViewBuilder + private func mainContent(currentClass: AccessibilityFeatureClass) -> some View { + let isDisabledFeatureDetailButton = featureSelectionViewModel.currentFeature == nil + orientationStack { + HostedAnnotationImageViewController(annotationImageManager: manager) + + VStack { + HStack { + Spacer() + Text("\(AnnotationViewBaseConstants.Texts.currentClassPrefixText): \(currentClass.name)") + Spacer() + } + + HStack { + Spacer() + CustomPicker ( + label: AnnotationViewBaseConstants.Texts.selectObjectText, + selection: $featureSelectionViewModel.currentIndex, + isContainsAll: currentClass.kind != .sidewalk + ) { + ForEach(featureSelectionViewModel.instances.indices, id: \.self) { featureIndex in + Text("\(currentClass.name.capitalized): \(featureIndex)") + .tag(featureIndex as Int?) + } + } + Button(action: { + isShowingAnnotationFeatureDetailView = true + }) { + Image(systemName: AnnotationViewBaseConstants.Images.ellipsisIcon) + } + .buttonStyle(.bordered) + .padding(.horizontal, 5) + .disabled(isDisabledFeatureDetailButton) + Spacer() + } + .frame(maxWidth: .infinity) + .padding(.horizontal, 30) + .overlay( + HStack { + Spacer() + Button(action: { + showSelectFeatureLearnMoreSheet = true + }) { + Image(systemName: AnnotationViewBaseConstants.Images.infoIcon) + .resizable() + .frame(width: 20, height: 20) + } + .padding(.trailing, 10) + } + ) + + ProgressBar(value: 0) + + HStack { + Spacer() + annotationOptionsView(currentClass: currentClass) + Spacer() + } + .padding() + + Button(action: { + confirmAnnotation() + }) { + Text(isCurrentIndexLast() ? 
AnnotationViewBaseConstants.Texts.finishText : AnnotationViewBaseConstants.Texts.nextText) + .padding() + } + } + } + } + + private func annotationOptionsView(currentClass: AccessibilityFeatureClass) -> some View { + if let currentFeature = featureSelectionViewModel.currentFeature { + let annotationOptions: [AnnotationOption] = AnnotationOptionFeature.allCases.map { .individualOption($0) } + return VStack(spacing: 10) { + ForEach(annotationOptions, id: \.self) { option in + Button(action: { + featureSelectionViewModel.setOptionOnFeature(option: option) + }) { + Text(option.rawValue) + .font(.subheadline) + .frame(maxWidth: .infinity) + .padding() + .background(currentFeature.selectedAnnotationOption == option ? Color.blue : Color.gray) + .foregroundStyle(.white) + .cornerRadius(10) + } + } + } + } else { + let annotationOptions: [AnnotationOption] = AnnotationOptionFeatureClass.allCases.map { .classOption($0) } + return VStack(spacing: 10) { + ForEach(annotationOptions, id: \.self) { option in + Button(action: { + featureClassSelectionViewModel.setOption(option: option) + }) { + Text(option.rawValue) + .font(.subheadline) + .frame(maxWidth: .infinity) + .padding() + .background(featureClassSelectionViewModel.selectedAnnotationOption == option ? 
Color.blue : Color.gray) + .foregroundStyle(.white) + .cornerRadius(10) + } + } + } + } + } + + private func isCurrentIndexValid() -> Bool { + guard let currentCaptureDataRecord = sharedAppData.currentCaptureDataRecord, + let currentClassIndex = featureClassSelectionViewModel.currentIndex else { + return false + } + let segmentedClasses = currentCaptureDataRecord.imageData.captureImageDataResults.segmentedClasses + return (currentClassIndex >= 0 && currentClassIndex < segmentedClasses.count) + } + + private func isCurrentIndexLast() -> Bool { + guard let currentCaptureDataRecord = sharedAppData.currentCaptureDataRecord, + let currentClassIndex = featureClassSelectionViewModel.currentIndex else { + return false + } + let segmentedClasses = currentCaptureDataRecord.imageData.captureImageDataResults.segmentedClasses + return currentClassIndex == segmentedClasses.count - 1 + } + + private func handleOnAppear() async { + do { + guard let currentCaptureDataRecord = sharedAppData.currentCaptureDataRecord else { + throw AnnotationViewBaseError.invalidCaptureDataRecord + } + var captureMeshData: (any CaptureMeshDataProtocol)? 
= nil + if sharedBaseContext.isEnhancedAnalysisEnabled { + guard let captureMeshDataResults = currentCaptureDataRecord.meshData?.captureMeshDataResults else { + throw AnnotationViewBaseError.invalidCaptureDataRecord + } + captureMeshData = CaptureImageAndMeshData( + captureImageData: CaptureImageData(currentCaptureDataRecord.imageData), + captureMeshDataResults: captureMeshDataResults + ) + } + let segmentedClasses = currentCaptureDataRecord.imageData.captureImageDataResults.segmentedClasses + try segmentationAnnontationPipeline.configure() + try attributeEstimationPipeline.configure( + captureImageData: currentCaptureDataRecord.imageData, + /// TODO: MESH PROCESSING: Enable mesh data processing + captureMeshData: captureMeshData + ) + try manager.configure( + selectedClasses: selectedClasses, segmentationAnnotationPipeline: segmentationAnnontationPipeline, + captureImageData: currentCaptureDataRecord.imageData, + captureMeshData: captureMeshData, + isEnhancedAnalysisEnabled: sharedBaseContext.isEnhancedAnalysisEnabled + ) + let captureDataHistory = Array(await sharedAppData.captureDataQueue.snapshot()) + manager.setupAlignedSegmentationLabelImages(captureDataHistory: captureDataHistory) + try featureClassSelectionViewModel.setCurrent(index: 0, classes: segmentedClasses) + } catch { + managerStatusViewModel.update(isFailed: true, error: error) + } + } + + private func handleOnClassChange() { + do { + guard let currentClass = featureClassSelectionViewModel.currentClass else { + throw AnnotationViewBaseError.invalidCaptureDataRecord + } + let accessibilityFeatures = try manager.updateFeatureClass(accessibilityFeatureClass: currentClass) + var lastEstimationError: Error? 
= nil + accessibilityFeatures.forEach { accessibilityFeature in + do { + try attributeEstimationPipeline.setPrerequisites(accessibilityFeature: accessibilityFeature) + try attributeEstimationPipeline.processLocationRequest( + deviceLocation: captureLocation, + accessibilityFeature: accessibilityFeature + ) + try attributeEstimationPipeline.processAttributeRequest( + accessibilityFeature: accessibilityFeature + ) + attributeEstimationPipeline.clearPrerequisites() + } catch { + lastEstimationError = error + } + } + featureClassSelectionViewModel.setOption(option: .classOption(.default)) + try featureSelectionViewModel.setInstances(accessibilityFeatures, currentClass: currentClass) + if let lastEstimationError { + throw AnnotationViewBaseError.attributeEstimationFailed(lastEstimationError) + } + } catch AnnotationViewBaseError.attributeEstimationFailed(let error) { + managerStatusViewModel.update( + isFailed: true, error: AnnotationViewBaseError.attributeEstimationFailed(error), shouldDismiss: false + ) + } catch { + managerStatusViewModel.update(isFailed: true, error: error, shouldDismiss: false) + } + } + + private func handleOnInstanceChange(oldIndex: Int?, newIndex: Int?) { + do { + try featureSelectionViewModel.setIndex(index: featureSelectionViewModel.currentIndex) + } catch { + managerStatusViewModel.update(isFailed: true, error: error) + } + do { + guard let currentClass = featureClassSelectionViewModel.currentClass else { + throw AnnotationViewBaseError.invalidCaptureDataRecord + } + var accessibilityFeatures: [EditableAccessibilityFeature] + var featureSelectedStatus: [UUID: Bool] = [:] + var updateFeatureResults: AnnotationImageFeatureUpdateResults? 
= nil + if let currentFeature = featureSelectionViewModel.currentFeature { + accessibilityFeatures = [currentFeature] + featureSelectedStatus[currentFeature.id] = true /// Selected and highlighted + if let oldIndex = oldIndex, oldIndex != featureSelectionViewModel.currentIndex, + oldIndex >= 0, oldIndex < featureSelectionViewModel.instances.count { + let oldFeature = featureSelectionViewModel.instances[oldIndex] + accessibilityFeatures.append(oldFeature) + featureSelectedStatus[oldFeature.id] = false /// Selected, but not highlighted + } + /// MARK: Temporary code for visualization. Incurs significant performance overhead. + if currentClass.kind.attributes.contains(where: { + $0 == .width || $0 == .runningSlope || $0 == .crossSlope || $0 == .surfaceIntegrity + }) { + let plane = try attributeEstimationPipeline.calculateAlignedPlane( + accessibilityFeature: currentFeature, worldPoints: nil + ) + let projectedPlane = try attributeEstimationPipeline.calculateProjectedPlane( + accessibilityFeature: currentFeature, plane: plane + ) + let damageDetectionResults = try attributeEstimationPipeline.getDamageDetectionResults( + accessibilityFeature: currentFeature + ) + updateFeatureResults = AnnotationImageFeatureUpdateResults( + plane: plane, projectedPlane: projectedPlane, + damageDetectionResults: damageDetectionResults + ) + } + } else { + accessibilityFeatures = featureSelectionViewModel.instances + featureSelectedStatus = featureSelectionViewModel.instances.reduce(into: [:]) { dict, feature in + dict[feature.id] = false /// Selected, but not highlighted + } + } +// let isSelected = featureSelectionViewModel.currentFeature != nil + try manager.updateFeature( + accessibilityFeatureClass: currentClass, + accessibilityFeatures: accessibilityFeatures, + featureSelectedStatus: featureSelectedStatus, + updateFeatureResults: updateFeatureResults + ) + } catch { + managerStatusViewModel.update(isFailed: true, error: error, shouldDismiss: false) + } + } + + private func 
confirmAnnotation() { + Task { + do { + try moveToNextClass() + } catch AnnotationViewBaseError.classIndexOutofBounds { + managerStatusViewModel.update(isFailed: true, error: AnnotationViewBaseError.classIndexOutofBounds) + } catch { + managerStatusViewModel.update(isFailed: true, error: error, shouldDismiss: false) + } + } + } + + private func moveToNextClass() throws { + if isCurrentIndexLast() { + self.dismiss() + return + } + /// Move to next class + guard let currentCaptureDataRecord = sharedAppData.currentCaptureDataRecord, + let currentClassIndex = featureClassSelectionViewModel.currentIndex else { + throw AnnotationViewBaseError.invalidCaptureDataRecord + } + let segmentedClasses = currentCaptureDataRecord.imageData.captureImageDataResults.segmentedClasses + try featureClassSelectionViewModel.setCurrent(index: currentClassIndex + 1, classes: segmentedClasses) + } +} + +struct SelectFeatureLearnMoreSheetView: View { + @Environment(\.dismiss) + var dismiss + + var body: some View { + VStack(spacing: 20) { + // Image(systemName: "number") + // .resizable() + // .scaledToFit() + // .frame(width: 160) + // .foregroundStyle(.accentColor) + Text(AnnotationViewBaseConstants.Texts.selectFeatureInfoLearnMoreSheetTitle) + .font(.headline) + Text(AnnotationViewBaseConstants.Texts.selectFeatureInfoLearnMoreSheetMessage) + .foregroundStyle(.secondary) + Button("Dismiss") { + dismiss() + } + } + .padding(.horizontal, 40) + } +} diff --git a/PointNMapShared/Sources/PointNMap/View/SubView/AnnotationFeatureDetailViewBase.swift b/PointNMapShared/Sources/PointNMap/View/SubView/AnnotationFeatureDetailViewBase.swift new file mode 100644 index 00000000..5131452f --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/View/SubView/AnnotationFeatureDetailViewBase.swift @@ -0,0 +1,394 @@ +// +// AnnotationFeatureDetailView.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/28/25. 
+// + +import SwiftUI +import Combine + +public func AnnotationFeatureDetailLocationFormatter() -> NumberFormatter { + let nf = NumberFormatter() + nf.numberStyle = .decimal + nf.maximumFractionDigits = 7 + nf.minimumFractionDigits = 7 + return nf +} + +public enum AnnotationFeatureDetailViewConstants { + public enum Texts { + /// Alert texts + public static let statusAlertTitleKey: String = "Error" + public static let statusAlertDismissAlertSuffixKey: String = "Press OK to dismiss this alert." + public static let statusAlertDismissButtonKey: String = "OK" + + /// Is Existing + public static let isExistingTitle: String = "Is this an existing feature?" + + /// Invalid + public static let invalidTextKey: String = "Invalid" + } + + public enum Images { + /// Alert images + public static let statusAlertImageNameKey: String = "exclamationmark.triangle.fill" + } +} + +/** + A view that displays detailed information about an accessibility feature annotation. + Sub-view of the `AnnotationView`. + */ +public struct AnnotationFeatureDetailViewBase< + Feature: EditableAccessibilityFeatureProtocol, + LocationSection: View + >: View { + + public enum AnnotationFeatureDetailViewError: Error, LocalizedError { + case invalidAttributeValue(attribute: AccessibilityFeatureAttribute, message: String) + + public var errorDescription: String? 
{ + switch self { + case .invalidAttributeValue(let attribute, let message): + return "Invalid value for \(attribute.displayName): \(message)" + } + } + } + + public struct AttributeErrorStatus { + public var isError: Bool + public var errorMessage: String + + public init(isError: Bool, errorMessage: String) { + self.isError = isError + self.errorMessage = errorMessage + } + } + + public class StatusViewModel: ObservableObject { + @Published public var attributeStatusMap: [AccessibilityFeatureAttribute: AttributeErrorStatus] = [:] + + public func configure(accessibilityFeature: Feature) { + let attributes = accessibilityFeature.accessibilityFeatureClass.kind.attributes + var attributeStatusMap: [AccessibilityFeatureAttribute: AttributeErrorStatus] = [:] + attributes.forEach { + let initialStatus = AttributeErrorStatus(isError: false, errorMessage: "") + attributeStatusMap[$0] = initialStatus + } + self.attributeStatusMap = attributeStatusMap + } + + public func updateAttributeStatus( + for attribute: AccessibilityFeatureAttribute, + isError: Bool, + errorMessage: String + ) { + if let _ = attributeStatusMap[attribute] { + attributeStatusMap[attribute]?.isError = isError + attributeStatusMap[attribute]?.errorMessage = errorMessage + } + } + } + + public var accessibilityFeature: Feature + public let title: String + private let locationSection: (Feature) -> LocationSection + private let locationFormatter = AnnotationFeatureDetailLocationFormatter() + + @StateObject private var statusViewModel = AnnotationFeatureDetailViewBase.StatusViewModel() + @FocusState private var focusedField: AccessibilityFeatureAttribute? + /// Note: Fields such as pickers don't have built-in ways to update their UI based on user input. Hence we need to trigger a refresh manually when their value changes. 
+ @State private var refreshTrigger: Int = 0 + + public init( + accessibilityFeature: Feature, + title: String, + @ViewBuilder locationSection: @escaping (Feature) -> LocationSection + ) { + self.accessibilityFeature = accessibilityFeature + self.title = title + self.locationSection = locationSection + } + + public var body: some View { + VStack { + Text(title) + .font(.headline) + .padding() + + Form { + Section(header: Text(AnnotationViewBaseConstants.Texts.featureDetailViewIdKey)) { + Text(accessibilityFeature.id.uuidString) + .foregroundStyle(.secondary) + } + + /** + Location Section + */ + locationSection(accessibilityFeature) + + /** + The Attributes Section + Instead of using a ForEach loop, we manually list out each attribute to have more control over the layout and presentation. + This allows us to customize the display for each attribute type as needed. + There isn't a large number of attributes, so this approach is manageable and provides better clarity. + */ + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.width)) + { + Section(header: Text(AccessibilityFeatureAttribute.width.displayName)) { + numberTextFieldView(attribute: .width) + .focused($focusedField, equals: .width) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.runningSlope)) + { + Section(header: Text(AccessibilityFeatureAttribute.runningSlope.displayName)) { + numberTextFieldView(attribute: .runningSlope) + .focused($focusedField, equals: .runningSlope) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.crossSlope)) + { + Section(header: Text(AccessibilityFeatureAttribute.crossSlope.displayName)) { + numberTextFieldView(attribute: .crossSlope) + .focused($focusedField, equals: .crossSlope) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.surfaceIntegrity)) + { + Section(header: Text(AccessibilityFeatureAttribute.surfaceIntegrity.displayName)) 
{ + pickerView(attribute: .surfaceIntegrity) + .focused($focusedField, equals: .surfaceIntegrity) + .id(refreshTrigger) // Refresh the Picker view when refreshTrigger changes + } + } + + /// Experimental Attributes Section + if (accessibilityFeature.accessibilityFeatureClass.kind.experimentalAttributes.contains(.lidarDepth)) { + Section(header: Text(AccessibilityFeatureAttribute.lidarDepth.displayName)) { + numberTextView(attribute: .lidarDepth) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.experimentalAttributes.contains(.latitudeDelta)) { + Section(header: Text(AccessibilityFeatureAttribute.latitudeDelta.displayName)) { + numberTextView(attribute: .latitudeDelta) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.experimentalAttributes.contains(.longitudeDelta)) { + Section(header: Text(AccessibilityFeatureAttribute.longitudeDelta.displayName)) { + numberTextView(attribute: .longitudeDelta) + } + } + + /// Legacy Attributes Section + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.widthLegacy)) + { + Section(header: Text(AccessibilityFeatureAttribute.widthLegacy.displayName)) { + numberTextFieldView(attribute: .widthLegacy) + .focused($focusedField, equals: .widthLegacy) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.runningSlopeLegacy)) + { + Section(header: Text(AccessibilityFeatureAttribute.runningSlopeLegacy.displayName)) { + numberTextFieldView(attribute: .runningSlopeLegacy) + .focused($focusedField, equals: .runningSlopeLegacy) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.crossSlopeLegacy)) + { + Section(header: Text(AccessibilityFeatureAttribute.crossSlopeLegacy.displayName)) { + numberTextFieldView(attribute: .crossSlopeLegacy) + .focused($focusedField, equals: .crossSlopeLegacy) + } + } + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.widthFromImage)) + { + Section(header: 
Text(AccessibilityFeatureAttribute.widthFromImage.displayName)) { + numberTextFieldView(attribute: .widthFromImage) + .focused($focusedField, equals: .widthFromImage) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.runningSlopeFromImage)) + { + Section(header: Text(AccessibilityFeatureAttribute.runningSlopeFromImage.displayName)) { + numberTextFieldView(attribute: .runningSlopeFromImage) + .focused($focusedField, equals: .runningSlopeFromImage) + } + } + + if (accessibilityFeature.accessibilityFeatureClass.kind.attributes.contains(.crossSlopeFromImage)) + { + Section(header: Text(AccessibilityFeatureAttribute.crossSlopeFromImage.displayName)) { + numberTextFieldView(attribute: .crossSlopeFromImage) + .focused($focusedField, equals: .crossSlopeFromImage) + } + } + } + } + .onAppear { + self.statusViewModel.configure(accessibilityFeature: accessibilityFeature) + focusedField = nil + } + .onTapGesture { + // Dismiss the keyboard when tapping outside of a TextField + focusedField = nil + } + } + + @ViewBuilder + private func numberTextFieldView(attribute: AccessibilityFeatureAttribute) -> some View { + let attributeStatus = statusViewModel.attributeStatusMap[attribute] ?? 
.init(isError: false, errorMessage: "") + VStack { + if (attributeStatus.isError) { + /// A red colored error message + HStack { + Label( + attributeStatus.errorMessage, + systemImage: AnnotationFeatureDetailViewConstants.Images.statusAlertImageNameKey + ) + .foregroundStyle(.red) + .font(.caption) + Spacer() + } + } + TextField( + attribute.displayName, + value: Binding( + get: { + guard let attributeValue = accessibilityFeature.attributeValues[attribute], + let attributeValue, + let attributeBindableValue = attributeValue.toDouble() else { + return 0.0 + } + return attributeBindableValue + }, + set: { newValue in + do { + let newDoubleValue = Double(newValue) + guard let newAttributeValue = attribute.value(from: newDoubleValue) else { + return + } + try accessibilityFeature.setAttributeValue(newAttributeValue, for: attribute) + } catch { + setAttributeStatusErrorText(for: attribute, message: "\(error.localizedDescription)") + } + } + ), + format: .number + ) + .textFieldStyle(.roundedBorder) + .keyboardType(.decimalPad) + } + } + + @ViewBuilder + private func numberTextView(attribute: AccessibilityFeatureAttribute) -> some View { + let attributeStatus = statusViewModel.attributeStatusMap[attribute] ?? 
.init(isError: false, errorMessage: "") + let valueToDisplay: String = { + guard let attributeValue = accessibilityFeature.experimentalAttributeValues[attribute], + let attributeValue, + let attributeBindableValue = attributeValue.toDouble() else { + return AnnotationFeatureDetailViewConstants.Texts.invalidTextKey + } + return String(attributeBindableValue) + }() + VStack { + if (attributeStatus.isError) { + /// A red colored error message + HStack { + Label( + attributeStatus.errorMessage, + systemImage: AnnotationFeatureDetailViewConstants.Images.statusAlertImageNameKey + ) + .foregroundStyle(.red) + .font(.caption) + Spacer() + } + } + Text(valueToDisplay) + } + } + + @ViewBuilder + private func toggleView(attribute: AccessibilityFeatureAttribute) -> some View { + Toggle( + isOn: Binding( + get: { + guard let attributeValue = accessibilityFeature.attributeValues[attribute], + let attributeValue, + let attributeBindableValue = attributeValue.toBool() else { + return false + } + return attributeBindableValue + }, + set: { newValue in + do { + let newBoolValue = Bool(newValue) + guard let newAttributeValue = attribute.value(from: newBoolValue) else { + return + } + try accessibilityFeature.setAttributeValue(newAttributeValue, for: attribute) + } catch { + setAttributeStatusErrorText(for: attribute, message: "\(error.localizedDescription)") + } + } + ) + ) { + Text(attribute.displayName) + } + } + + @ViewBuilder + private func pickerView(attribute: AccessibilityFeatureAttribute) -> some View { + Picker( + attribute.displayName, + selection: Binding( + get: { + guard case .categorical(let category) = accessibilityFeature.attributeValues[attribute] else { + return attribute.categoricalOptions().first + } + return category + }, + set: { newValue in + guard let newValue else { return } + do { + let newCategoricalValue: AccessibilityFeatureAttribute.Value = .categorical(newValue) + try accessibilityFeature.setAttributeValue(newCategoricalValue, for: attribute) + 
 refreshTrigger += 1 // Trigger a refresh to update the Picker's displayed value + } catch { + setAttributeStatusErrorText(for: attribute, message: "\(error.localizedDescription)") + } + } + )) { + ForEach(attribute.categoricalOptions(), id: \.self) { option in + Text(option.rawValue).tag(option) + } + } + .pickerStyle(.menu) + } + + private func setAttributeStatusErrorText( + for attribute: AccessibilityFeatureAttribute, message: String + ) { + statusViewModel.updateAttributeStatus(for: attribute, isError: true, errorMessage: message) + Task { + do { + try await Task.sleep(for: .seconds(2)) + statusViewModel.updateAttributeStatus(for: attribute, isError: false, errorMessage: "") + } catch { + print("Failed to reset attribute error status: \(error.localizedDescription)") + } + } + } +} diff --git a/IOSAccessAssessment/View/SubView/InvalidContentView.swift b/PointNMapShared/Sources/PointNMap/View/SubView/InvalidContentView.swift similarity index 100% rename from IOSAccessAssessment/View/SubView/InvalidContentView.swift rename to PointNMapShared/Sources/PointNMap/View/SubView/InvalidContentView.swift diff --git a/PointNMapShared/Sources/PointNMap/View/UI/CustomPicker.swift b/PointNMapShared/Sources/PointNMap/View/UI/CustomPicker.swift new file mode 100644 index 00000000..525758be --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/View/UI/CustomPicker.swift @@ -0,0 +1,34 @@ +// +// CustomPicker.swift +// IOSAccessAssessment +// +// Created by Himanshu on 11/28/25. +// + +import SwiftUI + +/** + Custom picker with a select all option that binds nil to "All". + */ +public struct CustomPicker<SelectionValue: Hashable, Content: View>: View { + public let label: String + @Binding public var selection: SelectionValue?
 + public let isContainsAll: Bool + public let content: () -> Content + + public init(label: String, selection: Binding<SelectionValue?>, isContainsAll: Bool = true, @ViewBuilder content: @escaping () -> Content) { + self.label = label + self._selection = selection + self.isContainsAll = isContainsAll + self.content = content + } + + public var body: some View { + Picker(label, selection: $selection) { + if (isContainsAll) { + Text("All").tag(nil as SelectionValue?) + } + content() + } + } +} diff --git a/PointNMapShared/Sources/PointNMap/View/UI/ProgressBar.swift b/PointNMapShared/Sources/PointNMap/View/UI/ProgressBar.swift new file mode 100644 index 00000000..5cd533b4 --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/View/UI/ProgressBar.swift @@ -0,0 +1,22 @@ +// +// ProgressBar.swift +// IOSAccessAssessment +// +// Created by TCAT on 9/26/24. +// + +import SwiftUI + +public struct ProgressBar: View { + public var value: Float + + public init(value: Float) { + self.value = value + } + + public var body: some View { + ProgressView(value: value) + .progressViewStyle(LinearProgressViewStyle()) + .padding() + } +} diff --git a/PointNMapShared/Sources/PointNMap/View/UI/SpinnerView.swift b/PointNMapShared/Sources/PointNMap/View/UI/SpinnerView.swift new file mode 100644 index 00000000..963eab9e --- /dev/null +++ b/PointNMapShared/Sources/PointNMap/View/UI/SpinnerView.swift @@ -0,0 +1,24 @@ +// +// SpinnerView.swift +// IOSAccessAssessment +// +// Created by TCAT on 9/24/24.
+// + +import SwiftUI + +public struct SpinnerView: View { + public init() {} + + public var body: some View { + ProgressView() + .progressViewStyle(CircularProgressViewStyle(tint: .blue)) + .scaleEffect(2.0, anchor: .center) // Makes the spinner larger + .onAppear { + DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { + // Simulates a delay in content loading + // Perform transition to the next view here + } + } + } +} From c2a3d7f09ca7fa58dddbed580150c552c00ec9d3 Mon Sep 17 00:00:00 2001 From: himanshunaidu Date: Fri, 1 May 2026 16:53:39 -0700 Subject: [PATCH 14/14] Complete the first version of shared framework and fix relevant bugs --- IOSAccessAssessment.xcodeproj/project.pbxproj | 4 ++-- IOSAccessAssessment/View/ARCameraView.swift | 1 + .../View/TestMode/TestCameraView.swift | 1 + .../ARCamera/ARCameraViewController.swift | 4 ++++ .../Config/MapillaryCustom11ClassConfig.swift | 2 +- .../SurfaceIntegrityProcessor.swift | 11 +++++----- .../View/SubView/InvalidContentView.swift | 21 ++++++++++++------- 7 files changed, 28 insertions(+), 16 deletions(-) diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index 4058b90b..5f80832f 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -1733,7 +1733,7 @@ GENERATE_INFOPLIST_FILE = YES; INFOPLIST_KEY_NSHumanReadableCopyright = ""; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 26.0; + IPHONEOS_DEPLOYMENT_TARGET = 18.6; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", @@ -1767,7 +1767,7 @@ GENERATE_INFOPLIST_FILE = YES; INFOPLIST_KEY_NSHumanReadableCopyright = ""; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 26.0; + IPHONEOS_DEPLOYMENT_TARGET = 18.6; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", diff --git a/IOSAccessAssessment/View/ARCameraView.swift 
b/IOSAccessAssessment/View/ARCameraView.swift index 19a2779c..bd0902c6 100644 --- a/IOSAccessAssessment/View/ARCameraView.swift +++ b/IOSAccessAssessment/View/ARCameraView.swift @@ -310,6 +310,7 @@ struct ARCameraView: View { { throw ARCameraViewError.captureNoSegmentationAccessibilityFeatures } + default: break } try manager.pause() locationManager.stopLocationUpdates() diff --git a/IOSAccessAssessment/View/TestMode/TestCameraView.swift b/IOSAccessAssessment/View/TestMode/TestCameraView.swift index 81b93325..19afaf39 100644 --- a/IOSAccessAssessment/View/TestMode/TestCameraView.swift +++ b/IOSAccessAssessment/View/TestMode/TestCameraView.swift @@ -379,6 +379,7 @@ struct TestCameraView: View { { throw TestCameraViewError.captureNoSegmentationAccessibilityFeatures } + default: break } let captureLocation = datadatasetCaptureData.location let captureHeading = datadatasetCaptureData.heading diff --git a/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift index d38da668..ce77e49f 100644 --- a/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift +++ b/PointNMapShared/Sources/PointNMap/ARCamera/ARCameraViewController.swift @@ -429,6 +429,10 @@ public final class ARCameraViewController: UIViewController, ARSessionCameraProc public struct HostedARCameraViewContainer: UIViewControllerRepresentable { public var arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate + public init(arSessionCameraProcessingDelegate: ARSessionCameraProcessingDelegate) { + self.arSessionCameraProcessingDelegate = arSessionCameraProcessingDelegate + } + public func makeUIViewController(context: Context) -> ARCameraViewController { let vc = ARCameraViewController(arSessionCameraProcessingDelegate: arSessionCameraProcessingDelegate) return vc diff --git a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift 
b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift index d6741648..2ba29958 100644 --- a/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift +++ b/PointNMapShared/Sources/PointNMap/AccessibilityFeature/Config/MapillaryCustom11ClassConfig.swift @@ -10,7 +10,7 @@ import ARKit public extension AccessibilityFeatureConfig { static let mapillaryCustom11Config: AccessibilityFeatureClassConfig = AccessibilityFeatureClassConfig( - modelURL: Bundle.main.url(forResource: "bisenetv2_11_640_640", withExtension: "mlmodelc"), + modelURL: PointNMapSharedResources.bundle.url(forResource: "bisenetv2_11_640_640", withExtension: "mlmodelc"), classes: [ // AccessibilityFeatureClass(name: "Road", grayscaleValue: 0.0 / 255.0, labelValue: 0, // color: CIColor(red: 0.502, green: 0.251, blue: 0.502), diff --git a/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift index ccf15591..179d8f23 100644 --- a/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift +++ b/PointNMapShared/Sources/PointNMap/ComputerVision/Projection/SurfaceIntegrity/SurfaceIntegrityProcessor.swift @@ -93,27 +93,28 @@ public struct SurfaceIntegrityProcessor { self.ciContext = CIContext(mtlDevice: device, options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]) - guard let countKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "countDeviantNormals"), + let library = try device.makeDefaultLibrary(bundle: PointNMapSharedResources.bundle) + guard let countKernelFunction = library.makeFunction(name: "countDeviantNormals"), let countPipeline = try? 
device.makeComputePipelineState(function: countKernelFunction) else { throw SurfaceIntegrityProcessorError.metalInitializationFailed } self.countPipeline = countPipeline - guard let stdKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "stdFromNormals"), + guard let stdKernelFunction = library.makeFunction(name: "stdFromNormals"), let stdPipeline = try? device.makeComputePipelineState(function: stdKernelFunction) else { throw SurfaceIntegrityProcessorError.metalInitializationFailed } self.stdPipeline = stdPipeline - guard let countPolygonKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "countDeviantPolygonNormals"), + guard let countPolygonKernelFunction = library.makeFunction(name: "countDeviantPolygonNormals"), let countPolygonPipeline = try? device.makeComputePipelineState(function: countPolygonKernelFunction) else { throw SurfaceIntegrityProcessorError.metalInitializationFailed } self.countPolygonPipeline = countPolygonPipeline - guard let areaWithinBoundsPolygonKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "areaWithinBoundsPolygon"), + guard let areaWithinBoundsPolygonKernelFunction = library.makeFunction(name: "areaWithinBoundsPolygon"), let areaWithinBoundsPolygonPipeline = try? device.makeComputePipelineState(function: areaWithinBoundsPolygonKernelFunction) else { throw SurfaceIntegrityProcessorError.metalInitializationFailed } self.areaWithinBoundsPolygonPipeline = areaWithinBoundsPolygonPipeline - guard let stdPolygonKernelFunction = device.makeDefaultLibrary()?.makeFunction(name: "stdFromPolygonNormals"), + guard let stdPolygonKernelFunction = library.makeFunction(name: "stdFromPolygonNormals"), let stdPolygonPipeline = try? 
device.makeComputePipelineState(function: stdPolygonKernelFunction) else { throw SurfaceIntegrityProcessorError.metalInitializationFailed } diff --git a/PointNMapShared/Sources/PointNMap/View/SubView/InvalidContentView.swift b/PointNMapShared/Sources/PointNMap/View/SubView/InvalidContentView.swift index 848c5612..b7eeed40 100644 --- a/PointNMapShared/Sources/PointNMap/View/SubView/InvalidContentView.swift +++ b/PointNMapShared/Sources/PointNMap/View/SubView/InvalidContentView.swift @@ -7,19 +7,24 @@ import SwiftUI -enum InvalidContentViewConstants { - enum Images { - static let closeIcon: String = "xmark" +public enum InvalidContentViewConstants { + public enum Images { + public static let closeIcon: String = "xmark" } } -struct InvalidContentView: View { - let title: String - let message: String +public struct InvalidContentView: View { + public let title: String + public let message: String - @Environment(\.dismiss) var dismiss + @Environment(\.dismiss) public var dismiss - var body: some View { + public init(title: String, message: String) { + self.title = title + self.message = message + } + + public var body: some View { VStack { HStack { Spacer()