diff --git a/IOSAccessAssessment.xcodeproj/project.pbxproj b/IOSAccessAssessment.xcodeproj/project.pbxproj index bcb8950..fa7a5c0 100644 --- a/IOSAccessAssessment.xcodeproj/project.pbxproj +++ b/IOSAccessAssessment.xcodeproj/project.pbxproj @@ -70,6 +70,10 @@ A374FAB72EE0173600055268 /* OSMResponseElement.swift in Sources */ = {isa = PBXBuildFile; fileRef = A374FAB62EE0173200055268 /* OSMResponseElement.swift */; }; A37E3E3C2EED60F300B07B77 /* PngEncoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E3B2EED60F300B07B77 /* PngEncoder.mm */; }; A37E3E3D2EED60F300B07B77 /* lodepng.cpp in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E392EED60F300B07B77 /* lodepng.cpp */; }; + A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E942EFB66E600B07B77 /* CameraIntrinsicsEncoder.swift */; }; + A37E3E9B2EFB8F7500B07B77 /* HeadingEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9A2EFB8F7200B07B77 /* HeadingEncoder.swift */; }; + A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */; }; + A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */; }; A37E720E2ED5783600CFE4EF /* SharedAppContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */; }; A37E72102ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E720F2ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift */; }; A37E72142ED95C0C00CFE4EF /* MeshHelpers.swift in Sources */ = {isa = PBXBuildFile; fileRef = A37E72132ED95C0900CFE4EF /* MeshHelpers.swift */; }; @@ -92,6 +96,7 @@ A3B2DDBF2DC99DEF003416FB /* HomographyRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; 
fileRef = A3B2DDBE2DC99DE9003416FB /* HomographyRequestProcessor.swift */; }; A3B2DDC12DC99F44003416FB /* SegmentationModelRequestProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */; }; A3BB5AFB2DB210AE008673ED /* BinaryMaskFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BB5AFA2DB210A8008673ED /* BinaryMaskFilter.swift */; }; + A3BCBC502EFBB92900D15E15 /* AccessibilityFeatureEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */; }; A3C22FD32CF194A600533BF7 /* CGImageUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C22FD22CF194A200533BF7 /* CGImageUtils.swift */; }; A3C22FD82CF2F0C300533BF7 /* DequeModule in Frameworks */ = {isa = PBXBuildFile; productRef = A3C22FD72CF2F0C300533BF7 /* DequeModule */; }; A3C55A472EAF513B00F6CFDC /* FrameRasterizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */; }; @@ -240,6 +245,10 @@ A37E3E392EED60F300B07B77 /* lodepng.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = lodepng.cpp; sourceTree = ""; }; A37E3E3A2EED60F300B07B77 /* PngEncoder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PngEncoder.h; sourceTree = ""; }; A37E3E3B2EED60F300B07B77 /* PngEncoder.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PngEncoder.mm; sourceTree = ""; }; + A37E3E942EFB66E600B07B77 /* CameraIntrinsicsEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraIntrinsicsEncoder.swift; sourceTree = ""; }; + A37E3E9A2EFB8F7200B07B77 /* HeadingEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HeadingEncoder.swift; sourceTree = ""; }; + A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = AccessibilityFeatureSnapshot.swift; sourceTree = ""; }; + A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureClassSnapshot.swift; sourceTree = ""; }; A37E720D2ED5783300CFE4EF /* SharedAppContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SharedAppContext.swift; sourceTree = ""; }; A37E720F2ED66A6400CFE4EF /* SegmentationAnnotationPipeline.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationAnnotationPipeline.swift; sourceTree = ""; }; A37E72132ED95C0900CFE4EF /* MeshHelpers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeshHelpers.swift; sourceTree = ""; }; @@ -262,6 +271,7 @@ A3B2DDBE2DC99DE9003416FB /* HomographyRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomographyRequestProcessor.swift; sourceTree = ""; }; A3B2DDC02DC99F3D003416FB /* SegmentationModelRequestProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SegmentationModelRequestProcessor.swift; sourceTree = ""; }; A3BB5AFA2DB210A8008673ED /* BinaryMaskFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BinaryMaskFilter.swift; sourceTree = ""; }; + A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccessibilityFeatureEncoder.swift; sourceTree = ""; }; A3C22FD22CF194A200533BF7 /* CGImageUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CGImageUtils.swift; sourceTree = ""; }; A3C55A462EAF513800F6CFDC /* FrameRasterizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameRasterizer.swift; sourceTree = ""; }; A3C55A482EAFFAB600F6CFDC /* CenterCropTransformUtils.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = CenterCropTransformUtils.swift; sourceTree = ""; }; @@ -453,14 +463,12 @@ isa = PBXGroup; children = ( A37E3E352EED60C100B07B77 /* CHelpers */, - A3FE16662E18C81500DAE5BE /* LocationEncoder.swift */, - A3FE16642E18C53D00DAE5BE /* CameraTransformEncoder.swift */, - A3FE16622E18BAE900DAE5BE /* ConfidenceEncoder.swift */, - A3FE16602E18BA5600DAE5BE /* RGBEncoder.swift */, - A3FE166D2E1C2AEF00DAE5BE /* SegmentationEncoder.swift */, - A305B06B2E18A85D00ECCF9B /* DepthEncoder.swift */, + A37E3E982EFB8F4900B07B77 /* Location */, + A37E3E962EFB8F1300B07B77 /* Image */, + A37E3E972EFB8F3800B07B77 /* ARCamera */, + A37E3E992EFB8F5400B07B77 /* Others */, + A37E3E9C2EFB8F8400B07B77 /* AccessibilityFeature */, A305B05B2E18882500ECCF9B /* DatasetEncoder.swift */, - A3FE166B2E1C29C800DAE5BE /* OtherDetailsEncoder.swift */, ); path = LocalDataset; sourceTree = ""; @@ -662,6 +670,61 @@ path = CHelpers; sourceTree = ""; }; + A37E3E962EFB8F1300B07B77 /* Image */ = { + isa = PBXGroup; + children = ( + A3FE16622E18BAE900DAE5BE /* ConfidenceEncoder.swift */, + A3FE16602E18BA5600DAE5BE /* RGBEncoder.swift */, + A3FE166D2E1C2AEF00DAE5BE /* SegmentationEncoder.swift */, + A305B06B2E18A85D00ECCF9B /* DepthEncoder.swift */, + ); + path = Image; + sourceTree = ""; + }; + A37E3E972EFB8F3800B07B77 /* ARCamera */ = { + isa = PBXGroup; + children = ( + A3FE16642E18C53D00DAE5BE /* CameraTransformEncoder.swift */, + A37E3E942EFB66E600B07B77 /* CameraIntrinsicsEncoder.swift */, + ); + path = ARCamera; + sourceTree = ""; + }; + A37E3E982EFB8F4900B07B77 /* Location */ = { + isa = PBXGroup; + children = ( + A3FE16662E18C81500DAE5BE /* LocationEncoder.swift */, + A37E3E9A2EFB8F7200B07B77 /* HeadingEncoder.swift */, + ); + path = Location; + sourceTree = ""; + }; + A37E3E992EFB8F5400B07B77 /* Others */ = { + isa = PBXGroup; + children = ( + A3FE166B2E1C29C800DAE5BE /* OtherDetailsEncoder.swift */, + ); + path = Others; + sourceTree = ""; + }; 
+ A37E3E9C2EFB8F8400B07B77 /* AccessibilityFeature */ = { + isa = PBXGroup; + children = ( + A37E3EA12EFBACF300B07B77 /* Components */, + A3BCBC4F2EFBB92500D15E15 /* AccessibilityFeatureEncoder.swift */, + ); + path = AccessibilityFeature; + sourceTree = ""; + }; + A37E3EA12EFBACF300B07B77 /* Components */ = { + isa = PBXGroup; + children = ( + A37E3E9D2EFBAA7D00B07B77 /* AccessibilityFeatureSnapshot.swift */, + A37E3E9F2EFBAADC00B07B77 /* AccessibilityFeatureClassSnapshot.swift */, + ); + path = Components; + sourceTree = ""; + }; A37E720A2ED571A800CFE4EF /* Definitions */ = { isa = PBXGroup; children = ( @@ -1075,6 +1138,7 @@ A308015C2EC09BB700B1BA3A /* CityscapesClassConfig.swift in Sources */, A308015D2EC09BB700B1BA3A /* CityscapesSubsetClassConfig.swift in Sources */, A35547C82EC1B0DB00F43AFD /* MappingData.swift in Sources */, + A37E3EA02EFBAADD00B07B77 /* AccessibilityFeatureClassSnapshot.swift in Sources */, A35547C42EC1AF5700F43AFD /* CaptureData.swift in Sources */, A308015E2EC09BB700B1BA3A /* CocoCustom35ClassConfig.swift in Sources */, A308015F2EC09BB700B1BA3A /* MapillaryCustom11ClassConfig.swift in Sources */, @@ -1158,13 +1222,16 @@ A35E05102EDE60C0003C26CF /* InvalidContentView.swift in Sources */, A35E050D2EDE35E1003C26CF /* LocalizationProcessor.swift in Sources */, A35E051E2EDFB09A003C26CF /* OSMWay.swift in Sources */, + A37E3E9B2EFB8F7500B07B77 /* HeadingEncoder.swift in Sources */, A305B05C2E18882800ECCF9B /* DatasetEncoder.swift in Sources */, A3DA4DA82EB94D84005BB812 /* MeshGPUSnapshot.swift in Sources */, A31A1E742EAC426C008B30B7 /* ARCameraViewController.swift in Sources */, A37E72162ED95CB400CFE4EF /* MeshGPUDefinitions.swift in Sources */, DAA7F8C22CA684AF003666D8 /* ProgressBar.swift in Sources */, + A37E3E952EFB66EB00B07B77 /* CameraIntrinsicsEncoder.swift in Sources */, A35547152EC198A600F43AFD /* ContourRequestProcessor.swift in Sources */, A30801642EC0A8AA00B1BA3A /* DetectedFeature.swift in Sources */, + A3BCBC502EFBB92900D15E15 /* 
AccessibilityFeatureEncoder.swift in Sources */, A3FFAA752DE00F3B002B99BD /* ARCameraManager.swift in Sources */, A3DC22EF2DCF119A0020CE84 /* HomographyTransformFilter.swift in Sources */, A3420F1C2E8D82E700CD617E /* APIEnvironment.swift in Sources */, @@ -1173,6 +1240,7 @@ A3FE16672E18C81800DAE5BE /* LocationEncoder.swift in Sources */, A3FFAA832DE5253E002B99BD /* bisenetv2_53_640_640.mlpackage in Sources */, A3FFAA7A2DE01A0F002B99BD /* ARCameraView.swift in Sources */, + A37E3E9E2EFBAA8700B07B77 /* AccessibilityFeatureSnapshot.swift in Sources */, A3FFAA782DE01637002B99BD /* ARCameraUtils.swift in Sources */, A3FE166E2E1C2AF200DAE5BE /* SegmentationEncoder.swift in Sources */, A30BED3A2ED162F1004A5B51 /* ConnectedComponents.swift in Sources */, @@ -1364,7 +1432,7 @@ INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; IPHONEOS_DEPLOYMENT_TARGET = 18.6; LD_RUNPATH_SEARCH_PATHS = "$(inherited)"; - MARKETING_VERSION = 0.2; + MARKETING_VERSION = 0.3; MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment/**"; PRODUCT_BUNDLE_IDENTIFIER = edu.uw.pointmapper; PRODUCT_NAME = "$(TARGET_NAME)"; @@ -1407,7 +1475,7 @@ INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; IPHONEOS_DEPLOYMENT_TARGET = 18.6; LD_RUNPATH_SEARCH_PATHS = "$(inherited)"; - MARKETING_VERSION = 0.2; + MARKETING_VERSION = 0.3; MTL_HEADER_SEARCH_PATHS = "$(SRCROOT)/IOSAccessAssessment/**"; PRODUCT_BUNDLE_IDENTIFIER = edu.uw.pointmapper; PRODUCT_NAME = "$(TARGET_NAME)"; diff --git a/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift b/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift index 744daf2..04e7e06 100644 --- 
a/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift +++ b/IOSAccessAssessment/AccessibilityFeature/Definitions/AccessibilityFeature.swift @@ -18,7 +18,7 @@ enum AccessibilityFeatureError: Error, LocalizedError { } } -struct LocationDetails { +struct LocationDetails: Codable, Sendable { var coordinates: [[CLLocationCoordinate2D]] init(coordinate: CLLocationCoordinate2D) { @@ -32,6 +32,37 @@ struct LocationDetails { init(coordinates: [[CLLocationCoordinate2D]]) { self.coordinates = coordinates } + + enum CodingKeys: String, CodingKey { + case coordinates + } + + func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + let encodedCoordinates = coordinates.map { ring in + ring.map { coordinate in + [coordinate.latitude, coordinate.longitude] + } + } + try container.encode(encodedCoordinates, forKey: .coordinates) + } + + init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + let decodedCoordinates = try container.decode([[[Double]]].self, forKey: .coordinates) + self.coordinates = try decodedCoordinates.map { ring in + try ring.map { coordinateArray in + guard coordinateArray.count == 2 else { + throw DecodingError.dataCorruptedError( + forKey: .coordinates, + in: container, + debugDescription: "Each coordinate must have exactly two elements: latitude and longitude." 
+ ) + } + return CLLocationCoordinate2D(latitude: coordinateArray[0], longitude: coordinateArray[1]) + } + } + } } protocol AccessibilityFeatureProtocol: Identifiable, Equatable { diff --git a/IOSAccessAssessment/AttributeEstimation/AttributeEstimationPipeline.swift b/IOSAccessAssessment/AttributeEstimation/AttributeEstimationPipeline.swift index 6dd23e5..89caf0b 100644 --- a/IOSAccessAssessment/AttributeEstimation/AttributeEstimationPipeline.swift +++ b/IOSAccessAssessment/AttributeEstimation/AttributeEstimationPipeline.swift @@ -173,21 +173,103 @@ extension AttributeEstimationPipeline { throw AttributeEstimationPipelineError.missingCaptureData } let captureImageDataConcrete = CaptureImageData(captureImageData) + return try getLocationFromCentroid( + depthMapProcessor: depthMapProcessor, + localizationProcessor: localizationProcessor, + captureImageData: captureImageDataConcrete, + deviceLocation: deviceLocation, + accessibilityFeature: accessibilityFeature + ) + } + + private func calculateLocationForLineString( + deviceLocation: CLLocationCoordinate2D, + accessibilityFeature: EditableAccessibilityFeature + ) throws -> LocationRequestResult { + guard let depthMapProcessor = self.depthMapProcessor else { + throw AttributeEstimationPipelineError.configurationError(Constants.Texts.depthMapProcessorKey) + } + guard let localizationProcessor = self.localizationProcessor else { + throw AttributeEstimationPipelineError.configurationError(Constants.Texts.localizationProcessorKey) + } + guard let captureImageData = self.captureImageData else { + throw AttributeEstimationPipelineError.missingCaptureData + } + let captureImageDataConcrete = CaptureImageData(captureImageData) + do { + return try getLocationFromTrapezoid( + depthMapProcessor: depthMapProcessor, + localizationProcessor: localizationProcessor, + captureImageData: captureImageDataConcrete, + deviceLocation: deviceLocation, + accessibilityFeature: accessibilityFeature + ) + } catch { + return try 
getLocationFromCentroid( + depthMapProcessor: depthMapProcessor, + localizationProcessor: localizationProcessor, + captureImageData: captureImageDataConcrete, + deviceLocation: deviceLocation, + accessibilityFeature: accessibilityFeature + ) + } + } + + private func calculateLocationForPolygon( + deviceLocation: CLLocationCoordinate2D, + accessibilityFeature: EditableAccessibilityFeature + ) throws -> LocationRequestResult { + guard let depthMapProcessor = self.depthMapProcessor else { + throw AttributeEstimationPipelineError.configurationError(Constants.Texts.depthMapProcessorKey) + } + guard let localizationProcessor = self.localizationProcessor else { + throw AttributeEstimationPipelineError.configurationError(Constants.Texts.localizationProcessorKey) + } + guard let captureImageData = self.captureImageData else { + throw AttributeEstimationPipelineError.missingCaptureData + } + let captureImageDataConcrete = CaptureImageData(captureImageData) + do { + return try getLocationFromPolygon( + depthMapProcessor: depthMapProcessor, + localizationProcessor: localizationProcessor, + captureImageData: captureImageDataConcrete, + deviceLocation: deviceLocation, + accessibilityFeature: accessibilityFeature + ) + } catch { + return try getLocationFromCentroid( + depthMapProcessor: depthMapProcessor, + localizationProcessor: localizationProcessor, + captureImageData: captureImageDataConcrete, + deviceLocation: deviceLocation, + accessibilityFeature: accessibilityFeature + ) + } + } + + private func getLocationFromCentroid( + depthMapProcessor: DepthMapProcessor, + localizationProcessor: LocalizationProcessor, + captureImageData: CaptureImageData, + deviceLocation: CLLocationCoordinate2D, + accessibilityFeature: EditableAccessibilityFeature + ) throws -> LocationRequestResult { let featureDepthValue = try depthMapProcessor.getFeatureDepthAtCentroidInRadius( detectedFeature: accessibilityFeature, radius: 3 ) let featureCentroid = accessibilityFeature.contourDetails.centroid 
let locationDelta = localizationProcessor.calculateDelta( point: featureCentroid, depth: featureDepthValue, - imageSize: captureImageDataConcrete.originalSize, - cameraTransform: captureImageDataConcrete.cameraTransform, - cameraIntrinsics: captureImageDataConcrete.cameraIntrinsics + imageSize: captureImageData.originalSize, + cameraTransform: captureImageData.cameraTransform, + cameraIntrinsics: captureImageData.cameraIntrinsics ) let locationCoordinate = localizationProcessor.calculateLocation( point: featureCentroid, depth: featureDepthValue, - imageSize: captureImageDataConcrete.originalSize, - cameraTransform: captureImageDataConcrete.cameraTransform, - cameraIntrinsics: captureImageDataConcrete.cameraIntrinsics, + imageSize: captureImageData.originalSize, + cameraTransform: captureImageData.cameraTransform, + cameraIntrinsics: captureImageData.cameraIntrinsics, deviceLocation: deviceLocation ) let coordinates: [[CLLocationCoordinate2D]] = [[locationCoordinate]] @@ -196,20 +278,13 @@ extension AttributeEstimationPipeline { ) } - private func calculateLocationForLineString( + private func getLocationFromTrapezoid( + depthMapProcessor: DepthMapProcessor, + localizationProcessor: LocalizationProcessor, + captureImageData: CaptureImageData, deviceLocation: CLLocationCoordinate2D, accessibilityFeature: EditableAccessibilityFeature ) throws -> LocationRequestResult { - guard let depthMapProcessor = self.depthMapProcessor else { - throw AttributeEstimationPipelineError.configurationError(Constants.Texts.depthMapProcessorKey) - } - guard let localizationProcessor = self.localizationProcessor else { - throw AttributeEstimationPipelineError.configurationError(Constants.Texts.localizationProcessorKey) - } - guard let captureImageData = self.captureImageData else { - throw AttributeEstimationPipelineError.missingCaptureData - } - let captureImageDataConcrete = CaptureImageData(captureImageData) let trapezoidBoundPoints = 
accessibilityFeature.contourDetails.normalizedPoints guard trapezoidBoundPoints.count == 4 else { throw AttributeEstimationPipelineError.invalidAttributeData @@ -231,17 +306,17 @@ extension AttributeEstimationPipeline { let locationDeltas: [SIMD2] = pointsWithDepth.map { pointWithDepth in return localizationProcessor.calculateDelta( point: pointWithDepth.point, depth: pointWithDepth.depth, - imageSize: captureImageDataConcrete.originalSize, - cameraTransform: captureImageDataConcrete.cameraTransform, - cameraIntrinsics: captureImageDataConcrete.cameraIntrinsics + imageSize: captureImageData.originalSize, + cameraTransform: captureImageData.cameraTransform, + cameraIntrinsics: captureImageData.cameraIntrinsics ) } let locationCoordinates: [CLLocationCoordinate2D] = pointsWithDepth.map { pointWithDepth in return localizationProcessor.calculateLocation( point: pointWithDepth.point, depth: pointWithDepth.depth, - imageSize: captureImageDataConcrete.originalSize, - cameraTransform: captureImageDataConcrete.cameraTransform, - cameraIntrinsics: captureImageDataConcrete.cameraIntrinsics, + imageSize: captureImageData.originalSize, + cameraTransform: captureImageData.cameraTransform, + cameraIntrinsics: captureImageData.cameraIntrinsics, deviceLocation: deviceLocation ) } @@ -253,20 +328,13 @@ extension AttributeEstimationPipeline { ) } - private func calculateLocationForPolygon( + private func getLocationFromPolygon( + depthMapProcessor: DepthMapProcessor, + localizationProcessor: LocalizationProcessor, + captureImageData: CaptureImageData, deviceLocation: CLLocationCoordinate2D, accessibilityFeature: EditableAccessibilityFeature ) throws -> LocationRequestResult { - guard let depthMapProcessor = self.depthMapProcessor else { - throw AttributeEstimationPipelineError.configurationError(Constants.Texts.depthMapProcessorKey) - } - guard let localizationProcessor = self.localizationProcessor else { - throw 
AttributeEstimationPipelineError.configurationError(Constants.Texts.localizationProcessorKey) - } - guard let captureImageData = self.captureImageData else { - throw AttributeEstimationPipelineError.missingCaptureData - } - let captureImageDataConcrete = CaptureImageData(captureImageData) let polygonPoints = accessibilityFeature.contourDetails.normalizedPoints let leftMostPoint = polygonPoints.min { $0.x < $1.x } let rightMostPoint = polygonPoints.max { $0.x < $1.x } @@ -286,17 +354,17 @@ extension AttributeEstimationPipeline { let locationDeltas: [SIMD2] = pointsWithDepth.map { pointWithDepth in return localizationProcessor.calculateDelta( point: pointWithDepth.point, depth: pointWithDepth.depth, - imageSize: captureImageDataConcrete.originalSize, - cameraTransform: captureImageDataConcrete.cameraTransform, - cameraIntrinsics: captureImageDataConcrete.cameraIntrinsics + imageSize: captureImageData.originalSize, + cameraTransform: captureImageData.cameraTransform, + cameraIntrinsics: captureImageData.cameraIntrinsics ) } let locationCoordinates: [CLLocationCoordinate2D] = pointsWithDepth.map { pointWithDepth in return localizationProcessor.calculateLocation( point: pointWithDepth.point, depth: pointWithDepth.depth, - imageSize: captureImageDataConcrete.originalSize, - cameraTransform: captureImageDataConcrete.cameraTransform, - cameraIntrinsics: captureImageDataConcrete.cameraIntrinsics, + imageSize: captureImageData.originalSize, + cameraTransform: captureImageData.cameraTransform, + cameraIntrinsics: captureImageData.cameraIntrinsics, deviceLocation: deviceLocation ) } diff --git a/IOSAccessAssessment/LocalDataset/ARCamera/CameraIntrinsicsEncoder.swift b/IOSAccessAssessment/LocalDataset/ARCamera/CameraIntrinsicsEncoder.swift new file mode 100644 index 0000000..7bdd057 --- /dev/null +++ b/IOSAccessAssessment/LocalDataset/ARCamera/CameraIntrinsicsEncoder.swift @@ -0,0 +1,60 @@ +// +// CameraIntrinsicsEncoder.swift +// IOSAccessAssessment +// +// Created by Himanshu 
on 12/23/25. +// + +import Foundation + +enum CameraIntrinsicsEncoderError: Error, LocalizedError { + case fileCreationFailed + case dataWriteFailed + + var errorDescription: String? { + switch self { + case .fileCreationFailed: + return "Unable to create camera intrinsics file." + case .dataWriteFailed: + return "Failed to write data to camera intrinsics file." + } + } +} + +class CameraIntrinsicsEncoder { + private let path: URL + let fileHandle: FileHandle + + init(url: URL) throws { + self.path = url + try "".write(to: self.path, atomically: true, encoding: .utf8) + self.fileHandle = try FileHandle(forWritingTo: self.path) + guard let header = "timestamp, frame, fx, sx, cx, sy, fy, cy, i02, i12, i22\n".data(using: .utf8) else { + throw CameraIntrinsicsEncoderError.fileCreationFailed + } + try self.fileHandle.write(contentsOf: header) + } + + func add(intrinsics: simd_float3x3, timestamp: TimeInterval, frameNumber: UUID) throws { + let fx = intrinsics[0,0] + let sx = intrinsics[1,0] + let cx = intrinsics[2,0] + let sy = intrinsics[0,1] + let fy = intrinsics[1,1] + let cy = intrinsics[2,1] + let i02 = intrinsics[0,2] + let i12 = intrinsics[1,2] + let i22 = intrinsics[2,2] + + let frameNumber = String(frameNumber.uuidString) + let line = "\(timestamp), \(frameNumber), \(fx), \(sx), \(cx), \(sy), \(fy), \(cy), \(i02), \(i12), \(i22)\n" + guard let lineData = line.data(using: .utf8) else { + throw CameraIntrinsicsEncoderError.dataWriteFailed + } + try self.fileHandle.write(contentsOf: lineData) + } + + func done() throws { + try self.fileHandle.close() + } +} diff --git a/IOSAccessAssessment/LocalDataset/CameraTransformEncoder.swift b/IOSAccessAssessment/LocalDataset/ARCamera/CameraTransformEncoder.swift similarity index 72% rename from IOSAccessAssessment/LocalDataset/CameraTransformEncoder.swift rename to IOSAccessAssessment/LocalDataset/ARCamera/CameraTransformEncoder.swift index a46b754..ad28640 100644 --- 
a/IOSAccessAssessment/LocalDataset/CameraTransformEncoder.swift +++ b/IOSAccessAssessment/LocalDataset/ARCamera/CameraTransformEncoder.swift @@ -6,16 +6,17 @@ // import Foundation -import Accelerate -import ARKit enum CameraTransformEncoderError: Error, LocalizedError { - case unableToCreateFile + case fileCreationFailed + case dataWriteFailed var errorDescription: String? { switch self { - case .unableToCreateFile: + case .fileCreationFailed: return "Unable to create camera transform file." + case .dataWriteFailed: + return "Failed to write data to camera transform file." } } } @@ -29,12 +30,12 @@ class CameraTransformEncoder { try "".write(to: self.path, atomically: true, encoding: .utf8) self.fileHandle = try FileHandle(forWritingTo: self.path) guard let header = "timestamp, frame, rxx, rxy, rxz, ryx, ryy, ryz, rzx, rzy, rzz, x, y, z\n".data(using: .utf8) else { - throw CameraTransformEncoderError.unableToCreateFile + throw CameraTransformEncoderError.fileCreationFailed } - self.fileHandle.write(header) + try self.fileHandle.write(contentsOf: header) } - func add(transform: simd_float4x4, timestamp: TimeInterval, frameNumber: UUID) { + func add(transform: simd_float4x4, timestamp: TimeInterval, frameNumber: UUID) throws { let rotationX = transform.columns.0 let rotationY = transform.columns.1 let rotationZ = transform.columns.2 @@ -42,7 +43,10 @@ class CameraTransformEncoder { let frameNumber = String(frameNumber.uuidString) let line = "\(timestamp), \(frameNumber), \(rotationX.x), \(rotationX.y), \(rotationX.z), \(rotationY.x), \(rotationY.y), \(rotationY.z), \(rotationZ.x), \(rotationZ.y), \(rotationZ.z), \(translation.x), \(translation.y), \(translation.z)\n" - self.fileHandle.write(line.data(using: .utf8)!) 
+ guard let lineData = line.data(using: .utf8) else { + throw CameraTransformEncoderError.dataWriteFailed + } + try self.fileHandle.write(contentsOf: lineData) } func done() throws { diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift new file mode 100644 index 0000000..0f8da17 --- /dev/null +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/AccessibilityFeatureEncoder.swift @@ -0,0 +1,136 @@ +// +// AccessibilityFeatureEncoder.swift +// IOSAccessAssessment +// +// Created by Himanshu on 12/23/25. +// + +import Foundation + +enum AccessibilityFeatureEncoderError: Error, LocalizedError { + case fileCreationFailed + case dataWriteFailed + + var errorDescription: String? { + switch self { + case .fileCreationFailed: + return "Unable to create feature data file." + case .dataWriteFailed: + return "Failed to write feature data to file." + } + } +} + +final class AccessibilityFeatureFile { + private let url: URL + + private var snapshot: AccessibilityFeatureSnapshot + + init(url: URL, frameNumber: UUID, timestamp: TimeInterval, feature: EditableAccessibilityFeature) throws { + self.url = url + + if FileManager.default.fileExists(atPath: url.path) { + /// Load existing snapshot + let data = try Data(contentsOf: url) + let decoder = JSONDecoder() + decoder.dateDecodingStrategy = .iso8601 + let featureSnapshot = try decoder.decode(AccessibilityFeatureSnapshot.self, from: data) + self.snapshot = featureSnapshot + } else { + let featureSnapshot = AccessibilityFeatureSnapshot(from: feature) + self.snapshot = featureSnapshot + } + try self.update(frameNumber: frameNumber, timestamp: timestamp, feature: feature) + } + + func update(frameNumber: UUID, timestamp: TimeInterval, feature: any AccessibilityFeatureProtocol) throws { + self.snapshot.update(frame: frameNumber, timestamp: timestamp) + if let editableFeature = feature as? 
EditableAccessibilityFeature { + self.snapshot.update(from: editableFeature) + } else if let mappedFeature = feature as? MappedAccessibilityFeature { + self.snapshot.update(from: mappedFeature) + } else { + print("Unsupported feature type for update: \(type(of: feature))") + } + try self.flush() + } + + func flush() throws { + let encoder = JSONEncoder() + encoder.dateEncodingStrategy = .iso8601 + encoder.outputFormatting = [.prettyPrinted, .sortedKeys] + + let data = try encoder.encode(snapshot) + + let tmpURL = url.appendingPathExtension("tmp") + try data.write(to: tmpURL, options: [.atomic]) + + _ = try FileManager.default.replaceItemAt( + url, + withItemAt: tmpURL, + backupItemName: nil, + options: .usingNewMetadataOnly + ) + } +} + +class AccessibilityFeatureEncoder { + private let baseDirectory: URL + private var fileStore: [UUID: AccessibilityFeatureFile] = [:] + + init(outDirectory: URL) throws { + self.baseDirectory = outDirectory + try FileManager.default.createDirectory(at: outDirectory.absoluteURL, withIntermediateDirectories: true, attributes: nil) + } + + func insert(features: [EditableAccessibilityFeature], frameNumber: UUID, timestamp: TimeInterval) throws { + try features.forEach { feature in + if let featureFile = self.fileStore[feature.id] { + /// Update existing file + try featureFile.update(frameNumber: frameNumber, timestamp: timestamp, feature: feature) + } else { + /// Create new file + let featureFileURL = self.baseDirectory + .appendingPathComponent(feature.id.uuidString, isDirectory: false) + .appendingPathExtension("json") + let newFeatureFile = try AccessibilityFeatureFile( + url: featureFileURL, + frameNumber: frameNumber, + timestamp: timestamp, + feature: feature + ) + self.fileStore[feature.id] = newFeatureFile + } + } + } + + func update(features: [any AccessibilityFeatureProtocol], frameNumber: UUID, timestamp: TimeInterval) throws { + try features.forEach { feature in + if let featureFile = self.fileStore[feature.id] { + /// 
Update existing file + try featureFile.update(frameNumber: frameNumber, timestamp: timestamp, feature: feature) + } else if let editableFeature = feature as? EditableAccessibilityFeature { + /// Create new file for editable feature + let featureFileURL = self.baseDirectory + .appendingPathComponent(editableFeature.id.uuidString, isDirectory: false) + .appendingPathExtension("json") + let newFeatureFile = try AccessibilityFeatureFile( + url: featureFileURL, + frameNumber: frameNumber, + timestamp: timestamp, + feature: editableFeature + ) + self.fileStore[editableFeature.id] = newFeatureFile + } else { + print("Unsupported feature type for creation: \(type(of: feature))") + } + } + } + + func done() throws { + for (_, featureFile) in fileStore { + try featureFile.flush() + } + self.fileStore.removeAll() + } +} diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift new file mode 100644 index 0000000..1c2a1ff --- /dev/null +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureClassSnapshot.swift @@ -0,0 +1,25 @@ +// +// AccessibilityFeatureClassSnapshot.swift +// IOSAccessAssessment +// +// Created by Himanshu on 12/23/25. +// + +import Foundation +import CoreLocation + +struct AccessibilityFeatureClassSnapshot: Codable, Identifiable, Sendable { + let id: String + let name: String + + init(from accessibilityFeatureClass: AccessibilityFeatureClass) { + self.id = accessibilityFeatureClass.id + self.name = accessibilityFeatureClass.name + } + + /// Get AccessibilityFeatureClass from snapshot + func getAccessibilityFeatureClass() -> AccessibilityFeatureClass? 
{ + let matchedClass = Constants.SelectedAccessibilityFeatureConfig.classes.first { $0.id == self.id } + return matchedClass + } +} diff --git a/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift new file mode 100644 index 0000000..b764729 --- /dev/null +++ b/IOSAccessAssessment/LocalDataset/AccessibilityFeature/Components/AccessibilityFeatureSnapshot.swift @@ -0,0 +1,60 @@ +// +// AccessibilityFeatureSnapshot.swift +// IOSAccessAssessment +// +// Created by Himanshu on 12/23/25. +// + +import Foundation +import CoreLocation + +struct AccessibilityFeatureSnapshot: Codable, Identifiable, Sendable { + var frames: [UUID] = [] + var timestamp: TimeInterval? /// last updated timestamp + + let id: UUID + + let accessibilityFeatureClass: AccessibilityFeatureClassSnapshot + + let contourDetails: ContourDetails + + var selectedAnnotationOption: String + + var locationDetails: LocationDetails? + var calculatedAttributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] + var attributeValues: [AccessibilityFeatureAttribute: AccessibilityFeatureAttribute.Value?] + var experimentalAttributeValues: [AccessibilityFeatureAttribute : AccessibilityFeatureAttribute.Value?] + + var oswElement: String? 
+ + init(from accessibilityFeature: EditableAccessibilityFeature) { + self.id = accessibilityFeature.id + self.accessibilityFeatureClass = .init(from: accessibilityFeature.accessibilityFeatureClass) + self.contourDetails = accessibilityFeature.contourDetails + self.selectedAnnotationOption = accessibilityFeature.selectedAnnotationOption.rawValue + self.locationDetails = accessibilityFeature.locationDetails + self.calculatedAttributeValues = accessibilityFeature.calculatedAttributeValues + self.attributeValues = accessibilityFeature.attributeValues + self.experimentalAttributeValues = accessibilityFeature.experimentalAttributeValues + } + + mutating func update(from accessibilityFeature: EditableAccessibilityFeature) { + self.selectedAnnotationOption = accessibilityFeature.selectedAnnotationOption.rawValue + self.locationDetails = accessibilityFeature.locationDetails + self.calculatedAttributeValues = accessibilityFeature.calculatedAttributeValues + self.attributeValues = accessibilityFeature.attributeValues + self.experimentalAttributeValues = accessibilityFeature.experimentalAttributeValues + } + + mutating func update(from accessibilityFeature: MappedAccessibilityFeature) { + self.locationDetails = accessibilityFeature.locationDetails + self.attributeValues = accessibilityFeature.attributeValues + self.experimentalAttributeValues = accessibilityFeature.experimentalAttributeValues + self.oswElement = accessibilityFeature.oswElement.id + } + + mutating func update(frame: UUID, timestamp: TimeInterval) { + self.frames.append(frame) + self.timestamp = timestamp + } +} diff --git a/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift b/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift index c4f270f..a2a3d28 100644 --- a/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift +++ b/IOSAccessAssessment/LocalDataset/DatasetEncoder.swift @@ -23,23 +23,27 @@ class DatasetEncoder { private var datasetDirectory: URL private var savedFrames: Int = 0 - public let 
rgbFilePath: URL // Relative to app document directory. - public let depthFilePath: URL // Relative to app document directory. - public let segmentationFilePath: URL // Relative to app document directory. - public let confidenceFilePath: URL // Relative to app document directory. + public let rgbFilePath: URL /// Relative to app document directory. + public let depthFilePath: URL /// Relative to app document directory. + public let segmentationFilePath: URL /// Relative to app document directory. + public let confidenceFilePath: URL /// Relative to app document directory. + public let cameraIntrinsicsPath: URL public let cameraMatrixPath: URL public let cameraTransformPath: URL public let locationPath: URL // public let headingPath: URL + public let accessibilityFeaturePath: URL public let otherDetailsPath: URL private let rgbEncoder: RGBEncoder private let depthEncoder: DepthEncoder private let segmentationEncoder: SegmentationEncoder private let confidenceEncoder: ConfidenceEncoder + private let cameraIntrinsicsEncoder: CameraIntrinsicsEncoder private let cameraTransformEncoder: CameraTransformEncoder private let locationEncoder: LocationEncoder // private let headingEncoder: HeadingEncoder + private let accessibilityFeatureEncoder: AccessibilityFeatureEncoder private let otherDetailsEncoder: OtherDetailsEncoder public var capturedFrameIds: Set = [] @@ -47,28 +51,32 @@ class DatasetEncoder { init(workspaceId: String, changesetId: String) throws { self.workspaceId = workspaceId - // Create workspace Directory if it doesn't exist + /// Create workspace Directory if it doesn't exist self.workspaceDirectory = try DatasetEncoder.createDirectory(id: workspaceId) - // if workspace directory exists, create dataset directory inside it + /// if workspace directory exists, create dataset directory inside it datasetDirectory = try DatasetEncoder.createDirectory(id: changesetId, relativeTo: self.workspaceDirectory) self.rgbFilePath = 
datasetDirectory.appendingPathComponent("rgb", isDirectory: true) self.depthFilePath = datasetDirectory.appendingPathComponent("depth", isDirectory: true) self.segmentationFilePath = datasetDirectory.appendingPathComponent("segmentation", isDirectory: true) self.confidenceFilePath = datasetDirectory.appendingPathComponent("confidence", isDirectory: true) + self.cameraIntrinsicsPath = datasetDirectory.appendingPathComponent("camera_intrinsics.csv", isDirectory: false) self.cameraMatrixPath = datasetDirectory.appendingPathComponent("camera_matrix.csv", isDirectory: false) self.cameraTransformPath = datasetDirectory.appendingPathComponent("camera_transform.csv", isDirectory: false) self.locationPath = datasetDirectory.appendingPathComponent("location.csv", isDirectory: false) // self.headingPath = datasetDirectory.appendingPathComponent("heading.csv", isDirectory: false) + self.accessibilityFeaturePath = datasetDirectory.appendingPathComponent("features", isDirectory: true) self.otherDetailsPath = datasetDirectory.appendingPathComponent("other_details.csv", isDirectory: false) self.rgbEncoder = try RGBEncoder(outDirectory: self.rgbFilePath) self.depthEncoder = try DepthEncoder(outDirectory: self.depthFilePath) self.segmentationEncoder = try SegmentationEncoder(outDirectory: self.segmentationFilePath) self.confidenceEncoder = try ConfidenceEncoder(outDirectory: self.confidenceFilePath) + self.cameraIntrinsicsEncoder = try CameraIntrinsicsEncoder(url: self.cameraIntrinsicsPath) self.cameraTransformEncoder = try CameraTransformEncoder(url: self.cameraTransformPath) self.locationEncoder = try LocationEncoder(url: self.locationPath) // self.headingEncoder = HeadingEncoder(url: self.headingPath) + self.accessibilityFeatureEncoder = try AccessibilityFeatureEncoder(outDirectory: self.accessibilityFeaturePath) self.otherDetailsEncoder = try OtherDetailsEncoder(url: self.otherDetailsPath) } @@ -79,7 +87,7 @@ class DatasetEncoder { } let directory = URL(filePath: id, 
directoryHint: .isDirectory, relativeTo: relativeTo) if FileManager.default.fileExists(atPath: directory.path) { - // Return existing directory if it already exists + /// Return existing directory if it already exists return directory } try FileManager.default.createDirectory(at: directory, withIntermediateDirectories: true, attributes: nil) @@ -136,8 +144,8 @@ class DatasetEncoder { if let confidenceImage = confidenceImage { try self.confidenceEncoder.save(ciImage: confidenceImage, frameNumber: frameNumber) } - self.cameraTransformEncoder.add(transform: cameraTransform, timestamp: timestamp, frameNumber: frameNumber) - self.writeIntrinsics(cameraIntrinsics: cameraIntrinsics) + try self.cameraIntrinsicsEncoder.add(intrinsics: cameraIntrinsics, timestamp: timestamp, frameNumber: frameNumber) + try self.cameraTransformEncoder.add(transform: cameraTransform, timestamp: timestamp, frameNumber: frameNumber) if let location = location { let latitude = location.latitude @@ -146,64 +154,28 @@ class DatasetEncoder { // let trueHeading = heading?.trueHeading ?? 
0.0 let locationData = LocationData(timestamp: timestamp, latitude: latitude, longitude: longitude) // let headingData = HeadingData(timestamp: timestamp, magneticHeading: magneticHeading, trueHeading: trueHeading) - self.locationEncoder.add(locationData: locationData, frameNumber: frameNumber) + try self.locationEncoder.add(locationData: locationData, frameNumber: frameNumber) // self.headingEncoder.add(headingData: headingData, frameNumber: frameNumber) } - if let otherDetailsData = otherDetails { - self.otherDetailsEncoder.add(otherDetails: otherDetailsData, frameNumber: frameNumber) + try self.otherDetailsEncoder.add(otherDetails: otherDetailsData, frameNumber: frameNumber) } /// TODO: Add error handling for each encoder - /// Add a capture point to the TDEI workspaces -// uploadCapturePoint(location: (latitude: latitude, longitude: longitude), frameId: frameId) - savedFrames = savedFrames + 1 self.capturedFrameIds.insert(frameNumber) } - private func writeIntrinsics(cameraIntrinsics: simd_float3x3) { - let rows = cameraIntrinsics.transpose.columns - var csv: [String] = [] - for row in [rows.0, rows.1, rows.2] { - let csvLine = "\(row.x), \(row.y), \(row.z)" - csv.append(csvLine) - } - let contents = csv.joined(separator: "\n") - do { - try contents.write(to: self.cameraMatrixPath, atomically: true, encoding: String.Encoding.utf8) - } catch let error { - print("Could not write camera matrix. 
\(error.localizedDescription)") - } + public func addFeatures(features: [any AccessibilityFeatureProtocol], frameNumber: UUID, timestamp: TimeInterval) throws { + try self.accessibilityFeatureEncoder.update(features: features, frameNumber: frameNumber, timestamp: timestamp) } func save() throws { + try self.cameraIntrinsicsEncoder.done() try self.cameraTransformEncoder.done() try self.locationEncoder.done() // self.headingEncoder.done() + try self.accessibilityFeatureEncoder.done() } - -// func uploadCapturePoint(location: (latitude: CLLocationDegrees, longitude: CLLocationDegrees)?, frameId: UUID) { -// guard let nodeLatitude = location?.latitude, -// let nodeLongitude = location?.longitude -// else { return } -// -// var tags: [String: String] = [APIConstants.TagKeys.amenityKey: APIConstants.OtherConstants.capturePointAmenity] -// tags[APIConstants.TagKeys.captureIdKey] = frameId.uuidString -// tags[APIConstants.TagKeys.captureLatitudeKey] = String(format: "%.7f", nodeLatitude) -// tags[APIConstants.TagKeys.captureLongitudeKey] = String(format: "%.7f", nodeLongitude) -// -// let nodeData = NodeData(latitude: nodeLatitude, longitude: nodeLongitude, tags: tags) -// let nodeDataOperations: [ChangesetDiffOperation] = [ChangesetDiffOperation.create(nodeData)] -// -// ChangesetService.shared.performUpload(workspaceId: workspaceId, operations: nodeDataOperations) { result in -// switch result { -// case .success(_): -// print("Changes uploaded successfully.") -// case .failure(let error): -// print("Failed to upload changes: \(error.localizedDescription)") -// } -// } -// } } diff --git a/IOSAccessAssessment/LocalDataset/ConfidenceEncoder.swift b/IOSAccessAssessment/LocalDataset/Image/ConfidenceEncoder.swift similarity index 100% rename from IOSAccessAssessment/LocalDataset/ConfidenceEncoder.swift rename to IOSAccessAssessment/LocalDataset/Image/ConfidenceEncoder.swift diff --git a/IOSAccessAssessment/LocalDataset/DepthEncoder.swift 
b/IOSAccessAssessment/LocalDataset/Image/DepthEncoder.swift similarity index 100% rename from IOSAccessAssessment/LocalDataset/DepthEncoder.swift rename to IOSAccessAssessment/LocalDataset/Image/DepthEncoder.swift diff --git a/IOSAccessAssessment/LocalDataset/RGBEncoder.swift b/IOSAccessAssessment/LocalDataset/Image/RGBEncoder.swift similarity index 100% rename from IOSAccessAssessment/LocalDataset/RGBEncoder.swift rename to IOSAccessAssessment/LocalDataset/Image/RGBEncoder.swift diff --git a/IOSAccessAssessment/LocalDataset/SegmentationEncoder.swift b/IOSAccessAssessment/LocalDataset/Image/SegmentationEncoder.swift similarity index 100% rename from IOSAccessAssessment/LocalDataset/SegmentationEncoder.swift rename to IOSAccessAssessment/LocalDataset/Image/SegmentationEncoder.swift diff --git a/IOSAccessAssessment/LocalDataset/Location/HeadingEncoder.swift b/IOSAccessAssessment/LocalDataset/Location/HeadingEncoder.swift new file mode 100644 index 0000000..931e33a --- /dev/null +++ b/IOSAccessAssessment/LocalDataset/Location/HeadingEncoder.swift @@ -0,0 +1,60 @@ +// +// HeadingEncoder.swift +// IOSAccessAssessment +// +// Created by Himanshu on 12/23/25. +// + +import Foundation + +struct HeadingData { + let timestamp: TimeInterval + let magneticHeading: Double + let trueHeading: Double +// let headingAccuracy: Double +} + +enum HeadingEncoderError: Error, LocalizedError { + case fileCreationFailed + case dataWriteFailed + + var errorDescription: String? { + switch self { + case .fileCreationFailed: + return "Unable to create heading data file." + case .dataWriteFailed: + return "Failed to write heading data to file." 
+ } + } +} + +class HeadingEncoder { + let path: URL + let fileHandle: FileHandle + + init(url: URL) throws { + self.path = url + FileManager.default.createFile(atPath: self.path.path, contents:Data("".utf8), attributes: nil) + try "".write(to: self.path, atomically: true, encoding: .utf8) + self.fileHandle = try FileHandle(forWritingTo: self.path) + guard let header = "timestamp, frame, magnetic_heading, true_heading\n".data(using: .utf8) else { + throw HeadingEncoderError.fileCreationFailed + } +// , heading_accuracy\n" + try self.fileHandle.write(contentsOf: header) + } + + func add(headingData: HeadingData, frameNumber: UUID) throws { + let frameNumber = String(frameNumber.uuidString) + let line = "\(headingData.timestamp), \(frameNumber), \(headingData.magneticHeading), \(headingData.trueHeading)\n" +// , \(headingData.headingAccuracy)\n" + guard let lineData = line.data(using: .utf8) else { + throw HeadingEncoderError.dataWriteFailed + } + try self.fileHandle.write(contentsOf: lineData) + } + + func done() throws { + try self.fileHandle.close() + } +} diff --git a/IOSAccessAssessment/LocalDataset/Location/LocationEncoder.swift b/IOSAccessAssessment/LocalDataset/Location/LocationEncoder.swift new file mode 100644 index 0000000..695b881 --- /dev/null +++ b/IOSAccessAssessment/LocalDataset/Location/LocationEncoder.swift @@ -0,0 +1,64 @@ +// +// LocationEncoder.swift +// IOSAccessAssessment +// +// Created by Himanshu on 7/4/25. +// +import Foundation + +struct LocationData { + let timestamp: TimeInterval + let latitude: Double + let longitude: Double +// let altitude: Double +// let horizontalAccuracy: Double +// let verticalAccuracy: Double +// let speed: Double +// let course: Double +// let floorLevel: Int +} + +enum LocationEncoderError: Error, LocalizedError { + case fileCreationFailed + case dataWriteFailed + + var errorDescription: String? { + switch self { + case .fileCreationFailed: + return "Unable to create location data file."
+ case .dataWriteFailed: + return "Failed to write location data to file." + } + } +} + +class LocationEncoder { + let path: URL + let fileHandle: FileHandle + + init(url: URL) throws { + self.path = url + FileManager.default.createFile(atPath: self.path.path, contents:Data("".utf8), attributes: nil) + try "".write(to: self.path, atomically: true, encoding: .utf8) + self.fileHandle = try FileHandle(forWritingTo: self.path) + guard let header = "timestamp, frame, latitude, longitude\n".data(using: .utf8) else { + throw LocationEncoderError.fileCreationFailed + } +// , altitude, horizontal_accuracy, vertical_accuracy, speed, course, floor_level\n" + try self.fileHandle.write(contentsOf: header) + } + + func add(locationData: LocationData, frameNumber: UUID) throws { + let frameNumber = String(frameNumber.uuidString) + let line = "\(locationData.timestamp), \(frameNumber), \(locationData.latitude), \(locationData.longitude)\n" +// , \(locationData.altitude), \(locationData.horizontalAccuracy), \(locationData.verticalAccuracy), \(locationData.speed), \(locationData.course), \(locationData.floorLevel)\n" + guard let lineData = line.data(using: .utf8) else { + throw LocationEncoderError.dataWriteFailed + } + try self.fileHandle.write(contentsOf: lineData) + } + + func done() throws { + try self.fileHandle.close() + } +} diff --git a/IOSAccessAssessment/LocalDataset/LocationEncoder.swift b/IOSAccessAssessment/LocalDataset/LocationEncoder.swift deleted file mode 100644 index c5e07a2..0000000 --- a/IOSAccessAssessment/LocalDataset/LocationEncoder.swift +++ /dev/null @@ -1,78 +0,0 @@ -// -// LocationEncoder.swift -// IOSAccessAssessment -// -// Created by Himanshu on 7/4/25.
-// -import Foundation - -struct LocationData { - let timestamp: TimeInterval - let latitude: Double - let longitude: Double -// let altitude: Double -// let horizontalAccuracy: Double -// let verticalAccuracy: Double -// let speed: Double -// let course: Double -// let floorLevel: Int -} - -struct HeadingData { - let timestamp: TimeInterval - let magneticHeading: Double - let trueHeading: Double -// let headingAccuracy: Double -} - -class LocationEncoder { - let path: URL - let fileHandle: FileHandle - - init(url: URL) throws { - self.path = url - FileManager.default.createFile(atPath: self.path.absoluteString, contents:Data("".utf8), attributes: nil) - try "".write(to: self.path, atomically: true, encoding: .utf8) - self.fileHandle = try FileHandle(forWritingTo: self.path) - let heading: String = "timestamp, frame, latitude, longitude\n" -// , altitude, horizontal_accuracy, vertical_accuracy, speed, course, floor_level\n" - self.fileHandle.write(heading.data(using: .utf8)!) - } - - func add(locationData: LocationData, frameNumber: UUID) { - let frameNumber = String(frameNumber.uuidString) - let line = "\(locationData.timestamp), \(frameNumber), \(locationData.latitude), \(locationData.longitude)\n" -// , \(locationData.altitude), \(locationData.horizontalAccuracy), \(locationData.verticalAccuracy), \(locationData.speed), \(locationData.course), \(locationData.floorLevel)\n" - self.fileHandle.write(line.data(using: .utf8)!) 
- } - - func done() throws { - try self.fileHandle.close() - } -} - -class HeadingEncoder { - let path: URL - let fileHandle: FileHandle - - init(url: URL) throws { - self.path = url - FileManager.default.createFile(atPath: self.path.absoluteString, contents:Data("".utf8), attributes: nil) - try "".write(to: self.path, atomically: true, encoding: .utf8) - self.fileHandle = try FileHandle(forWritingTo: self.path) - let heading: String = "timestamp, frame, magnetic_heading, true_heading\n" -// , heading_accuracy\n" - self.fileHandle.write(heading.data(using: .utf8)!) - } - - func add(headingData: HeadingData, frameNumber: UUID) { - let frameNumber = String(frameNumber.uuidString) - let line = "\(headingData.timestamp), \(frameNumber) \(headingData.magneticHeading), \(headingData.trueHeading)\n" -// , \(headingData.headingAccuracy)\n" - self.fileHandle.write(line.data(using: .utf8)!) - } - - func done() throws { - try self.fileHandle.close() - } -} diff --git a/IOSAccessAssessment/LocalDataset/OtherDetailsEncoder.swift b/IOSAccessAssessment/LocalDataset/Others/OtherDetailsEncoder.swift similarity index 58% rename from IOSAccessAssessment/LocalDataset/OtherDetailsEncoder.swift rename to IOSAccessAssessment/LocalDataset/Others/OtherDetailsEncoder.swift index eb16445..8b17c47 100644 --- a/IOSAccessAssessment/LocalDataset/OtherDetailsEncoder.swift +++ b/IOSAccessAssessment/LocalDataset/Others/OtherDetailsEncoder.swift @@ -15,6 +15,20 @@ struct OtherDetailsData { let originalSize: CGSize } +enum OtherDetailsEncoderError: Error, LocalizedError { + case fileCreationFailed + case dataWriteFailed + + var errorDescription: String? { + switch self { + case .fileCreationFailed: + return "Unable to create other details file." + case .dataWriteFailed: + return "Failed to write details data to file." 
+ } + } +} + class OtherDetailsEncoder { private let path: URL let fileHandle: FileHandle @@ -23,17 +37,23 @@ class OtherDetailsEncoder { self.path = url try "".write(to: self.path, atomically: true, encoding: .utf8) self.fileHandle = try FileHandle(forWritingTo: self.path) - self.fileHandle.write("timestamp, frame, deviceOrientation, originalWidth, originalHeight\n".data(using: .utf8)!) + guard let header = "timestamp, frame, deviceOrientation, originalWidth, originalHeight\n".data(using: .utf8) else { + throw OtherDetailsEncoderError.fileCreationFailed + } + try self.fileHandle.write(contentsOf: header) } - func add(otherDetails: OtherDetailsData, frameNumber: UUID) { + func add(otherDetails: OtherDetailsData, frameNumber: UUID) throws { let frameNumber = String(frameNumber.uuidString) let deviceOrientationString: String = String(otherDetails.deviceOrientation.rawValue) let originalWidth = String(Float(otherDetails.originalSize.width)) let originalHeight = String(Float(otherDetails.originalSize.height)) let line = "\(otherDetails.timestamp), \(frameNumber), \(deviceOrientationString), \(originalWidth), \(originalHeight)\n" - self.fileHandle.write(line.data(using: .utf8)!) + guard let lineData = line.data(using: .utf8) else { + throw OtherDetailsEncoderError.dataWriteFailed + } + try self.fileHandle.write(contentsOf: lineData) } func done() throws { diff --git a/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift b/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift index 682d63e..17deb9b 100644 --- a/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift +++ b/IOSAccessAssessment/TDEI/OSW/OSWElementClass.swift @@ -42,17 +42,20 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { } struct Metadata: Sendable { + let name: String let description: String let parent: OSWElementClass? let geometry: OSWGeometry let identifyingFields: [IdentifyingField] init( + name: String, description: String, parent: OSWElementClass? 
= nil, geometry: OSWGeometry, identifyingFields: [IdentifyingField] = [] ) { + self.name = name self.description = description self.parent = parent self.geometry = geometry @@ -64,12 +67,14 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { switch self { case .BareNode: return Metadata( + name: "Bare Node", description: "A special case of an abstract Node.", parent: nil, geometry: .point ) case .Footway: return Metadata( + name: "Footway", description: "The centerline of a dedicated pedestrian path that does not fall into any other subcategories.", parent: nil, geometry: .linestring, @@ -79,6 +84,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { ) case .Sidewalk: return Metadata( + name: "Sidewalk", description: "The centerline of a sidewalk, a designated pedestrian path to the side of a street.", parent: .Footway, geometry: .linestring, @@ -89,6 +95,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { ) case .Building: return Metadata( + name: "Building", description: "This field is used to mark a given entity as a building", parent: nil, geometry: .polygon, @@ -100,6 +107,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { ) case .Pole: return Metadata( + name: "Pole", description: "Pole", parent: nil, geometry: .point, @@ -109,6 +117,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { ) case .TrafficLight: return Metadata( + name: "Traffic Light", description: "Traffic Light", parent: nil, geometry: .point, @@ -119,6 +128,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { ) case .TrafficSign: return Metadata( + name: "Traffic Sign", description: "Traffic Sign", parent: nil, geometry: .point, @@ -128,6 +138,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { ) case .Vegetation: return Metadata( + name: "Vegetation", description: "Vegetation", parent: nil, geometry: .point, @@ 
-137,6 +148,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { ) case .AppAnchorNode: return Metadata( + name: "App Anchor Node", description: "A point used for iOSPointMapper-specific analysis and mapping purposes.", parent: nil, geometry: .point, @@ -154,7 +166,7 @@ enum OSWElementClass: String, CaseIterable, Hashable, Sendable, Codable { */ extension OSWElementClass { var description: String { - return metadata.description + return metadata.name } var parent: OSWElementClass? { diff --git a/IOSAccessAssessment/View/ARCameraView.swift b/IOSAccessAssessment/View/ARCameraView.swift index 8d113fb..13444cc 100644 --- a/IOSAccessAssessment/View/ARCameraView.swift +++ b/IOSAccessAssessment/View/ARCameraView.swift @@ -256,13 +256,15 @@ struct ARCameraView: View { captureImageData: any CaptureImageDataProtocol, location: CLLocationCoordinate2D? ) { - do { - try sharedAppData.currentDatasetEncoder?.addCaptureData( - captureImageData: captureImageData, - location: captureLocation - ) - } catch { - print("Error adding capture data to dataset encoder: \(error)") + Task { + do { + try sharedAppData.currentDatasetEncoder?.addCaptureData( + captureImageData: captureImageData, + location: captureLocation + ) + } catch { + print("Error adding capture data to dataset encoder: \(error)") + } } } diff --git a/IOSAccessAssessment/View/AnnotationView.swift b/IOSAccessAssessment/View/AnnotationView.swift index 56c85cc..7822c25 100644 --- a/IOSAccessAssessment/View/AnnotationView.swift +++ b/IOSAccessAssessment/View/AnnotationView.swift @@ -238,8 +238,8 @@ struct AnnotationView: View { let apiTransmissionController: APITransmissionController = APITransmissionController() - @State private var managerStatusViewModel = AnnotationViewStatusViewModel() - @State private var apiTransmissionStatusViewModel = APITransmissionStatusViewModel() + @StateObject private var managerStatusViewModel = AnnotationViewStatusViewModel() + @StateObject private var 
apiTransmissionStatusViewModel = APITransmissionStatusViewModel() @State private var interfaceOrientation: UIInterfaceOrientation = .portrait // To bind one-way with manager's orientation @StateObject var featureClassSelectionViewModel = AnnotationFeatureClassSelectionViewModel() @@ -319,7 +319,8 @@ struct AnnotationView: View { }, message: { Text(managerStatusViewModel.errorMessage) }) - .alert(AnnotationViewConstants.Texts.managerStatusAlertTitleKey, isPresented: $apiTransmissionStatusViewModel.isFailed, actions: { + .alert(AnnotationViewConstants.Texts.apiTransmissionStatusAlertTitleKey, + isPresented: $apiTransmissionStatusViewModel.isFailed, actions: { Button(AnnotationViewConstants.Texts.managerStatusAlertDismissButtonKey) { apiTransmissionStatusViewModel.update(isFailed: false, errorMessage: "") do { @@ -576,7 +577,7 @@ struct AnnotationView: View { private func confirmAnnotation() { Task { do { - let apiTransmissionResults = try await uploadAnnotations() + let apiTransmissionResults = try await uploadFeatures() if let apiTransmissionResults, apiTransmissionResults.failedFeatureUploads > 0 { throw AnnotationViewError.apiTransmissionFailed(apiTransmissionResults) } @@ -608,7 +609,7 @@ struct AnnotationView: View { try featureClassSelectionViewModel.setCurrent(index: currentClassIndex + 1, classes: segmentedClasses) } - private func uploadAnnotations() async throws -> APITransmissionResults? { + private func uploadFeatures() async throws -> APITransmissionResults? 
{ guard let currentCaptureDataRecord = sharedAppData.currentCaptureDataRecord else { throw AnnotationViewError.invalidCaptureDataRecord } @@ -643,13 +644,38 @@ struct AnnotationView: View { accessToken: accessToken ) guard let mappedAccessibilityFeatures = apiTransmissionResults.accessibilityFeatures else { - throw AnnotationViewError.uploadFailed + throw AnnotationViewError.apiTransmissionFailed(apiTransmissionResults) } sharedAppData.mappingData.updateFeatures(mappedAccessibilityFeatures, for: accessibilityFeatureClass) print("Mapping Data: \(sharedAppData.mappingData)") + + addFeaturesToCurrentDataset( + captureImageData: currentCaptureDataRecord, + featuresToUpload: featuresToUpload, mappedAccessibilityFeatures: mappedAccessibilityFeatures + ) + sharedAppData.isUploadReady = true return apiTransmissionResults } + + private func addFeaturesToCurrentDataset( + captureImageData: any CaptureImageDataProtocol, + featuresToUpload: [any AccessibilityFeatureProtocol], + mappedAccessibilityFeatures: [any AccessibilityFeatureProtocol] + ) { + Task { + do { + try sharedAppData.currentDatasetEncoder?.addFeatures( + features: featuresToUpload, frameNumber: captureImageData.id, timestamp: captureImageData.timestamp + ) + try sharedAppData.currentDatasetEncoder?.addFeatures( + features: mappedAccessibilityFeatures, frameNumber: captureImageData.id, timestamp: captureImageData.timestamp + ) + } catch { + print("Error adding feature data to dataset encoder: \(error)") + } + } + } } struct SelectFeatureLearnMoreSheetView: View { diff --git a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift index 7286b12..e529499 100644 --- a/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift +++ b/IOSAccessAssessment/View/SubView/AnnotationFeatureDetailView.swift @@ -18,6 +18,9 @@ struct AnnotationFeatureDetailView: View { static let statusAlertTitleKey: String = "Error" static let 
statusAlertDismissAlertSuffixKey: String = "Press OK to dismiss this alert." static let statusAlertDismissButtonKey: String = "OK" + + /// Invalid + static let invalidTextKey: String = "Invalid" } enum Images { @@ -101,24 +104,28 @@ struct AnnotationFeatureDetailView: View { /** Location Section */ - if let featureLocation = accessibilityFeature.getLastLocationCoordinate() { - Section(header: Text(AnnotationViewConstants.Texts.featureDetailViewLocationKey)) { + + Section(header: Text(AnnotationViewConstants.Texts.featureDetailViewLocationKey)) { + if let featureLocation = accessibilityFeature.getLastLocationCoordinate() { HStack { Spacer() Text( locationFormatter.string( from: NSNumber(value: featureLocation.latitude) - ) ?? "N/A" + ) ?? AnnotationFeatureDetailView.Constants.Texts.invalidTextKey ) - .padding(.horizontal) + .padding(.horizontal) Text( locationFormatter.string( from: NSNumber(value: featureLocation.longitude) - ) ?? "N/A" + ) ?? AnnotationFeatureDetailView.Constants.Texts.invalidTextKey ) - .padding(.horizontal) + .padding(.horizontal) Spacer() } + } else { + Text(AnnotationFeatureDetailView.Constants.Texts.invalidTextKey) + .foregroundStyle(.secondary) } } @@ -242,7 +249,7 @@ struct AnnotationFeatureDetailView: View { guard let attributeValue = accessibilityFeature.experimentalAttributeValues[attribute], let attributeValue, let attributeBindableValue = attributeValue.toDouble() else { - return "N/A" + return AnnotationFeatureDetailView.Constants.Texts.invalidTextKey } return String(attributeBindableValue) }()