From bdf9f809101b1157a604cad62b135681c8dc8e22 Mon Sep 17 00:00:00 2001
From: Suhail Saqan
Date: Tue, 19 Sep 2023 13:24:11 -0700
Subject: [PATCH 01/13] add ImageResizer to change size of images

---
 damus/Models/Camera/ImageResizer.swift | 40 ++++++++++++++++++++++++++
 1 file changed, 40 insertions(+)
 create mode 100644 damus/Models/Camera/ImageResizer.swift

diff --git a/damus/Models/Camera/ImageResizer.swift b/damus/Models/Camera/ImageResizer.swift
new file mode 100644
index 0000000000..2130d354ef
--- /dev/null
+++ b/damus/Models/Camera/ImageResizer.swift
@@ -0,0 +1,40 @@
+//
+//  ImageResizer.swift
+//  damus
+//
+//  Created by Suhail Saqan on 8/5/23.
+//
+
+import Foundation
+import UIKit
+
+public enum ImageResizingError: Error {
+    case cannotRetrieveFromURL
+    case cannotRetrieveFromData
+}
+
+public struct ImageResizer {
+    public var targetWidth: CGFloat
+
+    public init(targetWidth: CGFloat) {
+        self.targetWidth = targetWidth
+    }
+
+    public func resize(at url: URL) -> UIImage? {
+        guard let image = UIImage(contentsOfFile: url.path) else {
+            return nil
+        }
+
+        return self.resize(image: image)
+    }
+
+    public func resize(image: UIImage) -> UIImage {
+        let originalSize = image.size
+        let targetSize = CGSize(width: targetWidth, height: targetWidth*originalSize.height/originalSize.width)
+        let renderer = UIGraphicsImageRenderer(size: targetSize)
+        return renderer.image { (context) in
+            image.draw(in: CGRect(origin: .zero, size: targetSize))
+        }
+    }
+}
+

From a8ad2d5d791edafb58b3ccbca431606d573eefd4 Mon Sep 17 00:00:00 2001
From: Suhail Saqan
Date: Tue, 19 Sep 2023 13:29:40 -0700
Subject: [PATCH 02/13] add PhotoCaptureProcessor and VideoCaptureProcessor for handling camera functions

---
 .../Models/Camera/PhotoCaptureProcessor.swift | 91 +++++++++++++++++++
 .../Models/Camera/VideoCaptureProcessor.swift | 77 ++++++++++++++++
 2 files changed, 168 insertions(+)
 create mode 100644 damus/Models/Camera/PhotoCaptureProcessor.swift
 create mode 100644 damus/Models/Camera/VideoCaptureProcessor.swift

diff --git a/damus/Models/Camera/PhotoCaptureProcessor.swift b/damus/Models/Camera/PhotoCaptureProcessor.swift
new file mode 100644
index 0000000000..9d5956daa0
--- /dev/null
+++ b/damus/Models/Camera/PhotoCaptureProcessor.swift
@@ -0,0 +1,91 @@
+//
+//  PhotoCaptureProcessor.swift
+//  damus
+//
+//  Created by Suhail Saqan on 8/5/23.
+//
+
+import Foundation
+import Photos
+
+class PhotoCaptureProcessor: NSObject {
+    private(set) var requestedPhotoSettings: AVCapturePhotoSettings
+    private(set) var photoOutput: AVCapturePhotoOutput?
+
+    lazy var context = CIContext()
+    var photoData: Data?
+    private var maxPhotoProcessingTime: CMTime?
+
+    private let willCapturePhotoAnimation: () -> Void
+    private let completionHandler: (PhotoCaptureProcessor) -> Void
+    private let photoProcessingHandler: (Bool) -> Void
+
+    init(with requestedPhotoSettings: AVCapturePhotoSettings,
+         photoOutput: AVCapturePhotoOutput?,
+         willCapturePhotoAnimation: @escaping () -> Void,
+         completionHandler: @escaping (PhotoCaptureProcessor) -> Void,
+         photoProcessingHandler: @escaping (Bool) -> Void) {
+        self.requestedPhotoSettings = requestedPhotoSettings
+        self.willCapturePhotoAnimation = willCapturePhotoAnimation
+        self.completionHandler = completionHandler
+        self.photoProcessingHandler = photoProcessingHandler
+        self.photoOutput = photoOutput
+    }
+
+    func capturePhoto(settings: AVCapturePhotoSettings) {
+        if let photoOutput = self.photoOutput {
+            photoOutput.capturePhoto(with: settings, delegate: self)
+        }
+    }
+}
+
+extension PhotoCaptureProcessor: AVCapturePhotoCaptureDelegate {
+    func photoOutput(_ output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
+        maxPhotoProcessingTime = resolvedSettings.photoProcessingTimeRange.start + resolvedSettings.photoProcessingTimeRange.duration
+    }
+
+    func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
+        DispatchQueue.main.async {
+            self.willCapturePhotoAnimation()
+        }
+
+        guard let maxPhotoProcessingTime = maxPhotoProcessingTime else {
+            return
+        }
+
+        DispatchQueue.main.async {
+            self.photoProcessingHandler(true)
+        }
+
+        let oneSecond = CMTime(seconds: 2, preferredTimescale: 1)
+        if maxPhotoProcessingTime > oneSecond {
+            DispatchQueue.main.async {
+                self.photoProcessingHandler(true)
+            }
+        }
+    }
+
+    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
+        DispatchQueue.main.async {
+            self.photoProcessingHandler(false)
+        }
+
+        if let error = error {
+            print("Error capturing photo: \(error)")
+        } else {
+            photoData = photo.fileDataRepresentation()
+
+        }
+    }
+
+    func photoOutput(_ output: AVCapturePhotoOutput, didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
+        if let error = error {
+            print("Error capturing photo: \(error)")
+            return
+        }
+
+        DispatchQueue.main.async {
+            self.completionHandler(self)
+        }
+    }
+}
diff --git a/damus/Models/Camera/VideoCaptureProcessor.swift b/damus/Models/Camera/VideoCaptureProcessor.swift
new file mode 100644
index 0000000000..7e9e614e8e
--- /dev/null
+++ b/damus/Models/Camera/VideoCaptureProcessor.swift
@@ -0,0 +1,77 @@
+//
+//  VideoCaptureProcessor.swift
+//  damus
+//
+//  Created by Suhail Saqan on 8/5/23.
+//
+
+import Foundation
+import AVFoundation
+import Photos
+
+class VideoCaptureProcessor: NSObject {
+    private(set) var movieOutput: AVCaptureMovieFileOutput?
+
+    private let beginHandler: () -> Void
+    private let completionHandler: (VideoCaptureProcessor, URL) -> Void
+    private let videoProcessingHandler: (Bool) -> Void
+    private var session: AVCaptureSession?
+ + init(movieOutput: AVCaptureMovieFileOutput?, + beginHandler: @escaping () -> Void, + completionHandler: @escaping (VideoCaptureProcessor, URL) -> Void, + videoProcessingHandler: @escaping (Bool) -> Void) { + self.beginHandler = beginHandler + self.completionHandler = completionHandler + self.videoProcessingHandler = videoProcessingHandler + self.movieOutput = movieOutput + } + + func startCapture(session: AVCaptureSession) { + if let movieOutput = self.movieOutput, session.isRunning { + let outputFileURL = uniqueOutputFileURL() + movieOutput.startRecording(to: outputFileURL, recordingDelegate: self) + } + } + + func stopCapture() { + if let movieOutput = self.movieOutput { + if movieOutput.isRecording { + movieOutput.stopRecording() + } + } + } + + private func uniqueOutputFileURL() -> URL { + let tempDirectory = FileManager.default.temporaryDirectory + let fileName = UUID().uuidString + ".mov" + return tempDirectory.appendingPathComponent(fileName) + } +} + +extension VideoCaptureProcessor: AVCaptureFileOutputRecordingDelegate { + + func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) { + DispatchQueue.main.async { + self.beginHandler() + } + } + + func fileOutput(_ output: AVCaptureFileOutput, willFinishRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) { + DispatchQueue.main.async { + self.videoProcessingHandler(true) + } + } + + func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) { + if let error = error { + print("Error capturing video: \(error)") + return + } + + DispatchQueue.main.async { + self.completionHandler(self, outputFileURL) + self.videoProcessingHandler(false) + } + } +} From a0e26b3e2e18d6343085d1262f430f7ec4e014bb Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Tue, 19 Sep 2023 13:33:41 -0700 Subject: [PATCH 03/13] add CameraModel and CameraService for interacting with the camera --- damus/Models/Camera/CameraModel.swift | 122 +++ .../Camera/CameraService+Extensions.swift | 32 + damus/Models/Camera/CameraService.swift | 693 ++++++++++++++++++ 3 files changed, 847 insertions(+) create mode 100644 damus/Models/Camera/CameraModel.swift create mode 100644 damus/Models/Camera/CameraService+Extensions.swift create mode 100644 damus/Models/Camera/CameraService.swift diff --git a/damus/Models/Camera/CameraModel.swift b/damus/Models/Camera/CameraModel.swift new file mode 100644 index 0000000000..73def5730d --- /dev/null +++ b/damus/Models/Camera/CameraModel.swift @@ -0,0 +1,122 @@ +// +// CameraModel.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import AVFoundation +import Combine + +final class CameraModel: ObservableObject { + private let service = CameraService() + + @Published var showAlertError = false + + @Published var isFlashOn = false + + @Published var willCapturePhoto = false + + @Published var isCameraButtonDisabled = false + + @Published var isPhotoProcessing = false + + @Published var isRecording = false + + @Published var captureMode: CameraMediaType = .image + + @Published public var mediaItems: [MediaItem] = [] + + @Published var thumbnail: Thumbnail! + + var alertError: AlertError! 
+ + var session: AVCaptureSession + + private var subscriptions = Set() + + init() { + self.session = service.session + + service.$shouldShowAlertView.sink { [weak self] (val) in + self?.alertError = self?.service.alertError + self?.showAlertError = val + } + .store(in: &self.subscriptions) + + service.$flashMode.sink { [weak self] (mode) in + self?.isFlashOn = mode == .on + } + .store(in: &self.subscriptions) + + service.$willCapturePhoto.sink { [weak self] (val) in + self?.willCapturePhoto = val + } + .store(in: &self.subscriptions) + + service.$isCameraButtonDisabled.sink { [weak self] (val) in + self?.isCameraButtonDisabled = val + } + .store(in: &self.subscriptions) + + service.$isPhotoProcessing.sink { [weak self] (val) in + self?.isPhotoProcessing = val + } + .store(in: &self.subscriptions) + + service.$isRecording.sink { [weak self] (val) in + self?.isRecording = val + } + .store(in: &self.subscriptions) + + service.$captureMode.sink { [weak self] (mode) in + self?.captureMode = mode + } + .store(in: &self.subscriptions) + + service.$mediaItems.sink { [weak self] (mode) in + self?.mediaItems = mode + } + .store(in: &self.subscriptions) + + service.$thumbnail.sink { [weak self] (thumbnail) in + guard let pic = thumbnail else { return } + self?.thumbnail = pic + } + .store(in: &self.subscriptions) + } + + func configure() { + service.checkForPermissions() + service.configure() + } + + func stop() { + service.stop() + } + + func capturePhoto() { + service.capturePhoto() + } + + func startRecording() { + service.startRecording() + } + + func stopRecording() { + service.stopRecording() + } + + func flipCamera() { + service.changeCamera() + } + + func zoom(with factor: CGFloat) { + service.set(zoom: factor) + } + + func switchFlash() { + service.flashMode = service.flashMode == .on ? .off : .on + } +} diff --git a/damus/Models/Camera/CameraService+Extensions.swift b/damus/Models/Camera/CameraService+Extensions.swift new file mode 100644 index 0000000000..2c31c48847 --- /dev/null +++ b/damus/Models/Camera/CameraService+Extensions.swift @@ -0,0 +1,32 @@ +// +// CameraService+Extensions.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import UIKit +import AVFoundation + +extension AVCaptureVideoOrientation { + init?(deviceOrientation: UIDeviceOrientation) { + switch deviceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeRight + case .landscapeRight: self = .landscapeLeft + default: return nil + } + } + + init?(interfaceOrientation: UIInterfaceOrientation) { + switch interfaceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeLeft + case .landscapeRight: self = .landscapeRight + default: return nil + } + } +} diff --git a/damus/Models/Camera/CameraService.swift b/damus/Models/Camera/CameraService.swift new file mode 100644 index 0000000000..e5a617163a --- /dev/null +++ b/damus/Models/Camera/CameraService.swift @@ -0,0 +1,693 @@ +// +// CameraService.swift +// Campus +// +// Created by Suhail Saqan on 8/5/23. 
+// + +import Foundation +import Combine +import AVFoundation +import Photos +import UIKit + +public struct Thumbnail: Identifiable, Equatable { + public var id: String + public var type: CameraMediaType + public var url: URL + + public init(id: String = UUID().uuidString, type: CameraMediaType, url: URL) { + self.id = id + self.type = type + self.url = url + } + + public var thumbnailImage: UIImage? { + switch type { + case .image: + return ImageResizer(targetWidth: 100).resize(at: url) + case .video: + return generateVideoThumbnail(for: url) + } + } +} + +public struct AlertError { + public var title: String = "" + public var message: String = "" + public var primaryButtonTitle = "Accept" + public var secondaryButtonTitle: String? + public var primaryAction: (() -> ())? + public var secondaryAction: (() -> ())? + + public init(title: String = "", message: String = "", primaryButtonTitle: String = "Accept", secondaryButtonTitle: String? = nil, primaryAction: (() -> ())? = nil, secondaryAction: (() -> ())? = nil) { + self.title = title + self.message = message + self.primaryAction = primaryAction + self.primaryButtonTitle = primaryButtonTitle + self.secondaryAction = secondaryAction + } +} + +func generateVideoThumbnail(for videoURL: URL) -> UIImage? { + let asset = AVAsset(url: videoURL) + let imageGenerator = AVAssetImageGenerator(asset: asset) + imageGenerator.appliesPreferredTrackTransform = true + + do { + let cgImage = try imageGenerator.copyCGImage(at: .zero, actualTime: nil) + return UIImage(cgImage: cgImage) + } catch { + print("Error generating thumbnail: \(error)") + return nil + } +} + +public enum CameraMediaType { + case image + case video +} + +public struct MediaItem { + let url: URL + let type: CameraMediaType +} + +public class CameraService: NSObject, Identifiable { + public let session = AVCaptureSession() + + public var isSessionRunning = false + public var isConfigured = false + var setupResult: SessionSetupResult = .success + + public var alertError: AlertError = AlertError() + + @Published public var flashMode: AVCaptureDevice.FlashMode = .off + @Published public var shouldShowAlertView = false + @Published public var isPhotoProcessing = false + @Published public var captureMode: CameraMediaType = .image + @Published public var isRecording: Bool = false + + @Published public var willCapturePhoto = false + @Published public var isCameraButtonDisabled = false + @Published public var isCameraUnavailable = false + @Published public var thumbnail: Thumbnail? + @Published public var mediaItems: [MediaItem] = [] + + public let sessionQueue = DispatchQueue(label: "io.damus.camera") + + @objc dynamic public var videoDeviceInput: AVCaptureDeviceInput! + @objc dynamic public var audioDeviceInput: AVCaptureDeviceInput! + + public let videoDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera], mediaType: .video, position: .unspecified) + + public let photoOutput = AVCapturePhotoOutput() + + public let movieOutput = AVCaptureMovieFileOutput() + + var videoCaptureProcessor: VideoCaptureProcessor? + var photoCaptureProcessor: PhotoCaptureProcessor? 
+ + public var keyValueObservations = [NSKeyValueObservation]() + + override public init() { + super.init() + + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + } + + enum SessionSetupResult { + case success + case notAuthorized + case configurationFailed + } + + public func configure() { + if !self.isSessionRunning && !self.isConfigured { + sessionQueue.async { + self.configureSession() + } + } + } + + public func checkForPermissions() { + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .authorized: + break + case .notDetermined: + sessionQueue.suspend() + AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in + if !granted { + self.setupResult = .notAuthorized + } + self.sessionQueue.resume() + }) + + default: + setupResult = .notAuthorized + + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Access", message: "Damus needs camera and microphone access. Enable in settings.", primaryButtonTitle: "Go to settings", secondaryButtonTitle: nil, primaryAction: { + UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!, + options: [:], completionHandler: nil) + + }, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraUnavailable = true + self.isCameraButtonDisabled = true + } + } + } + + private func configureSession() { + if setupResult != .success { + return + } + + session.beginConfiguration() + + session.sessionPreset = .high + + // Add video input. + do { + var defaultVideoDevice: AVCaptureDevice? + + if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) { + // If a rear dual camera is not available, default to the rear wide angle camera. + defaultVideoDevice = backCameraDevice + } else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) { + // If the rear wide angle camera isn't available, default to the front wide angle camera. + defaultVideoDevice = frontCameraDevice + } + + guard let videoDevice = defaultVideoDevice else { + print("Default video device is unavailable.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) + + if session.canAddInput(videoDeviceInput) { + session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + print("Couldn't add video device input to the session.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + let audioDevice = AVCaptureDevice.default(for: .audio) + let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!) + + if session.canAddInput(audioDeviceInput) { + session.addInput(audioDeviceInput) + self.audioDeviceInput = audioDeviceInput + } else { + print("Couldn't add audio device input to the session.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + // Add video output + if session.canAddOutput(movieOutput) { + session.addOutput(movieOutput) + } else { + print("Could not add movie output to the session") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + } catch { + print("Couldn't create video device input: \(error)") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + // Add the photo output. 
+ if session.canAddOutput(photoOutput) { + session.addOutput(photoOutput) + + photoOutput.maxPhotoQualityPrioritization = .quality + + } else { + print("Could not add photo output to the session") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + session.commitConfiguration() + self.isConfigured = true + + self.start() + } + + private func resumeInterruptedSession() { + sessionQueue.async { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + if !self.session.isRunning { + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Error", message: "Unable to resume camera", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraUnavailable = true + self.isCameraButtonDisabled = true + } + } else { + DispatchQueue.main.async { + self.isCameraUnavailable = false + self.isCameraButtonDisabled = false + } + } + } + } + + public func changeCamera() { + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + } + + sessionQueue.async { + let currentVideoDevice = self.videoDeviceInput.device + let currentPosition = currentVideoDevice.position + + let preferredPosition: AVCaptureDevice.Position + let preferredDeviceType: AVCaptureDevice.DeviceType + + switch currentPosition { + case .unspecified, .front: + preferredPosition = .back + preferredDeviceType = .builtInWideAngleCamera + + case .back: + preferredPosition = .front + preferredDeviceType = .builtInWideAngleCamera + + @unknown default: + print("Unknown capture position. Defaulting to back, dual-camera.") + preferredPosition = .back + preferredDeviceType = .builtInWideAngleCamera + } + let devices = self.videoDeviceDiscoverySession.devices + var newVideoDevice: AVCaptureDevice? 
= nil + + if let device = devices.first(where: { $0.position == preferredPosition && $0.deviceType == preferredDeviceType }) { + newVideoDevice = device + } else if let device = devices.first(where: { $0.position == preferredPosition }) { + newVideoDevice = device + } + + if let videoDevice = newVideoDevice { + do { + let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) + + self.session.beginConfiguration() + + self.session.removeInput(self.videoDeviceInput) + + if self.session.canAddInput(videoDeviceInput) { + NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: currentVideoDevice) + NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaDidChange), name: .AVCaptureDeviceSubjectAreaDidChange, object: videoDeviceInput.device) + + self.session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + self.session.addInput(self.videoDeviceInput) + } + + if let connection = self.photoOutput.connection(with: .video) { + if connection.isVideoStabilizationSupported { + connection.preferredVideoStabilizationMode = .auto + } + } + + self.photoOutput.maxPhotoQualityPrioritization = .quality + + self.session.commitConfiguration() + } catch { + print("Error occurred while creating video device input: \(error)") + } + } + + DispatchQueue.main.async { + self.isCameraButtonDisabled = false + } + } + } + + public func focus(with focusMode: AVCaptureDevice.FocusMode, exposureMode: AVCaptureDevice.ExposureMode, at devicePoint: CGPoint, monitorSubjectAreaChange: Bool) { + sessionQueue.async { + guard let device = self.videoDeviceInput?.device else { return } + do { + try device.lockForConfiguration() + + if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode) { + device.focusPointOfInterest = devicePoint + device.focusMode = focusMode + } + + if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) { + device.exposurePointOfInterest = devicePoint + device.exposureMode = exposureMode + } + + device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange + device.unlockForConfiguration() + } catch { + print("Could not lock device for configuration: \(error)") + } + } + } + + + public func focus(at focusPoint: CGPoint) { + let device = self.videoDeviceInput.device + do { + try device.lockForConfiguration() + if device.isFocusPointOfInterestSupported { + device.focusPointOfInterest = focusPoint + device.exposurePointOfInterest = focusPoint + device.exposureMode = .continuousAutoExposure + device.focusMode = .continuousAutoFocus + device.unlockForConfiguration() + } + } + catch { + print(error.localizedDescription) + } + } + + @objc public func stop(completion: (() -> ())? 
= nil) { + sessionQueue.async { + if self.isSessionRunning { + if self.setupResult == .success { + self.session.stopRunning() + self.isSessionRunning = self.session.isRunning + print("CAMERA STOPPED") + self.removeObservers() + + if !self.session.isRunning { + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + completion?() + } + } + } + } + } + } + + @objc public func start() { + sessionQueue.async { + if !self.isSessionRunning && self.isConfigured { + switch self.setupResult { + case .success: + self.addObservers() + self.session.startRunning() + print("CAMERA RUNNING") + self.isSessionRunning = self.session.isRunning + + if self.session.isRunning { + DispatchQueue.main.async { + self.isCameraButtonDisabled = false + self.isCameraUnavailable = false + } + } + + case .notAuthorized: + print("Application not authorized to use camera") + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + + case .configurationFailed: + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Error", message: "Camera configuration failed. Either your device camera is not available or other application is using it", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + } + } + } + } + + public func set(zoom: CGFloat) { + let factor = zoom < 1 ? 1 : zoom + let device = self.videoDeviceInput.device + + do { + try device.lockForConfiguration() + device.videoZoomFactor = factor + device.unlockForConfiguration() + } + catch { + print(error.localizedDescription) + } + } + + public func capturePhoto() { + if self.setupResult != .configurationFailed { + let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait + self.isCameraButtonDisabled = true + + sessionQueue.async { + if let photoOutputConnection = self.photoOutput.connection(with: .video) { + photoOutputConnection.videoOrientation = videoPreviewLayerOrientation + } + var photoSettings = AVCapturePhotoSettings() + + // Capture HEIF photos when supported. Enable according to user settings and high-resolution photos. + if (self.photoOutput.availablePhotoCodecTypes.contains(.hevc)) { + photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) + } + + if self.videoDeviceInput.device.isFlashAvailable { + photoSettings.flashMode = self.flashMode + } + + if !photoSettings.__availablePreviewPhotoPixelFormatTypes.isEmpty { + photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: photoSettings.__availablePreviewPhotoPixelFormatTypes.first!] 
+ } + + photoSettings.photoQualityPrioritization = .speed + + if self.photoCaptureProcessor == nil { + self.photoCaptureProcessor = PhotoCaptureProcessor(with: photoSettings, photoOutput: self.photoOutput, willCapturePhotoAnimation: { + DispatchQueue.main.async { + self.willCapturePhoto.toggle() + self.willCapturePhoto.toggle() + } + }, completionHandler: { (photoCaptureProcessor) in + if let data = photoCaptureProcessor.photoData { + let url = self.savePhoto(data: data) + if let unwrappedURL = url { + self.thumbnail = Thumbnail(type: .image, url: unwrappedURL) + } + } else { + print("Data for photo not found") + } + + self.isCameraButtonDisabled = false + }, photoProcessingHandler: { animate in + self.isPhotoProcessing = animate + }) + } + + self.photoCaptureProcessor?.capturePhoto(settings: photoSettings) + } + } + } + + public func startRecording() { + if self.setupResult != .configurationFailed { + let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait + self.isCameraButtonDisabled = true + + sessionQueue.async { + if let videoOutputConnection = self.movieOutput.connection(with: .video) { + videoOutputConnection.videoOrientation = videoPreviewLayerOrientation + + var videoSettings = [String: Any]() + + if self.movieOutput.availableVideoCodecTypes.contains(.hevc) == true { + videoSettings[AVVideoCodecKey] = AVVideoCodecType.hevc + self.movieOutput.setOutputSettings(videoSettings, for: videoOutputConnection) + } + } + + if self.videoCaptureProcessor == nil { + self.videoCaptureProcessor = VideoCaptureProcessor(movieOutput: self.movieOutput, beginHandler: { + self.isRecording = true + }, completionHandler: { (videoCaptureProcessor, outputFileURL) in + self.isCameraButtonDisabled = false + self.captureMode = .image + + self.mediaItems.append(MediaItem(url: outputFileURL, type: .video)) + self.thumbnail = Thumbnail(type: .video, url: outputFileURL) + }, videoProcessingHandler: { animate in + self.isPhotoProcessing = animate + }) + } + + self.videoCaptureProcessor?.startCapture(session: self.session) + } + } + } + + func stopRecording() { + if let videoCaptureProcessor = self.videoCaptureProcessor { + isRecording = false + videoCaptureProcessor.stopCapture() + } + } + + func savePhoto(imageType: String = "jpeg", data: Data) -> URL? 
{ + guard let uiImage = UIImage(data: data) else { + print("Error converting media data to UIImage") + return nil + } + + guard let compressedData = uiImage.jpegData(compressionQuality: 0.8) else { + print("Error converting UIImage to JPEG data") + return nil + } + + let temporaryDirectory = NSTemporaryDirectory() + let tempFileName = "\(UUID().uuidString).\(imageType)" + let tempFileURL = URL(fileURLWithPath: temporaryDirectory).appendingPathComponent(tempFileName) + + do { + try compressedData.write(to: tempFileURL) + self.mediaItems.append(MediaItem(url: tempFileURL, type: .image)) + return tempFileURL + } catch { + print("Error saving image data to temporary URL: \(error.localizedDescription)") + } + return nil + } + + private func addObservers() { + let systemPressureStateObservation = observe(\.videoDeviceInput.device.systemPressureState, options: .new) { _, change in + guard let systemPressureState = change.newValue else { return } + self.setRecommendedFrameRateRangeForPressureState(systemPressureState: systemPressureState) + } + keyValueObservations.append(systemPressureStateObservation) + +// NotificationCenter.default.addObserver(self, selector: #selector(self.onOrientationChange), name: UIDevice.orientationDidChangeNotification, object: nil) + + NotificationCenter.default.addObserver(self, + selector: #selector(subjectAreaDidChange), + name: .AVCaptureDeviceSubjectAreaDidChange, + object: videoDeviceInput.device) + + NotificationCenter.default.addObserver(self, selector: #selector(uiRequestedNewFocusArea), name: .init(rawValue: "UserDidRequestNewFocusPoint"), object: nil) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionRuntimeError), + name: .AVCaptureSessionRuntimeError, + object: session) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionWasInterrupted), + name: .AVCaptureSessionWasInterrupted, + object: session) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionInterruptionEnded), + name: .AVCaptureSessionInterruptionEnded, + object: session) + } + + private func removeObservers() { + NotificationCenter.default.removeObserver(self) + + for keyValueObservation in keyValueObservations { + keyValueObservation.invalidate() + } + keyValueObservations.removeAll() + } + + @objc private func uiRequestedNewFocusArea(notification: NSNotification) { + guard let userInfo = notification.userInfo as? [String: Any], let devicePoint = userInfo["devicePoint"] as? CGPoint else { return } + self.focus(at: devicePoint) + } + + @objc + private func subjectAreaDidChange(notification: NSNotification) { + let devicePoint = CGPoint(x: 0.5, y: 0.5) + focus(with: .continuousAutoFocus, exposureMode: .continuousAutoExposure, at: devicePoint, monitorSubjectAreaChange: false) + } + + @objc + private func sessionRuntimeError(notification: NSNotification) { + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? 
AVError else { return } + + print("Capture session runtime error: \(error)") + + if error.code == .mediaServicesWereReset { + sessionQueue.async { + if self.isSessionRunning { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + } + } + } + } + + private func setRecommendedFrameRateRangeForPressureState(systemPressureState: AVCaptureDevice.SystemPressureState) { + let pressureLevel = systemPressureState.level + if pressureLevel == .serious || pressureLevel == .critical { + do { + try self.videoDeviceInput.device.lockForConfiguration() + print("WARNING: Reached elevated system pressure level: \(pressureLevel). Throttling frame rate.") + self.videoDeviceInput.device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 20) + self.videoDeviceInput.device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 15) + self.videoDeviceInput.device.unlockForConfiguration() + } catch { + print("Could not lock device for configuration: \(error)") + } + } else if pressureLevel == .shutdown { + print("Session stopped running due to shutdown system pressure level.") + } + } + + @objc + private func sessionWasInterrupted(notification: NSNotification) { + DispatchQueue.main.async { + self.isCameraUnavailable = true + } + + if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, + let reasonIntegerValue = userInfoValue.integerValue, + let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) { + print("Capture session was interrupted with reason \(reason)") + + if reason == .audioDeviceInUseByAnotherClient || reason == .videoDeviceInUseByAnotherClient { + print("Session stopped running due to video devies in use by another client.") + } else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps { + print("Session stopped running due to video devies is not available with multiple foreground apps.") + } else if reason == .videoDeviceNotAvailableDueToSystemPressure { + print("Session stopped running due to shutdown system pressure level.") + } + } + } + + @objc + private func sessionInterruptionEnded(notification: NSNotification) { + print("Capture session interruption ended") + DispatchQueue.main.async { + self.isCameraUnavailable = false + } + } +} From 1f9b3c3b0e12b16e4d9f7fd805057ebb256dccdd Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Tue, 19 Sep 2023 13:35:52 -0700 Subject: [PATCH 04/13] add CameraPreview for displaying the view the camera is reading --- damus/Views/Camera/CameraPreview.swift | 95 ++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 damus/Views/Camera/CameraPreview.swift diff --git a/damus/Views/Camera/CameraPreview.swift b/damus/Views/Camera/CameraPreview.swift new file mode 100644 index 0000000000..48b4d98a66 --- /dev/null +++ b/damus/Views/Camera/CameraPreview.swift @@ -0,0 +1,95 @@ +// +// CameraPreview.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import UIKit +import AVFoundation +import SwiftUI + +public struct CameraPreview: UIViewRepresentable { + public class VideoPreviewView: UIView { + public override class var layerClass: AnyClass { + AVCaptureVideoPreviewLayer.self + } + + var videoPreviewLayer: AVCaptureVideoPreviewLayer { + return layer as! 
AVCaptureVideoPreviewLayer + } + + let focusView: UIView = { + let focusView = UIView(frame: CGRect(x: 0, y: 0, width: 30, height: 30)) + focusView.layer.borderColor = UIColor.white.cgColor + focusView.layer.borderWidth = 1.5 + focusView.layer.cornerRadius = 15 + focusView.layer.opacity = 0 + focusView.backgroundColor = .clear + return focusView + }() + + @objc func focusAndExposeTap(gestureRecognizer: UITapGestureRecognizer) { + let layerPoint = gestureRecognizer.location(in: gestureRecognizer.view) + + guard layerPoint.x >= 0 && layerPoint.x <= bounds.width && + layerPoint.y >= 0 && layerPoint.y <= bounds.height else { + return + } + + let devicePoint = videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: layerPoint) + + self.focusView.layer.frame = CGRect(origin: layerPoint, size: CGSize(width: 30, height: 30)) + + NotificationCenter.default.post(.init(name: .init("UserDidRequestNewFocusPoint"), object: nil, userInfo: ["devicePoint": devicePoint] as [AnyHashable: Any])) + + UIView.animate(withDuration: 0.3, animations: { + self.focusView.layer.opacity = 1 + }) { (completed) in + if completed { + UIView.animate(withDuration: 0.3) { + self.focusView.layer.opacity = 0 + } + } + } + } + + public override func layoutSubviews() { + super.layoutSubviews() + + videoPreviewLayer.videoGravity = .resizeAspectFill + + self.layer.addSublayer(focusView.layer) + + let gRecognizer = UITapGestureRecognizer(target: self, action: #selector(VideoPreviewView.focusAndExposeTap(gestureRecognizer:))) + self.addGestureRecognizer(gRecognizer) + } + } + + public let session: AVCaptureSession + + public init(session: AVCaptureSession) { + self.session = session + } + + public func makeUIView(context: Context) -> VideoPreviewView { + let viewFinder = VideoPreviewView() + viewFinder.backgroundColor = .black + viewFinder.videoPreviewLayer.cornerRadius = 20 + viewFinder.videoPreviewLayer.session = session + viewFinder.videoPreviewLayer.connection?.videoOrientation = .portrait + + return viewFinder + } + + public func updateUIView(_ uiView: VideoPreviewView, context: Context) { + + } +} + +struct CameraPreview_Previews: PreviewProvider { + static var previews: some View { + CameraPreview(session: AVCaptureSession()) + .frame(height: 300) + } +} From 5cf6e0e93eef01d5c1e1240ad82991a5573aecc8 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Tue, 19 Sep 2023 13:38:41 -0700 Subject: [PATCH 05/13] change the camera view to the new custom camera --- damus.xcodeproj/project.pbxproj | 54 ++++++- damus/Views/Camera/CameraView.swift | 212 ++++++++++++++++++++++++++++ damus/Views/PostView.swift | 42 +++--- 3 files changed, 285 insertions(+), 23 deletions(-) create mode 100644 damus/Views/Camera/CameraView.swift diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index c1686283d7..2df783f542 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -377,9 +377,17 @@ 9609F058296E220800069BF3 /* BannerImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9609F057296E220800069BF3 /* BannerImageView.swift */; }; 9C83F89329A937B900136C08 /* TextViewWrapper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9C83F89229A937B900136C08 /* TextViewWrapper.swift */; }; 9CA876E229A00CEA0003B9A3 /* AttachMediaUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CA876E129A00CE90003B9A3 /* AttachMediaUtility.swift */; }; + BA27222E2A806E39004CDF52 /* VideoCaptureProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA27222D2A806E39004CDF52 /* 
VideoCaptureProcessor.swift */; }; + BA3BF2892A7F156B00600232 /* PhotoCaptureProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2842A7F156900600232 /* PhotoCaptureProcessor.swift */; }; + BA3BF28A2A7F156B00600232 /* ImageResizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2852A7F156900600232 /* ImageResizer.swift */; }; + BA3BF28B2A7F156B00600232 /* CameraService+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2862A7F156A00600232 /* CameraService+Extensions.swift */; }; + BA3BF28C2A7F156B00600232 /* CameraService.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF2872A7F156A00600232 /* CameraService.swift */; }; + BA3BF28F2A7F1B2D00600232 /* CameraModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3BF28E2A7F1B2D00600232 /* CameraModel.swift */; }; BA4AB0AE2A63B9270070A32A /* AddEmojiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */; }; BA4AB0B02A63B94D0070A32A /* EmojiListItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */; }; BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA693073295D649800ADDB87 /* UserSettingsStore.swift */; }; + BA8A4F0F2A2D95F70045C48C /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA8A4F0E2A2D95F70045C48C /* CameraView.swift */; }; + BA8A4F132A2D96AD0045C48C /* CameraPreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA8A4F122A2D96AD0045C48C /* CameraPreview.swift */; }; BAB68BED29543FA3007BA466 /* SelectWalletView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BAB68BEC29543FA3007BA466 /* SelectWalletView.swift */; }; D2277EEA2A089BD5006C3807 /* Router.swift in Sources */ = {isa = PBXBuildFile; fileRef = D2277EE92A089BD5006C3807 /* Router.swift */; }; D78525252A7B2EA4002FA637 /* NoteContentViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D78525242A7B2EA4002FA637 /* NoteContentViewTests.swift */; }; @@ -928,9 +936,17 @@ 9609F057296E220800069BF3 /* BannerImageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BannerImageView.swift; sourceTree = ""; }; 9C83F89229A937B900136C08 /* TextViewWrapper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TextViewWrapper.swift; sourceTree = ""; }; 9CA876E129A00CE90003B9A3 /* AttachMediaUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttachMediaUtility.swift; sourceTree = ""; }; + BA27222D2A806E39004CDF52 /* VideoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoCaptureProcessor.swift; sourceTree = ""; }; + BA3BF2842A7F156900600232 /* PhotoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureProcessor.swift; sourceTree = ""; }; + BA3BF2852A7F156900600232 /* ImageResizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageResizer.swift; sourceTree = ""; }; + BA3BF2862A7F156A00600232 /* CameraService+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraService+Extensions.swift"; sourceTree = ""; }; + BA3BF2872A7F156A00600232 /* CameraService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraService.swift; sourceTree = ""; }; + 
BA3BF28E2A7F1B2D00600232 /* CameraModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraModel.swift; sourceTree = ""; }; BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AddEmojiView.swift; sourceTree = ""; }; BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiListItemView.swift; sourceTree = ""; }; BA693073295D649800ADDB87 /* UserSettingsStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSettingsStore.swift; sourceTree = ""; }; + BA8A4F0E2A2D95F70045C48C /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = ""; }; + BA8A4F122A2D96AD0045C48C /* CameraPreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPreview.swift; sourceTree = ""; }; BAB68BEC29543FA3007BA466 /* SelectWalletView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SelectWalletView.swift; sourceTree = ""; }; D2277EE92A089BD5006C3807 /* Router.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Router.swift; sourceTree = ""; }; D78525242A7B2EA4002FA637 /* NoteContentViewTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NoteContentViewTests.swift; sourceTree = ""; }; @@ -1071,6 +1087,7 @@ 4C0A3F8D280F63FF000448DE /* Models */ = { isa = PBXGroup; children = ( + BA3BF2832A7F151300600232 /* Camera */, 4C190F1E2A535FC200027FD5 /* Zaps */, 4C54AA0829A55416003E4487 /* Notifications */, 3AA247FC297E3CFF0090C62D /* RepostsModel.swift */, @@ -1236,6 +1253,7 @@ children = ( F71694E82A66221E001F4053 /* Onboarding */, 4C190F232A547D1700027FD5 /* NostrScript */, + BA8A4F0D2A2D95880045C48C /* Camera */, 4C7D09692A0AEA0400943473 /* CodeScanner */, 4C7D095A2A098C5C00943473 /* Wallet */, 4C8D1A6D29F31E4100ACDF75 /* Buttons */, @@ -1860,6 +1878,28 @@ path = Extensions; sourceTree = ""; }; + BA3BF2832A7F151300600232 /* Camera */ = { + isa = PBXGroup; + children = ( + BA27222D2A806E39004CDF52 /* VideoCaptureProcessor.swift */, + BA3BF2872A7F156A00600232 /* CameraService.swift */, + BA3BF2862A7F156A00600232 /* CameraService+Extensions.swift */, + BA3BF2852A7F156900600232 /* ImageResizer.swift */, + BA3BF2842A7F156900600232 /* PhotoCaptureProcessor.swift */, + BA3BF28E2A7F1B2D00600232 /* CameraModel.swift */, + ); + path = Camera; + sourceTree = ""; + }; + BA8A4F0D2A2D95880045C48C /* Camera */ = { + isa = PBXGroup; + children = ( + BA8A4F0E2A2D95F70045C48C /* CameraView.swift */, + BA8A4F122A2D96AD0045C48C /* CameraPreview.swift */, + ); + path = Camera; + sourceTree = ""; + }; F71694E82A66221E001F4053 /* Onboarding */ = { isa = PBXGroup; children = ( @@ -2110,6 +2150,7 @@ 4C190F252A547D2000027FD5 /* LoadScript.swift in Sources */, 4C59B98C2A76C2550032FFEB /* ProfileUpdatedNotify.swift in Sources */, 4C363A8C28236B92006E126D /* PubkeyView.swift in Sources */, + BA3BF28A2A7F156B00600232 /* ImageResizer.swift in Sources */, 4CDA128A29E9D10C0006FA5A /* SignalView.swift in Sources */, 4C12535C2A76CA540004F4B8 /* LoginNotify.swift in Sources */, 4C5C7E68284ED36500A22DF5 /* SearchHomeModel.swift in Sources */, @@ -2184,6 +2225,7 @@ 4CA2EFA0280E37AC0044ACD8 /* TimelineView.swift in Sources */, 4C30AC7629A5770900E2BD5A /* NotificationItemView.swift in Sources */, 4C86F7C42A76C44C00EC0817 /* ZappingNotify.swift in Sources 
*/, + BA27222E2A806E39004CDF52 /* VideoCaptureProcessor.swift in Sources */, 4C363A8428233689006E126D /* Parser.swift in Sources */, 3AAA95CA298DF87B00F3D526 /* TranslationService.swift in Sources */, 4CE4F9E328528C5200C00DD9 /* AddRelayView.swift in Sources */, @@ -2276,6 +2318,7 @@ 4C4E137D2A76D63600BDD832 /* UnmuteThreadNotify.swift in Sources */, 4CE4F0F829DB7399005914DB /* ThiccDivider.swift in Sources */, 4CE0E2B629A3ED5500DB4CA2 /* InnerTimelineView.swift in Sources */, + BA3BF28C2A7F156B00600232 /* CameraService.swift in Sources */, 4C363A8828236948006E126D /* BlocksView.swift in Sources */, 4C06670628FCB08600038D2A /* ImageCarousel.swift in Sources */, 3A23838E2A297DD200E5AA2E /* ZapButtonModel.swift in Sources */, @@ -2283,6 +2326,7 @@ 4C1D4FB12A7958E60024F453 /* VersionInfo.swift in Sources */, 5053ACA72A56DF3B00851AE3 /* DeveloperSettingsView.swift in Sources */, F79C7FAD29D5E9620000F946 /* EditPictureControl.swift in Sources */, + BA3BF2892A7F156B00600232 /* PhotoCaptureProcessor.swift in Sources */, 4C9F18E229AA9B6C008C55EC /* CustomizeZapView.swift in Sources */, 4C2859602A12A2BE004746F7 /* SupporterBadge.swift in Sources */, 4C1A9A2A29DDF54400516EAC /* DamusVideoPlayer.swift in Sources */, @@ -2355,6 +2399,7 @@ 4C3EA66028FF5E7700C48A62 /* node_id.c in Sources */, 4C687C212A5F7ED00092C550 /* DamusBackground.swift in Sources */, 4CA352A02A76AE80003BB08B /* Notify.swift in Sources */, + BA3BF28B2A7F156B00600232 /* CameraService+Extensions.swift in Sources */, 4CE6DEE727F7A08100C66700 /* damusApp.swift in Sources */, 4C1253582A76C9060004F4B8 /* PresentSheetNotify.swift in Sources */, 4C363A962827096D006E126D /* PostBlock.swift in Sources */, @@ -2399,6 +2444,7 @@ 5C513FBA297F72980072348F /* CustomPicker.swift in Sources */, 4C1253622A76D00B0004F4B8 /* PostNotify.swift in Sources */, 4CACA9D5280C31E100D9BBE8 /* ReplyView.swift in Sources */, + BA3BF28F2A7F1B2D00600232 /* CameraModel.swift in Sources */, F7908E92298B0F0700AB113A /* RelayDetailView.swift in Sources */, 4C9147002A2A891E00DDEA40 /* error.c in Sources */, 4CE879552996BAB900F758CC /* RelayPaidDetail.swift in Sources */, @@ -2408,6 +2454,7 @@ 4CF0ABD42980996B00D66079 /* Report.swift in Sources */, 4C06670B28FDE64700038D2A /* damus.c in Sources */, 4C1253642A76D08F0004F4B8 /* ReportNotify.swift in Sources */, + BA8A4F0F2A2D95F70045C48C /* CameraView.swift in Sources */, 4C1A9A2529DDDF2600516EAC /* ZapSettingsView.swift in Sources */, 4C2CDDF7299D4A5E00879FD5 /* Debouncer.swift in Sources */, 3AAA95CC298E07E900F3D526 /* DeepLPlan.swift in Sources */, @@ -2433,6 +2480,7 @@ 3AA59D1D2999B0400061C48E /* DraftsModel.swift in Sources */, 3169CAED294FCCFC00EE4006 /* Constants.swift in Sources */, 4C9AA14A2A4587A6003F49FD /* NotificationStatusModel.swift in Sources */, + BA8A4F132A2D96AD0045C48C /* CameraPreview.swift in Sources */, 4CB9D4A72992D02B00A9A7E4 /* ProfileNameView.swift in Sources */, 4CE4F0F429D779B5005914DB /* PostBox.swift in Sources */, 4C9B0DF32A65C46800CBDA21 /* ProfileEditButton.swift in Sources */, @@ -2744,7 +2792,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 11; DEVELOPMENT_ASSET_PATHS = "\"damus/Preview Content\""; - DEVELOPMENT_TEAM = XK7H4JAB3D; + DEVELOPMENT_TEAM = XL4476DR2X; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = damus/Info.plist; @@ -2770,7 +2818,7 @@ "$(PROJECT_DIR)", ); MARKETING_VERSION = 1.6; - PRODUCT_BUNDLE_IDENTIFIER = com.jb55.damus2; + PRODUCT_BUNDLE_IDENTIFIER = com.suhail.damus2; PRODUCT_NAME = "$(TARGET_NAME)"; SUPPORTED_PLATFORMS = "iphoneos 
iphonesimulator"; SUPPORTS_MACCATALYST = YES; @@ -2793,7 +2841,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 11; DEVELOPMENT_ASSET_PATHS = "\"damus/Preview Content\""; - DEVELOPMENT_TEAM = XK7H4JAB3D; + DEVELOPMENT_TEAM = XL4476DR2X; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = damus/Info.plist; diff --git a/damus/Views/Camera/CameraView.swift b/damus/Views/Camera/CameraView.swift new file mode 100644 index 0000000000..c021bb710c --- /dev/null +++ b/damus/Views/Camera/CameraView.swift @@ -0,0 +1,212 @@ +// +// CameraView.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import SwiftUI +import Combine +import AVFoundation + +struct CameraView: View { + let damus_state: DamusState + let action: (([MediaItem]) -> Void) + + @Environment(\.presentationMode) var presentationMode + + @StateObject var model: CameraModel + + @State var currentZoomFactor: CGFloat = 1.0 + + public init(damus_state: DamusState, action: @escaping (([MediaItem]) -> Void)) { + self.damus_state = damus_state + self.action = action + _model = StateObject(wrappedValue: CameraModel()) + } + + var captureButton: some View { + Button { + if model.isRecording { + withAnimation { + model.stopRecording() + } + } else { + withAnimation { + model.capturePhoto() + } + } + UIImpactFeedbackGenerator(style: .medium).impactOccurred() + } label: { + ZStack { + Circle() + .fill( model.isRecording ? .red : DamusColors.black) + .frame(width: model.isRecording ? 85 : 65, height: model.isRecording ? 85 : 65, alignment: .center) + + Circle() + .stroke( model.isRecording ? .red : DamusColors.white, lineWidth: 4) + .frame(width: model.isRecording ? 95 : 75, height: model.isRecording ? 95 : 75, alignment: .center) + } + .frame(alignment: .center) + } + .simultaneousGesture( + LongPressGesture(minimumDuration: 0.5).onEnded({ value in + if (!model.isCameraButtonDisabled) { + withAnimation { + model.startRecording() + model.captureMode = .video + } + } + }) + ) + .buttonStyle(.plain) + } + + var capturedPhotoThumbnail: some View { + ZStack { + if model.thumbnail != nil { + Image(uiImage: model.thumbnail.thumbnailImage!) + .resizable() + .aspectRatio(contentMode: .fill) + .frame(width: 60, height: 60) + .clipShape(RoundedRectangle(cornerRadius: 10, style: .continuous)) + } + if model.isPhotoProcessing { + ProgressView() + .progressViewStyle(CircularProgressViewStyle(tint: DamusColors.white)) + } + } + } + + var closeButton: some View { + Button { + presentationMode.wrappedValue.dismiss() + model.stop() + } label: { + HStack { + Image(systemName: "xmark") + .font(.system(size: 24)) + } + .frame(minWidth: 40, minHeight: 40) + } + .accentColor(DamusColors.white) + } + + var flipCameraButton: some View { + Button(action: { + model.flipCamera() + }, label: { + HStack { + Image(systemName: "camera.rotate.fill") + .font(.system(size: 20)) + } + .frame(minWidth: 40, minHeight: 40) + }) + .accentColor(DamusColors.white) + } + + var toggleFlashButton: some View { + Button(action: { + model.switchFlash() + }, label: { + HStack { + Image(systemName: model.isFlashOn ? "bolt.fill" : "bolt.slash.fill") + .font(.system(size: 20)) + } + .frame(minWidth: 40, minHeight: 40) + }) + .accentColor(model.isFlashOn ? 
.yellow : DamusColors.white) + } + + var body: some View { + NavigationView { + GeometryReader { reader in + ZStack { + DamusColors.black.edgesIgnoringSafeArea(.all) + + CameraPreview(session: model.session) + .padding(.bottom, 175) + .edgesIgnoringSafeArea(.all) + .gesture( + DragGesture().onChanged({ (val) in + if abs(val.translation.height) > abs(val.translation.width) { + let percentage: CGFloat = -(val.translation.height / reader.size.height) + let calc = currentZoomFactor + percentage + let zoomFactor: CGFloat = min(max(calc, 1), 5) + + currentZoomFactor = zoomFactor + model.zoom(with: zoomFactor) + } + }) + ) + .onAppear { + model.configure() + } + .alert(isPresented: $model.showAlertError, content: { + Alert(title: Text(model.alertError.title), message: Text(model.alertError.message), dismissButton: .default(Text(model.alertError.primaryButtonTitle), action: { + model.alertError.primaryAction?() + })) + }) + .overlay( + Group { + if model.willCapturePhoto { + Color.black + } + } + ) + + VStack { + if !model.isRecording { + HStack { + closeButton + + Spacer() + + HStack { + flipCameraButton + toggleFlashButton + } + } + .padding(.horizontal, 20) + } + + Spacer() + + HStack(alignment: .center) { + if !model.mediaItems.isEmpty { + NavigationLink(destination: Text(model.mediaItems.map { $0.url.absoluteString }.joined(separator: ", "))) { + capturedPhotoThumbnail + } + .frame(width: 100, alignment: .leading) + } + + Spacer() + + captureButton + + Spacer() + + if !model.mediaItems.isEmpty { + Button(action: { + action(model.mediaItems) + presentationMode.wrappedValue.dismiss() + model.stop() + }) { + Text("Upload") + .frame(width: 100, height: 40, alignment: .center) + .foregroundColor(DamusColors.white) + .overlay { + RoundedRectangle(cornerRadius: 24) + .stroke(DamusColors.white, lineWidth: 2) + } + } + } + } + .frame(height: 100) + .padding([.horizontal, .vertical], 20) + } + } + } + } + } +} diff --git a/damus/Views/PostView.swift b/damus/Views/PostView.swift index 8c7897087e..d2fde5597f 100644 --- a/damus/Views/PostView.swift +++ b/damus/Views/PostView.swift @@ -56,7 +56,7 @@ struct PostView: View { @State var newCursorIndex: Int? @State var postTextViewCanScroll: Bool = true - @State var mediaToUpload: MediaUpload? 
= nil + @State var mediaToUpload: [MediaUpload] = [] @StateObject var image_upload: ImageUploadModel = ImageUploadModel() @StateObject var tagModel: TagModel = TagModel() @@ -325,6 +325,15 @@ struct PostView: View { pks.append(pk) } } + + func addToMediaToUpload(mediaItem: MediaItem) { + switch mediaItem.type { + case .image: + mediaToUpload.append(.image(mediaItem.url)) + case .video: + mediaToUpload.append(.video(mediaItem.url)) + } + } var body: some View { GeometryReader { (deviceSize: GeometryProxy) in @@ -363,36 +372,29 @@ struct PostView: View { } .sheet(isPresented: $attach_media) { ImagePicker(uploader: damus_state.settings.default_media_uploader, sourceType: .photoLibrary, pubkey: damus_state.pubkey, image_upload_confirm: $image_upload_confirm) { img in - self.mediaToUpload = .image(img) + self.mediaToUpload.append(.image(img)) } onVideoPicked: { url in - self.mediaToUpload = .video(url) + self.mediaToUpload.append(.video(url)) } .alert(NSLocalizedString("Are you sure you want to upload this media?", comment: "Alert message asking if the user wants to upload media."), isPresented: $image_upload_confirm) { Button(NSLocalizedString("Upload", comment: "Button to proceed with uploading."), role: .none) { - if let mediaToUpload { - self.handle_upload(media: mediaToUpload) + if !mediaToUpload.isEmpty { + self.handle_upload(media: mediaToUpload[0]) self.attach_media = false } } Button(NSLocalizedString("Cancel", comment: "Button to cancel the upload."), role: .cancel) {} } } - .sheet(isPresented: $attach_camera) { - - ImagePicker(uploader: damus_state.settings.default_media_uploader, sourceType: .camera, pubkey: damus_state.pubkey, image_upload_confirm: $image_upload_confirm) { img in - self.mediaToUpload = .image(img) - } onVideoPicked: { url in - self.mediaToUpload = .video(url) - } - .alert(NSLocalizedString("Are you sure you want to upload this media?", comment: "Alert message asking if the user wants to upload media."), isPresented: $image_upload_confirm) { - Button(NSLocalizedString("Upload", comment: "Button to proceed with uploading."), role: .none) { - if let mediaToUpload { - self.handle_upload(media: mediaToUpload) - self.attach_camera = false - } + .fullScreenCover(isPresented: $attach_camera) { + CameraView(damus_state: damus_state, action: { items in + for item in items { + addToMediaToUpload(mediaItem: item) } - Button(NSLocalizedString("Cancel", comment: "Button to cancel the upload."), role: .cancel) {} - } + for media in mediaToUpload { + self.handle_upload(media: media) + } + }) } .onAppear() { let loaded_draft = load_draft() From 8ac38feac13eb9080895fe569731496ecb8d8612 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Tue, 19 Sep 2023 13:29:40 -0700 Subject: [PATCH 06/13] add PhotoCaptureProcessor and VideoCaptureProcessor for handling camera functions --- .../Models/Camera/PhotoCaptureProcessor.swift | 91 +++++++++++++++++++ .../Models/Camera/VideoCaptureProcessor.swift | 77 ++++++++++++++++ 2 files changed, 168 insertions(+) create mode 100644 damus/Models/Camera/PhotoCaptureProcessor.swift create mode 100644 damus/Models/Camera/VideoCaptureProcessor.swift diff --git a/damus/Models/Camera/PhotoCaptureProcessor.swift b/damus/Models/Camera/PhotoCaptureProcessor.swift new file mode 100644 index 0000000000..9d5956daa0 --- /dev/null +++ b/damus/Models/Camera/PhotoCaptureProcessor.swift @@ -0,0 +1,91 @@ +// +// PhotoCaptureProcessor.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. 
+// + +import Foundation +import Photos + +class PhotoCaptureProcessor: NSObject { + private(set) var requestedPhotoSettings: AVCapturePhotoSettings + private(set) var photoOutput: AVCapturePhotoOutput? + + lazy var context = CIContext() + var photoData: Data? + private var maxPhotoProcessingTime: CMTime? + + private let willCapturePhotoAnimation: () -> Void + private let completionHandler: (PhotoCaptureProcessor) -> Void + private let photoProcessingHandler: (Bool) -> Void + + init(with requestedPhotoSettings: AVCapturePhotoSettings, + photoOutput: AVCapturePhotoOutput?, + willCapturePhotoAnimation: @escaping () -> Void, + completionHandler: @escaping (PhotoCaptureProcessor) -> Void, + photoProcessingHandler: @escaping (Bool) -> Void) { + self.requestedPhotoSettings = requestedPhotoSettings + self.willCapturePhotoAnimation = willCapturePhotoAnimation + self.completionHandler = completionHandler + self.photoProcessingHandler = photoProcessingHandler + self.photoOutput = photoOutput + } + + func capturePhoto(settings: AVCapturePhotoSettings) { + if let photoOutput = self.photoOutput { + photoOutput.capturePhoto(with: settings, delegate: self) + } + } +} + +extension PhotoCaptureProcessor: AVCapturePhotoCaptureDelegate { + func photoOutput(_ output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) { + maxPhotoProcessingTime = resolvedSettings.photoProcessingTimeRange.start + resolvedSettings.photoProcessingTimeRange.duration + } + + func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) { + DispatchQueue.main.async { + self.willCapturePhotoAnimation() + } + + guard let maxPhotoProcessingTime = maxPhotoProcessingTime else { + return + } + + DispatchQueue.main.async { + self.photoProcessingHandler(true) + } + + let oneSecond = CMTime(seconds: 2, preferredTimescale: 1) + if maxPhotoProcessingTime > oneSecond { + DispatchQueue.main.async { + self.photoProcessingHandler(true) + } + } + } + + func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) { + DispatchQueue.main.async { + self.photoProcessingHandler(false) + } + + if let error = error { + print("Error capturing photo: \(error)") + } else { + photoData = photo.fileDataRepresentation() + + } + } + + func photoOutput(_ output: AVCapturePhotoOutput, didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) { + if let error = error { + print("Error capturing photo: \(error)") + return + } + + DispatchQueue.main.async { + self.completionHandler(self) + } + } +} diff --git a/damus/Models/Camera/VideoCaptureProcessor.swift b/damus/Models/Camera/VideoCaptureProcessor.swift new file mode 100644 index 0000000000..7e9e614e8e --- /dev/null +++ b/damus/Models/Camera/VideoCaptureProcessor.swift @@ -0,0 +1,77 @@ +// +// VideoCaptureProcessor.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import AVFoundation +import Photos + +class VideoCaptureProcessor: NSObject { + private(set) var movieOutput: AVCaptureMovieFileOutput? + + private let beginHandler: () -> Void + private let completionHandler: (VideoCaptureProcessor, URL) -> Void + private let videoProcessingHandler: (Bool) -> Void + private var session: AVCaptureSession? 
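+ // Unused by the capture methods below; startCapture(session:) is handed the active AVCaptureSession directly.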
+ + init(movieOutput: AVCaptureMovieFileOutput?, + beginHandler: @escaping () -> Void, + completionHandler: @escaping (VideoCaptureProcessor, URL) -> Void, + videoProcessingHandler: @escaping (Bool) -> Void) { + self.beginHandler = beginHandler + self.completionHandler = completionHandler + self.videoProcessingHandler = videoProcessingHandler + self.movieOutput = movieOutput + } + + func startCapture(session: AVCaptureSession) { + if let movieOutput = self.movieOutput, session.isRunning { + let outputFileURL = uniqueOutputFileURL() + movieOutput.startRecording(to: outputFileURL, recordingDelegate: self) + } + } + + func stopCapture() { + if let movieOutput = self.movieOutput { + if movieOutput.isRecording { + movieOutput.stopRecording() + } + } + } + + private func uniqueOutputFileURL() -> URL { + let tempDirectory = FileManager.default.temporaryDirectory + let fileName = UUID().uuidString + ".mov" + return tempDirectory.appendingPathComponent(fileName) + } +} + +extension VideoCaptureProcessor: AVCaptureFileOutputRecordingDelegate { + + func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) { + DispatchQueue.main.async { + self.beginHandler() + } + } + + func fileOutput(_ output: AVCaptureFileOutput, willFinishRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) { + DispatchQueue.main.async { + self.videoProcessingHandler(true) + } + } + + func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) { + if let error = error { + print("Error capturing video: \(error)") + return + } + + DispatchQueue.main.async { + self.completionHandler(self, outputFileURL) + self.videoProcessingHandler(false) + } + } +} From 00ba8f9d0953eddcf0664e19f10a7f811966c538 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Thu, 21 Sep 2023 12:18:35 -0700 Subject: [PATCH 07/13] add PhotoCaptureProcessor and VideoCaptureProcessor for handling camera functions --- damus.xcodeproj/project.pbxproj | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index 9759c819b3..18e32e922d 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -378,6 +378,8 @@ 9C83F89329A937B900136C08 /* TextViewWrapper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9C83F89229A937B900136C08 /* TextViewWrapper.swift */; }; 9CA876E229A00CEA0003B9A3 /* AttachMediaUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CA876E129A00CE90003B9A3 /* AttachMediaUtility.swift */; }; BA37598A2ABCCDE40018D73B /* ImageResizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759892ABCCDE30018D73B /* ImageResizer.swift */; }; + BA37598D2ABCCE500018D73B /* PhotoCaptureProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA37598B2ABCCE500018D73B /* PhotoCaptureProcessor.swift */; }; + BA37598E2ABCCE500018D73B /* VideoCaptureProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA37598C2ABCCE500018D73B /* VideoCaptureProcessor.swift */; }; BA4AB0AE2A63B9270070A32A /* AddEmojiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */; }; BA4AB0B02A63B94D0070A32A /* EmojiListItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */; }; BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA693073295D649800ADDB87 /* 
UserSettingsStore.swift */; }; @@ -930,6 +932,8 @@ 9C83F89229A937B900136C08 /* TextViewWrapper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TextViewWrapper.swift; sourceTree = ""; }; 9CA876E129A00CE90003B9A3 /* AttachMediaUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttachMediaUtility.swift; sourceTree = ""; }; BA3759892ABCCDE30018D73B /* ImageResizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageResizer.swift; sourceTree = ""; }; + BA37598B2ABCCE500018D73B /* PhotoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureProcessor.swift; sourceTree = ""; }; + BA37598C2ABCCE500018D73B /* VideoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoCaptureProcessor.swift; sourceTree = ""; }; BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AddEmojiView.swift; sourceTree = ""; }; BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiListItemView.swift; sourceTree = ""; }; BA693073295D649800ADDB87 /* UserSettingsStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSettingsStore.swift; sourceTree = ""; }; @@ -1867,6 +1871,8 @@ isa = PBXGroup; children = ( BA3759892ABCCDE30018D73B /* ImageResizer.swift */, + BA37598B2ABCCE500018D73B /* PhotoCaptureProcessor.swift */, + BA37598C2ABCCE500018D73B /* VideoCaptureProcessor.swift */, ); path = Camera; sourceTree = ""; @@ -2298,6 +2304,7 @@ 4C2859602A12A2BE004746F7 /* SupporterBadge.swift in Sources */, 4C1A9A2A29DDF54400516EAC /* DamusVideoPlayer.swift in Sources */, 4CA352A22A76AEC5003BB08B /* LikedNotify.swift in Sources */, + BA37598D2ABCCE500018D73B /* PhotoCaptureProcessor.swift in Sources */, 4C9146FD2A2A87C200DDEA40 /* wasm.c in Sources */, 4C75EFAF28049D350006080F /* NostrFilter.swift in Sources */, 4C3EA64C28FF59AC00C48A62 /* bech32_util.c in Sources */, @@ -2447,6 +2454,7 @@ 4C9AA14A2A4587A6003F49FD /* NotificationStatusModel.swift in Sources */, 4CB9D4A72992D02B00A9A7E4 /* ProfileNameView.swift in Sources */, 4CE4F0F429D779B5005914DB /* PostBox.swift in Sources */, + BA37598E2ABCCE500018D73B /* VideoCaptureProcessor.swift in Sources */, 4C9B0DF32A65C46800CBDA21 /* ProfileEditButton.swift in Sources */, 4C2859622A12A7F0004746F7 /* GoldSupportGradient.swift in Sources */, ); From f141040259e039d5e5da2c719692ee96435e8de9 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Tue, 19 Sep 2023 13:33:41 -0700 Subject: [PATCH 08/13] add CameraModel and CameraService for interacting with the camera --- damus/Models/Camera/CameraModel.swift | 122 +++ .../Camera/CameraService+Extensions.swift | 32 + damus/Models/Camera/CameraService.swift | 693 ++++++++++++++++++ 3 files changed, 847 insertions(+) create mode 100644 damus/Models/Camera/CameraModel.swift create mode 100644 damus/Models/Camera/CameraService+Extensions.swift create mode 100644 damus/Models/Camera/CameraService.swift diff --git a/damus/Models/Camera/CameraModel.swift b/damus/Models/Camera/CameraModel.swift new file mode 100644 index 0000000000..73def5730d --- /dev/null +++ b/damus/Models/Camera/CameraModel.swift @@ -0,0 +1,122 @@ +// +// CameraModel.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. 
+// + +import Foundation +import AVFoundation +import Combine + +final class CameraModel: ObservableObject { + private let service = CameraService() + + @Published var showAlertError = false + + @Published var isFlashOn = false + + @Published var willCapturePhoto = false + + @Published var isCameraButtonDisabled = false + + @Published var isPhotoProcessing = false + + @Published var isRecording = false + + @Published var captureMode: CameraMediaType = .image + + @Published public var mediaItems: [MediaItem] = [] + + @Published var thumbnail: Thumbnail! + + var alertError: AlertError! + + var session: AVCaptureSession + + private var subscriptions = Set() + + init() { + self.session = service.session + + service.$shouldShowAlertView.sink { [weak self] (val) in + self?.alertError = self?.service.alertError + self?.showAlertError = val + } + .store(in: &self.subscriptions) + + service.$flashMode.sink { [weak self] (mode) in + self?.isFlashOn = mode == .on + } + .store(in: &self.subscriptions) + + service.$willCapturePhoto.sink { [weak self] (val) in + self?.willCapturePhoto = val + } + .store(in: &self.subscriptions) + + service.$isCameraButtonDisabled.sink { [weak self] (val) in + self?.isCameraButtonDisabled = val + } + .store(in: &self.subscriptions) + + service.$isPhotoProcessing.sink { [weak self] (val) in + self?.isPhotoProcessing = val + } + .store(in: &self.subscriptions) + + service.$isRecording.sink { [weak self] (val) in + self?.isRecording = val + } + .store(in: &self.subscriptions) + + service.$captureMode.sink { [weak self] (mode) in + self?.captureMode = mode + } + .store(in: &self.subscriptions) + + service.$mediaItems.sink { [weak self] (mode) in + self?.mediaItems = mode + } + .store(in: &self.subscriptions) + + service.$thumbnail.sink { [weak self] (thumbnail) in + guard let pic = thumbnail else { return } + self?.thumbnail = pic + } + .store(in: &self.subscriptions) + } + + func configure() { + service.checkForPermissions() + service.configure() + } + + func stop() { + service.stop() + } + + func capturePhoto() { + service.capturePhoto() + } + + func startRecording() { + service.startRecording() + } + + func stopRecording() { + service.stopRecording() + } + + func flipCamera() { + service.changeCamera() + } + + func zoom(with factor: CGFloat) { + service.set(zoom: factor) + } + + func switchFlash() { + service.flashMode = service.flashMode == .on ? .off : .on + } +} diff --git a/damus/Models/Camera/CameraService+Extensions.swift b/damus/Models/Camera/CameraService+Extensions.swift new file mode 100644 index 0000000000..2c31c48847 --- /dev/null +++ b/damus/Models/Camera/CameraService+Extensions.swift @@ -0,0 +1,32 @@ +// +// CameraService+Extensions.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. 
+// + +import Foundation +import UIKit +import AVFoundation + +extension AVCaptureVideoOrientation { + init?(deviceOrientation: UIDeviceOrientation) { + switch deviceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeRight + case .landscapeRight: self = .landscapeLeft + default: return nil + } + } + + init?(interfaceOrientation: UIInterfaceOrientation) { + switch interfaceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeLeft + case .landscapeRight: self = .landscapeRight + default: return nil + } + } +} diff --git a/damus/Models/Camera/CameraService.swift b/damus/Models/Camera/CameraService.swift new file mode 100644 index 0000000000..e5a617163a --- /dev/null +++ b/damus/Models/Camera/CameraService.swift @@ -0,0 +1,693 @@ +// +// CameraService.swift +// Campus +// +// Created by Suhail Saqan on 8/5/23. +// + +import Foundation +import Combine +import AVFoundation +import Photos +import UIKit + +public struct Thumbnail: Identifiable, Equatable { + public var id: String + public var type: CameraMediaType + public var url: URL + + public init(id: String = UUID().uuidString, type: CameraMediaType, url: URL) { + self.id = id + self.type = type + self.url = url + } + + public var thumbnailImage: UIImage? { + switch type { + case .image: + return ImageResizer(targetWidth: 100).resize(at: url) + case .video: + return generateVideoThumbnail(for: url) + } + } +} + +public struct AlertError { + public var title: String = "" + public var message: String = "" + public var primaryButtonTitle = "Accept" + public var secondaryButtonTitle: String? + public var primaryAction: (() -> ())? + public var secondaryAction: (() -> ())? + + public init(title: String = "", message: String = "", primaryButtonTitle: String = "Accept", secondaryButtonTitle: String? = nil, primaryAction: (() -> ())? = nil, secondaryAction: (() -> ())? = nil) { + self.title = title + self.message = message + self.primaryAction = primaryAction + self.primaryButtonTitle = primaryButtonTitle + self.secondaryAction = secondaryAction + } +} + +func generateVideoThumbnail(for videoURL: URL) -> UIImage? { + let asset = AVAsset(url: videoURL) + let imageGenerator = AVAssetImageGenerator(asset: asset) + imageGenerator.appliesPreferredTrackTransform = true + + do { + let cgImage = try imageGenerator.copyCGImage(at: .zero, actualTime: nil) + return UIImage(cgImage: cgImage) + } catch { + print("Error generating thumbnail: \(error)") + return nil + } +} + +public enum CameraMediaType { + case image + case video +} + +public struct MediaItem { + let url: URL + let type: CameraMediaType +} + +public class CameraService: NSObject, Identifiable { + public let session = AVCaptureSession() + + public var isSessionRunning = false + public var isConfigured = false + var setupResult: SessionSetupResult = .success + + public var alertError: AlertError = AlertError() + + @Published public var flashMode: AVCaptureDevice.FlashMode = .off + @Published public var shouldShowAlertView = false + @Published public var isPhotoProcessing = false + @Published public var captureMode: CameraMediaType = .image + @Published public var isRecording: Bool = false + + @Published public var willCapturePhoto = false + @Published public var isCameraButtonDisabled = false + @Published public var isCameraUnavailable = false + @Published public var thumbnail: Thumbnail? 
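+ // The most recent capture: CameraModel republishes it so CameraView can show a thumbnail of the last photo or video.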
+ @Published public var mediaItems: [MediaItem] = [] + + public let sessionQueue = DispatchQueue(label: "io.damus.camera") + + @objc dynamic public var videoDeviceInput: AVCaptureDeviceInput! + @objc dynamic public var audioDeviceInput: AVCaptureDeviceInput! + + public let videoDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera], mediaType: .video, position: .unspecified) + + public let photoOutput = AVCapturePhotoOutput() + + public let movieOutput = AVCaptureMovieFileOutput() + + var videoCaptureProcessor: VideoCaptureProcessor? + var photoCaptureProcessor: PhotoCaptureProcessor? + + public var keyValueObservations = [NSKeyValueObservation]() + + override public init() { + super.init() + + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + } + + enum SessionSetupResult { + case success + case notAuthorized + case configurationFailed + } + + public func configure() { + if !self.isSessionRunning && !self.isConfigured { + sessionQueue.async { + self.configureSession() + } + } + } + + public func checkForPermissions() { + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .authorized: + break + case .notDetermined: + sessionQueue.suspend() + AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in + if !granted { + self.setupResult = .notAuthorized + } + self.sessionQueue.resume() + }) + + default: + setupResult = .notAuthorized + + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Access", message: "Damus needs camera and microphone access. Enable in settings.", primaryButtonTitle: "Go to settings", secondaryButtonTitle: nil, primaryAction: { + UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!, + options: [:], completionHandler: nil) + + }, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraUnavailable = true + self.isCameraButtonDisabled = true + } + } + } + + private func configureSession() { + if setupResult != .success { + return + } + + session.beginConfiguration() + + session.sessionPreset = .high + + // Add video input. + do { + var defaultVideoDevice: AVCaptureDevice? + + if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) { + // If a rear dual camera is not available, default to the rear wide angle camera. + defaultVideoDevice = backCameraDevice + } else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) { + // If the rear wide angle camera isn't available, default to the front wide angle camera. + defaultVideoDevice = frontCameraDevice + } + + guard let videoDevice = defaultVideoDevice else { + print("Default video device is unavailable.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) + + if session.canAddInput(videoDeviceInput) { + session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + print("Couldn't add video device input to the session.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + let audioDevice = AVCaptureDevice.default(for: .audio) + let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!) 
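+ // NOTE: audioDevice is force-unwrapped; if no default audio device is available this line crashes, so guarding the optional would be safer.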
+ + if session.canAddInput(audioDeviceInput) { + session.addInput(audioDeviceInput) + self.audioDeviceInput = audioDeviceInput + } else { + print("Couldn't add audio device input to the session.") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + // Add video output + if session.canAddOutput(movieOutput) { + session.addOutput(movieOutput) + } else { + print("Could not add movie output to the session") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + } catch { + print("Couldn't create video device input: \(error)") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + // Add the photo output. + if session.canAddOutput(photoOutput) { + session.addOutput(photoOutput) + + photoOutput.maxPhotoQualityPrioritization = .quality + + } else { + print("Could not add photo output to the session") + setupResult = .configurationFailed + session.commitConfiguration() + return + } + + session.commitConfiguration() + self.isConfigured = true + + self.start() + } + + private func resumeInterruptedSession() { + sessionQueue.async { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + if !self.session.isRunning { + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Error", message: "Unable to resume camera", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraUnavailable = true + self.isCameraButtonDisabled = true + } + } else { + DispatchQueue.main.async { + self.isCameraUnavailable = false + self.isCameraButtonDisabled = false + } + } + } + } + + public func changeCamera() { + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + } + + sessionQueue.async { + let currentVideoDevice = self.videoDeviceInput.device + let currentPosition = currentVideoDevice.position + + let preferredPosition: AVCaptureDevice.Position + let preferredDeviceType: AVCaptureDevice.DeviceType + + switch currentPosition { + case .unspecified, .front: + preferredPosition = .back + preferredDeviceType = .builtInWideAngleCamera + + case .back: + preferredPosition = .front + preferredDeviceType = .builtInWideAngleCamera + + @unknown default: + print("Unknown capture position. Defaulting to back, dual-camera.") + preferredPosition = .back + preferredDeviceType = .builtInWideAngleCamera + } + let devices = self.videoDeviceDiscoverySession.devices + var newVideoDevice: AVCaptureDevice? 
= nil + + if let device = devices.first(where: { $0.position == preferredPosition && $0.deviceType == preferredDeviceType }) { + newVideoDevice = device + } else if let device = devices.first(where: { $0.position == preferredPosition }) { + newVideoDevice = device + } + + if let videoDevice = newVideoDevice { + do { + let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) + + self.session.beginConfiguration() + + self.session.removeInput(self.videoDeviceInput) + + if self.session.canAddInput(videoDeviceInput) { + NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: currentVideoDevice) + NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaDidChange), name: .AVCaptureDeviceSubjectAreaDidChange, object: videoDeviceInput.device) + + self.session.addInput(videoDeviceInput) + self.videoDeviceInput = videoDeviceInput + } else { + self.session.addInput(self.videoDeviceInput) + } + + if let connection = self.photoOutput.connection(with: .video) { + if connection.isVideoStabilizationSupported { + connection.preferredVideoStabilizationMode = .auto + } + } + + self.photoOutput.maxPhotoQualityPrioritization = .quality + + self.session.commitConfiguration() + } catch { + print("Error occurred while creating video device input: \(error)") + } + } + + DispatchQueue.main.async { + self.isCameraButtonDisabled = false + } + } + } + + public func focus(with focusMode: AVCaptureDevice.FocusMode, exposureMode: AVCaptureDevice.ExposureMode, at devicePoint: CGPoint, monitorSubjectAreaChange: Bool) { + sessionQueue.async { + guard let device = self.videoDeviceInput?.device else { return } + do { + try device.lockForConfiguration() + + if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode) { + device.focusPointOfInterest = devicePoint + device.focusMode = focusMode + } + + if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) { + device.exposurePointOfInterest = devicePoint + device.exposureMode = exposureMode + } + + device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange + device.unlockForConfiguration() + } catch { + print("Could not lock device for configuration: \(error)") + } + } + } + + + public func focus(at focusPoint: CGPoint) { + let device = self.videoDeviceInput.device + do { + try device.lockForConfiguration() + if device.isFocusPointOfInterestSupported { + device.focusPointOfInterest = focusPoint + device.exposurePointOfInterest = focusPoint + device.exposureMode = .continuousAutoExposure + device.focusMode = .continuousAutoFocus + device.unlockForConfiguration() + } + } + catch { + print(error.localizedDescription) + } + } + + @objc public func stop(completion: (() -> ())? 
= nil) { + sessionQueue.async { + if self.isSessionRunning { + if self.setupResult == .success { + self.session.stopRunning() + self.isSessionRunning = self.session.isRunning + print("CAMERA STOPPED") + self.removeObservers() + + if !self.session.isRunning { + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + completion?() + } + } + } + } + } + } + + @objc public func start() { + sessionQueue.async { + if !self.isSessionRunning && self.isConfigured { + switch self.setupResult { + case .success: + self.addObservers() + self.session.startRunning() + print("CAMERA RUNNING") + self.isSessionRunning = self.session.isRunning + + if self.session.isRunning { + DispatchQueue.main.async { + self.isCameraButtonDisabled = false + self.isCameraUnavailable = false + } + } + + case .notAuthorized: + print("Application not authorized to use camera") + DispatchQueue.main.async { + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + + case .configurationFailed: + DispatchQueue.main.async { + self.alertError = AlertError(title: "Camera Error", message: "Camera configuration failed. Either your device camera is not available or other application is using it", primaryButtonTitle: "Accept", secondaryButtonTitle: nil, primaryAction: nil, secondaryAction: nil) + self.shouldShowAlertView = true + self.isCameraButtonDisabled = true + self.isCameraUnavailable = true + } + } + } + } + } + + public func set(zoom: CGFloat) { + let factor = zoom < 1 ? 1 : zoom + let device = self.videoDeviceInput.device + + do { + try device.lockForConfiguration() + device.videoZoomFactor = factor + device.unlockForConfiguration() + } + catch { + print(error.localizedDescription) + } + } + + public func capturePhoto() { + if self.setupResult != .configurationFailed { + let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait + self.isCameraButtonDisabled = true + + sessionQueue.async { + if let photoOutputConnection = self.photoOutput.connection(with: .video) { + photoOutputConnection.videoOrientation = videoPreviewLayerOrientation + } + var photoSettings = AVCapturePhotoSettings() + + // Capture HEIF photos when supported. Enable according to user settings and high-resolution photos. + if (self.photoOutput.availablePhotoCodecTypes.contains(.hevc)) { + photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc]) + } + + if self.videoDeviceInput.device.isFlashAvailable { + photoSettings.flashMode = self.flashMode + } + + if !photoSettings.__availablePreviewPhotoPixelFormatTypes.isEmpty { + photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: photoSettings.__availablePreviewPhotoPixelFormatTypes.first!] 
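+ // Requests an embedded preview image using the first pixel format the photo output supports.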
+ } + + photoSettings.photoQualityPrioritization = .speed + + if self.photoCaptureProcessor == nil { + self.photoCaptureProcessor = PhotoCaptureProcessor(with: photoSettings, photoOutput: self.photoOutput, willCapturePhotoAnimation: { + DispatchQueue.main.async { + self.willCapturePhoto.toggle() + self.willCapturePhoto.toggle() + } + }, completionHandler: { (photoCaptureProcessor) in + if let data = photoCaptureProcessor.photoData { + let url = self.savePhoto(data: data) + if let unwrappedURL = url { + self.thumbnail = Thumbnail(type: .image, url: unwrappedURL) + } + } else { + print("Data for photo not found") + } + + self.isCameraButtonDisabled = false + }, photoProcessingHandler: { animate in + self.isPhotoProcessing = animate + }) + } + + self.photoCaptureProcessor?.capturePhoto(settings: photoSettings) + } + } + } + + public func startRecording() { + if self.setupResult != .configurationFailed { + let videoPreviewLayerOrientation: AVCaptureVideoOrientation = .portrait + self.isCameraButtonDisabled = true + + sessionQueue.async { + if let videoOutputConnection = self.movieOutput.connection(with: .video) { + videoOutputConnection.videoOrientation = videoPreviewLayerOrientation + + var videoSettings = [String: Any]() + + if self.movieOutput.availableVideoCodecTypes.contains(.hevc) == true { + videoSettings[AVVideoCodecKey] = AVVideoCodecType.hevc + self.movieOutput.setOutputSettings(videoSettings, for: videoOutputConnection) + } + } + + if self.videoCaptureProcessor == nil { + self.videoCaptureProcessor = VideoCaptureProcessor(movieOutput: self.movieOutput, beginHandler: { + self.isRecording = true + }, completionHandler: { (videoCaptureProcessor, outputFileURL) in + self.isCameraButtonDisabled = false + self.captureMode = .image + + self.mediaItems.append(MediaItem(url: outputFileURL, type: .video)) + self.thumbnail = Thumbnail(type: .video, url: outputFileURL) + }, videoProcessingHandler: { animate in + self.isPhotoProcessing = animate + }) + } + + self.videoCaptureProcessor?.startCapture(session: self.session) + } + } + } + + func stopRecording() { + if let videoCaptureProcessor = self.videoCaptureProcessor { + isRecording = false + videoCaptureProcessor.stopCapture() + } + } + + func savePhoto(imageType: String = "jpeg", data: Data) -> URL? 
{ + guard let uiImage = UIImage(data: data) else { + print("Error converting media data to UIImage") + return nil + } + + guard let compressedData = uiImage.jpegData(compressionQuality: 0.8) else { + print("Error converting UIImage to JPEG data") + return nil + } + + let temporaryDirectory = NSTemporaryDirectory() + let tempFileName = "\(UUID().uuidString).\(imageType)" + let tempFileURL = URL(fileURLWithPath: temporaryDirectory).appendingPathComponent(tempFileName) + + do { + try compressedData.write(to: tempFileURL) + self.mediaItems.append(MediaItem(url: tempFileURL, type: .image)) + return tempFileURL + } catch { + print("Error saving image data to temporary URL: \(error.localizedDescription)") + } + return nil + } + + private func addObservers() { + let systemPressureStateObservation = observe(\.videoDeviceInput.device.systemPressureState, options: .new) { _, change in + guard let systemPressureState = change.newValue else { return } + self.setRecommendedFrameRateRangeForPressureState(systemPressureState: systemPressureState) + } + keyValueObservations.append(systemPressureStateObservation) + +// NotificationCenter.default.addObserver(self, selector: #selector(self.onOrientationChange), name: UIDevice.orientationDidChangeNotification, object: nil) + + NotificationCenter.default.addObserver(self, + selector: #selector(subjectAreaDidChange), + name: .AVCaptureDeviceSubjectAreaDidChange, + object: videoDeviceInput.device) + + NotificationCenter.default.addObserver(self, selector: #selector(uiRequestedNewFocusArea), name: .init(rawValue: "UserDidRequestNewFocusPoint"), object: nil) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionRuntimeError), + name: .AVCaptureSessionRuntimeError, + object: session) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionWasInterrupted), + name: .AVCaptureSessionWasInterrupted, + object: session) + + NotificationCenter.default.addObserver(self, + selector: #selector(sessionInterruptionEnded), + name: .AVCaptureSessionInterruptionEnded, + object: session) + } + + private func removeObservers() { + NotificationCenter.default.removeObserver(self) + + for keyValueObservation in keyValueObservations { + keyValueObservation.invalidate() + } + keyValueObservations.removeAll() + } + + @objc private func uiRequestedNewFocusArea(notification: NSNotification) { + guard let userInfo = notification.userInfo as? [String: Any], let devicePoint = userInfo["devicePoint"] as? CGPoint else { return } + self.focus(at: devicePoint) + } + + @objc + private func subjectAreaDidChange(notification: NSNotification) { + let devicePoint = CGPoint(x: 0.5, y: 0.5) + focus(with: .continuousAutoFocus, exposureMode: .continuousAutoExposure, at: devicePoint, monitorSubjectAreaChange: false) + } + + @objc + private func sessionRuntimeError(notification: NSNotification) { + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? 
AVError else { return } + + print("Capture session runtime error: \(error)") + + if error.code == .mediaServicesWereReset { + sessionQueue.async { + if self.isSessionRunning { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + } + } + } + } + + private func setRecommendedFrameRateRangeForPressureState(systemPressureState: AVCaptureDevice.SystemPressureState) { + let pressureLevel = systemPressureState.level + if pressureLevel == .serious || pressureLevel == .critical { + do { + try self.videoDeviceInput.device.lockForConfiguration() + print("WARNING: Reached elevated system pressure level: \(pressureLevel). Throttling frame rate.") + self.videoDeviceInput.device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 20) + self.videoDeviceInput.device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 15) + self.videoDeviceInput.device.unlockForConfiguration() + } catch { + print("Could not lock device for configuration: \(error)") + } + } else if pressureLevel == .shutdown { + print("Session stopped running due to shutdown system pressure level.") + } + } + + @objc + private func sessionWasInterrupted(notification: NSNotification) { + DispatchQueue.main.async { + self.isCameraUnavailable = true + } + + if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, + let reasonIntegerValue = userInfoValue.integerValue, + let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) { + print("Capture session was interrupted with reason \(reason)") + + if reason == .audioDeviceInUseByAnotherClient || reason == .videoDeviceInUseByAnotherClient { + print("Session stopped running due to video devies in use by another client.") + } else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps { + print("Session stopped running due to video devies is not available with multiple foreground apps.") + } else if reason == .videoDeviceNotAvailableDueToSystemPressure { + print("Session stopped running due to shutdown system pressure level.") + } + } + } + + @objc + private func sessionInterruptionEnded(notification: NSNotification) { + print("Capture session interruption ended") + DispatchQueue.main.async { + self.isCameraUnavailable = false + } + } +} From dd30608a717904e127376bffe4e4ca31de626ed1 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Thu, 21 Sep 2023 12:20:34 -0700 Subject: [PATCH 09/13] add CameraModel and CameraService for interacting with the camera --- damus.xcodeproj/project.pbxproj | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index 18e32e922d..b5b5bf6c9b 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -380,6 +380,9 @@ BA37598A2ABCCDE40018D73B /* ImageResizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759892ABCCDE30018D73B /* ImageResizer.swift */; }; BA37598D2ABCCE500018D73B /* PhotoCaptureProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA37598B2ABCCE500018D73B /* PhotoCaptureProcessor.swift */; }; BA37598E2ABCCE500018D73B /* VideoCaptureProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA37598C2ABCCE500018D73B /* VideoCaptureProcessor.swift */; }; + BA3759922ABCCEBA0018D73B /* CameraService+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */; }; + BA3759932ABCCEBA0018D73B /* CameraModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
BA3759902ABCCEBA0018D73B /* CameraModel.swift */; }; + BA3759942ABCCEBA0018D73B /* CameraService.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759912ABCCEBA0018D73B /* CameraService.swift */; }; BA4AB0AE2A63B9270070A32A /* AddEmojiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */; }; BA4AB0B02A63B94D0070A32A /* EmojiListItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */; }; BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA693073295D649800ADDB87 /* UserSettingsStore.swift */; }; @@ -934,6 +937,9 @@ BA3759892ABCCDE30018D73B /* ImageResizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageResizer.swift; sourceTree = ""; }; BA37598B2ABCCE500018D73B /* PhotoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureProcessor.swift; sourceTree = ""; }; BA37598C2ABCCE500018D73B /* VideoCaptureProcessor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoCaptureProcessor.swift; sourceTree = ""; }; + BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraService+Extensions.swift"; sourceTree = ""; }; + BA3759902ABCCEBA0018D73B /* CameraModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraModel.swift; sourceTree = ""; }; + BA3759912ABCCEBA0018D73B /* CameraService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraService.swift; sourceTree = ""; }; BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AddEmojiView.swift; sourceTree = ""; }; BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiListItemView.swift; sourceTree = ""; }; BA693073295D649800ADDB87 /* UserSettingsStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSettingsStore.swift; sourceTree = ""; }; @@ -1870,6 +1876,9 @@ BA3759882ABCCDE30018D73B /* Camera */ = { isa = PBXGroup; children = ( + BA3759902ABCCEBA0018D73B /* CameraModel.swift */, + BA3759912ABCCEBA0018D73B /* CameraService.swift */, + BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */, BA3759892ABCCDE30018D73B /* ImageResizer.swift */, BA37598B2ABCCE500018D73B /* PhotoCaptureProcessor.swift */, BA37598C2ABCCE500018D73B /* VideoCaptureProcessor.swift */, @@ -2204,6 +2213,7 @@ 4C363A8428233689006E126D /* Parser.swift in Sources */, 3AAA95CA298DF87B00F3D526 /* TranslationService.swift in Sources */, 4CE4F9E328528C5200C00DD9 /* AddRelayView.swift in Sources */, + BA3759922ABCCEBA0018D73B /* CameraService+Extensions.swift in Sources */, 4C363A9A28283854006E126D /* Reply.swift in Sources */, BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */, 4CFF8F6729CC9E3A008DB934 /* ImageView.swift in Sources */, @@ -2240,6 +2250,7 @@ 4C198DF529F88D2E004C165C /* ImageMetadata.swift in Sources */, 4CCEB7AE29B53D260078AA28 /* SearchingEventView.swift in Sources */, 4CF0ABE929844AF100D66079 /* AnyCodable.swift in Sources */, + BA3759932ABCCEBA0018D73B /* CameraModel.swift in Sources */, 4C0A3F8F280F640A000448DE /* 
ThreadModel.swift in Sources */, 4C3AC79F2833115300E1F516 /* FollowButtonView.swift in Sources */, 4C4E137B2A76D5FB00BDD832 /* MuteThreadNotify.swift in Sources */, @@ -2339,6 +2350,7 @@ 4C5F9114283D694D0052CD1C /* FollowTarget.swift in Sources */, 4CF0ABD629817F5B00D66079 /* ReportView.swift in Sources */, 4C1A9A2729DDE31900516EAC /* TranslationSettingsView.swift in Sources */, + BA3759942ABCCEBA0018D73B /* CameraService.swift in Sources */, 4CB8838629656C8B00DC99E7 /* NIP05.swift in Sources */, 4CF0ABD82981980C00D66079 /* Lists.swift in Sources */, F71694EA2A662232001F4053 /* SuggestedUsersView.swift in Sources */, From ada2b8f53b231d00deea646fc15ac1d2f696026a Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Tue, 19 Sep 2023 13:35:52 -0700 Subject: [PATCH 10/13] add CameraPreview for displaying the view the camera is reading --- damus/Views/Camera/CameraPreview.swift | 95 ++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 damus/Views/Camera/CameraPreview.swift diff --git a/damus/Views/Camera/CameraPreview.swift b/damus/Views/Camera/CameraPreview.swift new file mode 100644 index 0000000000..48b4d98a66 --- /dev/null +++ b/damus/Views/Camera/CameraPreview.swift @@ -0,0 +1,95 @@ +// +// CameraPreview.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. +// + +import UIKit +import AVFoundation +import SwiftUI + +public struct CameraPreview: UIViewRepresentable { + public class VideoPreviewView: UIView { + public override class var layerClass: AnyClass { + AVCaptureVideoPreviewLayer.self + } + + var videoPreviewLayer: AVCaptureVideoPreviewLayer { + return layer as! AVCaptureVideoPreviewLayer + } + + let focusView: UIView = { + let focusView = UIView(frame: CGRect(x: 0, y: 0, width: 30, height: 30)) + focusView.layer.borderColor = UIColor.white.cgColor + focusView.layer.borderWidth = 1.5 + focusView.layer.cornerRadius = 15 + focusView.layer.opacity = 0 + focusView.backgroundColor = .clear + return focusView + }() + + @objc func focusAndExposeTap(gestureRecognizer: UITapGestureRecognizer) { + let layerPoint = gestureRecognizer.location(in: gestureRecognizer.view) + + guard layerPoint.x >= 0 && layerPoint.x <= bounds.width && + layerPoint.y >= 0 && layerPoint.y <= bounds.height else { + return + } + + let devicePoint = videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: layerPoint) + + self.focusView.layer.frame = CGRect(origin: layerPoint, size: CGSize(width: 30, height: 30)) + + NotificationCenter.default.post(.init(name: .init("UserDidRequestNewFocusPoint"), object: nil, userInfo: ["devicePoint": devicePoint] as [AnyHashable: Any])) + + UIView.animate(withDuration: 0.3, animations: { + self.focusView.layer.opacity = 1 + }) { (completed) in + if completed { + UIView.animate(withDuration: 0.3) { + self.focusView.layer.opacity = 0 + } + } + } + } + + public override func layoutSubviews() { + super.layoutSubviews() + + videoPreviewLayer.videoGravity = .resizeAspectFill + + self.layer.addSublayer(focusView.layer) + + let gRecognizer = UITapGestureRecognizer(target: self, action: #selector(VideoPreviewView.focusAndExposeTap(gestureRecognizer:))) + self.addGestureRecognizer(gRecognizer) + } + } + + public let session: AVCaptureSession + + public init(session: AVCaptureSession) { + self.session = session + } + + public func makeUIView(context: Context) -> VideoPreviewView { + let viewFinder = VideoPreviewView() + viewFinder.backgroundColor = .black + viewFinder.videoPreviewLayer.cornerRadius = 20 + viewFinder.videoPreviewLayer.session = session + 
viewFinder.videoPreviewLayer.connection?.videoOrientation = .portrait + + return viewFinder + } + + public func updateUIView(_ uiView: VideoPreviewView, context: Context) { + + } +} + +struct CameraPreview_Previews: PreviewProvider { + static var previews: some View { + CameraPreview(session: AVCaptureSession()) + .frame(height: 300) + } +} From 2c0a1bfbe838cc8b980b12e8ccff214b68138de1 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Thu, 21 Sep 2023 12:22:11 -0700 Subject: [PATCH 11/13] add CameraPreview for displaying the view the camera is reading --- damus.xcodeproj/project.pbxproj | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index b5b5bf6c9b..fa5347e102 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -383,6 +383,7 @@ BA3759922ABCCEBA0018D73B /* CameraService+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */; }; BA3759932ABCCEBA0018D73B /* CameraModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759902ABCCEBA0018D73B /* CameraModel.swift */; }; BA3759942ABCCEBA0018D73B /* CameraService.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759912ABCCEBA0018D73B /* CameraService.swift */; }; + BA3759972ABCCF360018D73B /* CameraPreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA3759962ABCCF360018D73B /* CameraPreview.swift */; }; BA4AB0AE2A63B9270070A32A /* AddEmojiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */; }; BA4AB0B02A63B94D0070A32A /* EmojiListItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */; }; BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA693073295D649800ADDB87 /* UserSettingsStore.swift */; }; @@ -940,6 +941,7 @@ BA37598F2ABCCEBA0018D73B /* CameraService+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraService+Extensions.swift"; sourceTree = ""; }; BA3759902ABCCEBA0018D73B /* CameraModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraModel.swift; sourceTree = ""; }; BA3759912ABCCEBA0018D73B /* CameraService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraService.swift; sourceTree = ""; }; + BA3759962ABCCF360018D73B /* CameraPreview.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraPreview.swift; sourceTree = ""; }; BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AddEmojiView.swift; sourceTree = ""; }; BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiListItemView.swift; sourceTree = ""; }; BA693073295D649800ADDB87 /* UserSettingsStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSettingsStore.swift; sourceTree = ""; }; @@ -1247,6 +1249,7 @@ 4C75EFA227FA576C0006080F /* Views */ = { isa = PBXGroup; children = ( + BA3759952ABCCF360018D73B /* Camera */, F71694E82A66221E001F4053 /* Onboarding */, 4C190F232A547D1700027FD5 /* NostrScript */, 4C7D09692A0AEA0400943473 /* CodeScanner */, @@ -1886,6 +1889,14 @@ path = Camera; sourceTree = ""; }; + 
BA3759952ABCCF360018D73B /* Camera */ = { + isa = PBXGroup; + children = ( + BA3759962ABCCF360018D73B /* CameraPreview.swift */, + ); + path = Camera; + sourceTree = ""; + }; F71694E82A66221E001F4053 /* Onboarding */ = { isa = PBXGroup; children = ( @@ -2209,6 +2220,7 @@ 4C75EFB328049D640006080F /* NostrEvent.swift in Sources */, 4CA2EFA0280E37AC0044ACD8 /* TimelineView.swift in Sources */, 4C30AC7629A5770900E2BD5A /* NotificationItemView.swift in Sources */, + BA3759972ABCCF360018D73B /* CameraPreview.swift in Sources */, 4C86F7C42A76C44C00EC0817 /* ZappingNotify.swift in Sources */, 4C363A8428233689006E126D /* Parser.swift in Sources */, 3AAA95CA298DF87B00F3D526 /* TranslationService.swift in Sources */, From 8f059233b6cc25896373a650b69160432e38b327 Mon Sep 17 00:00:00 2001 From: Suhail Saqan Date: Tue, 19 Sep 2023 13:38:41 -0700 Subject: [PATCH 12/13] change the camera view to the new custom camera --- damus.xcodeproj/project.pbxproj | 19 +- .../xcshareddata/swiftpm/Package.resolved | 39 ---- damus/Views/Camera/CameraView.swift | 212 ++++++++++++++++++ damus/Views/PostView.swift | 42 ++-- 4 files changed, 250 insertions(+), 62 deletions(-) delete mode 100644 damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved create mode 100644 damus/Views/Camera/CameraView.swift diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index fa5347e102..033a47a8c8 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -387,6 +387,8 @@ BA4AB0AE2A63B9270070A32A /* AddEmojiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */; }; BA4AB0B02A63B94D0070A32A /* EmojiListItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */; }; BA693074295D649800ADDB87 /* UserSettingsStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA693073295D649800ADDB87 /* UserSettingsStore.swift */; }; + BA8A4F0F2A2D95F70045C48C /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA8A4F0E2A2D95F70045C48C /* CameraView.swift */; }; + BA8A4F132A2D96AD0045C48C /* CameraPreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = BA8A4F122A2D96AD0045C48C /* CameraPreview.swift */; }; BAB68BED29543FA3007BA466 /* SelectWalletView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BAB68BEC29543FA3007BA466 /* SelectWalletView.swift */; }; D2277EEA2A089BD5006C3807 /* Router.swift in Sources */ = {isa = PBXBuildFile; fileRef = D2277EE92A089BD5006C3807 /* Router.swift */; }; D78525252A7B2EA4002FA637 /* NoteContentViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D78525242A7B2EA4002FA637 /* NoteContentViewTests.swift */; }; @@ -945,6 +947,8 @@ BA4AB0AD2A63B9270070A32A /* AddEmojiView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AddEmojiView.swift; sourceTree = ""; }; BA4AB0AF2A63B94D0070A32A /* EmojiListItemView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiListItemView.swift; sourceTree = ""; }; BA693073295D649800ADDB87 /* UserSettingsStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSettingsStore.swift; sourceTree = ""; }; + BA8A4F0E2A2D95F70045C48C /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = ""; }; + BA8A4F122A2D96AD0045C48C /* CameraPreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
CameraPreview.swift; sourceTree = ""; }; BAB68BEC29543FA3007BA466 /* SelectWalletView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SelectWalletView.swift; sourceTree = ""; }; D2277EE92A089BD5006C3807 /* Router.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Router.swift; sourceTree = ""; }; D78525242A7B2EA4002FA637 /* NoteContentViewTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NoteContentViewTests.swift; sourceTree = ""; }; @@ -1252,6 +1256,7 @@ BA3759952ABCCF360018D73B /* Camera */, F71694E82A66221E001F4053 /* Onboarding */, 4C190F232A547D1700027FD5 /* NostrScript */, + BA8A4F0D2A2D95880045C48C /* Camera */, 4C7D09692A0AEA0400943473 /* CodeScanner */, 4C7D095A2A098C5C00943473 /* Wallet */, 4C8D1A6D29F31E4100ACDF75 /* Buttons */, @@ -2147,6 +2152,7 @@ 4C190F252A547D2000027FD5 /* LoadScript.swift in Sources */, 4C59B98C2A76C2550032FFEB /* ProfileUpdatedNotify.swift in Sources */, 4C363A8C28236B92006E126D /* PubkeyView.swift in Sources */, + BA3BF28A2A7F156B00600232 /* ImageResizer.swift in Sources */, 4CDA128A29E9D10C0006FA5A /* SignalView.swift in Sources */, 4C12535C2A76CA540004F4B8 /* LoginNotify.swift in Sources */, 4C5C7E68284ED36500A22DF5 /* SearchHomeModel.swift in Sources */, @@ -2222,6 +2228,7 @@ 4C30AC7629A5770900E2BD5A /* NotificationItemView.swift in Sources */, BA3759972ABCCF360018D73B /* CameraPreview.swift in Sources */, 4C86F7C42A76C44C00EC0817 /* ZappingNotify.swift in Sources */, + BA27222E2A806E39004CDF52 /* VideoCaptureProcessor.swift in Sources */, 4C363A8428233689006E126D /* Parser.swift in Sources */, 3AAA95CA298DF87B00F3D526 /* TranslationService.swift in Sources */, 4CE4F9E328528C5200C00DD9 /* AddRelayView.swift in Sources */, @@ -2316,6 +2323,7 @@ 4C4E137D2A76D63600BDD832 /* UnmuteThreadNotify.swift in Sources */, 4CE4F0F829DB7399005914DB /* ThiccDivider.swift in Sources */, 4CE0E2B629A3ED5500DB4CA2 /* InnerTimelineView.swift in Sources */, + BA3BF28C2A7F156B00600232 /* CameraService.swift in Sources */, 4C363A8828236948006E126D /* BlocksView.swift in Sources */, 4C06670628FCB08600038D2A /* ImageCarousel.swift in Sources */, 3A23838E2A297DD200E5AA2E /* ZapButtonModel.swift in Sources */, @@ -2323,6 +2331,7 @@ 4C1D4FB12A7958E60024F453 /* VersionInfo.swift in Sources */, 5053ACA72A56DF3B00851AE3 /* DeveloperSettingsView.swift in Sources */, F79C7FAD29D5E9620000F946 /* EditPictureControl.swift in Sources */, + BA3BF2892A7F156B00600232 /* PhotoCaptureProcessor.swift in Sources */, 4C9F18E229AA9B6C008C55EC /* CustomizeZapView.swift in Sources */, 4C2859602A12A2BE004746F7 /* SupporterBadge.swift in Sources */, 4C1A9A2A29DDF54400516EAC /* DamusVideoPlayer.swift in Sources */, @@ -2397,6 +2406,7 @@ 4C3EA66028FF5E7700C48A62 /* node_id.c in Sources */, 4C687C212A5F7ED00092C550 /* DamusBackground.swift in Sources */, 4CA352A02A76AE80003BB08B /* Notify.swift in Sources */, + BA3BF28B2A7F156B00600232 /* CameraService+Extensions.swift in Sources */, 4CE6DEE727F7A08100C66700 /* damusApp.swift in Sources */, 4C1253582A76C9060004F4B8 /* PresentSheetNotify.swift in Sources */, 4C363A962827096D006E126D /* PostBlock.swift in Sources */, @@ -2442,6 +2452,7 @@ 5C513FBA297F72980072348F /* CustomPicker.swift in Sources */, 4C1253622A76D00B0004F4B8 /* PostNotify.swift in Sources */, 4CACA9D5280C31E100D9BBE8 /* ReplyView.swift in Sources */, + BA3BF28F2A7F1B2D00600232 /* CameraModel.swift in Sources */, F7908E92298B0F0700AB113A /* RelayDetailView.swift in Sources */, 
4C9147002A2A891E00DDEA40 /* error.c in Sources */, 4CE879552996BAB900F758CC /* RelayPaidDetail.swift in Sources */, @@ -2451,6 +2462,7 @@ 4CF0ABD42980996B00D66079 /* Report.swift in Sources */, 4C06670B28FDE64700038D2A /* damus.c in Sources */, 4C1253642A76D08F0004F4B8 /* ReportNotify.swift in Sources */, + BA8A4F0F2A2D95F70045C48C /* CameraView.swift in Sources */, 4C1A9A2529DDDF2600516EAC /* ZapSettingsView.swift in Sources */, 4C2CDDF7299D4A5E00879FD5 /* Debouncer.swift in Sources */, 3AAA95CC298E07E900F3D526 /* DeepLPlan.swift in Sources */, @@ -2476,6 +2488,7 @@ 3AA59D1D2999B0400061C48E /* DraftsModel.swift in Sources */, 3169CAED294FCCFC00EE4006 /* Constants.swift in Sources */, 4C9AA14A2A4587A6003F49FD /* NotificationStatusModel.swift in Sources */, + BA8A4F132A2D96AD0045C48C /* CameraPreview.swift in Sources */, 4CB9D4A72992D02B00A9A7E4 /* ProfileNameView.swift in Sources */, 4CE4F0F429D779B5005914DB /* PostBox.swift in Sources */, BA37598E2ABCCE500018D73B /* VideoCaptureProcessor.swift in Sources */, @@ -2788,7 +2801,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 11; DEVELOPMENT_ASSET_PATHS = "\"damus/Preview Content\""; - DEVELOPMENT_TEAM = XK7H4JAB3D; + DEVELOPMENT_TEAM = XL4476DR2X; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = damus/Info.plist; @@ -2814,7 +2827,7 @@ "$(PROJECT_DIR)", ); MARKETING_VERSION = 1.6; - PRODUCT_BUNDLE_IDENTIFIER = com.jb55.damus2; + PRODUCT_BUNDLE_IDENTIFIER = com.suhail.damus2; PRODUCT_NAME = "$(TARGET_NAME)"; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = YES; @@ -2837,7 +2850,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 11; DEVELOPMENT_ASSET_PATHS = "\"damus/Preview Content\""; - DEVELOPMENT_TEAM = XK7H4JAB3D; + DEVELOPMENT_TEAM = XL4476DR2X; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = damus/Info.plist; diff --git a/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved deleted file mode 100644 index c8409240c6..0000000000 --- a/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ /dev/null @@ -1,39 +0,0 @@ -{ - "pins" : [ - { - "identity" : "gsplayer", - "kind" : "remoteSourceControl", - "location" : "https://github.com/wxxsw/GSPlayer", - "state" : { - "revision" : "aa6dad7943d52f5207f7fcc2ad3e4274583443b8", - "version" : "0.2.26" - } - }, - { - "identity" : "kingfisher", - "kind" : "remoteSourceControl", - "location" : "https://github.com/onevcat/Kingfisher", - "state" : { - "revision" : "415b1d97fb38bda1e5a6b2dde63354720832110b", - "version" : "7.6.1" - } - }, - { - "identity" : "secp256k1.swift", - "kind" : "remoteSourceControl", - "location" : "https://github.com/jb55/secp256k1.swift", - "state" : { - "revision" : "40b4b38b3b1c83f7088c76189a742870e0ca06a9" - } - }, - { - "identity" : "swift-markdown-ui", - "kind" : "remoteSourceControl", - "location" : "https://github.com/damus-io/swift-markdown-ui", - "state" : { - "revision" : "76bb7971da7fbf429de1c84f1244adf657242fee" - } - } - ], - "version" : 2 -} diff --git a/damus/Views/Camera/CameraView.swift b/damus/Views/Camera/CameraView.swift new file mode 100644 index 0000000000..c021bb710c --- /dev/null +++ b/damus/Views/Camera/CameraView.swift @@ -0,0 +1,212 @@ +// +// CameraView.swift +// damus +// +// Created by Suhail Saqan on 8/5/23. 
+//
+
+import SwiftUI
+import Combine
+import AVFoundation
+
+struct CameraView: View {
+    let damus_state: DamusState
+    let action: (([MediaItem]) -> Void)
+
+    @Environment(\.presentationMode) var presentationMode
+
+    @StateObject var model: CameraModel
+
+    @State var currentZoomFactor: CGFloat = 1.0
+
+    public init(damus_state: DamusState, action: @escaping (([MediaItem]) -> Void)) {
+        self.damus_state = damus_state
+        self.action = action
+        _model = StateObject(wrappedValue: CameraModel())
+    }
+
+    var captureButton: some View {
+        Button {
+            if model.isRecording {
+                withAnimation {
+                    model.stopRecording()
+                }
+            } else {
+                withAnimation {
+                    model.capturePhoto()
+                }
+            }
+            UIImpactFeedbackGenerator(style: .medium).impactOccurred()
+        } label: {
+            ZStack {
+                Circle()
+                    .fill( model.isRecording ? .red : DamusColors.black)
+                    .frame(width: model.isRecording ? 85 : 65, height: model.isRecording ? 85 : 65, alignment: .center)
+
+                Circle()
+                    .stroke( model.isRecording ? .red : DamusColors.white, lineWidth: 4)
+                    .frame(width: model.isRecording ? 95 : 75, height: model.isRecording ? 95 : 75, alignment: .center)
+            }
+            .frame(alignment: .center)
+        }
+        .simultaneousGesture(
+            LongPressGesture(minimumDuration: 0.5).onEnded({ value in
+                if (!model.isCameraButtonDisabled) {
+                    withAnimation {
+                        model.startRecording()
+                        model.captureMode = .video
+                    }
+                }
+            })
+        )
+        .buttonStyle(.plain)
+    }
+
+    var capturedPhotoThumbnail: some View {
+        ZStack {
+            if model.thumbnail != nil {
+                Image(uiImage: model.thumbnail.thumbnailImage!)
+                    .resizable()
+                    .aspectRatio(contentMode: .fill)
+                    .frame(width: 60, height: 60)
+                    .clipShape(RoundedRectangle(cornerRadius: 10, style: .continuous))
+            }
+            if model.isPhotoProcessing {
+                ProgressView()
+                    .progressViewStyle(CircularProgressViewStyle(tint: DamusColors.white))
+            }
+        }
+    }
+
+    var closeButton: some View {
+        Button {
+            presentationMode.wrappedValue.dismiss()
+            model.stop()
+        } label: {
+            HStack {
+                Image(systemName: "xmark")
+                    .font(.system(size: 24))
+            }
+            .frame(minWidth: 40, minHeight: 40)
+        }
+        .accentColor(DamusColors.white)
+    }
+
+    var flipCameraButton: some View {
+        Button(action: {
+            model.flipCamera()
+        }, label: {
+            HStack {
+                Image(systemName: "camera.rotate.fill")
+                    .font(.system(size: 20))
+            }
+            .frame(minWidth: 40, minHeight: 40)
+        })
+        .accentColor(DamusColors.white)
+    }
+
+    var toggleFlashButton: some View {
+        Button(action: {
+            model.switchFlash()
+        }, label: {
+            HStack {
+                Image(systemName: model.isFlashOn ? "bolt.fill" : "bolt.slash.fill")
+                    .font(.system(size: 20))
+            }
+            .frame(minWidth: 40, minHeight: 40)
+        })
+        .accentColor(model.isFlashOn ? .yellow : DamusColors.white)
+    }
+
+    var body: some View {
+        NavigationView {
+            GeometryReader { reader in
+                ZStack {
+                    DamusColors.black.edgesIgnoringSafeArea(.all)
+
+                    CameraPreview(session: model.session)
+                        .padding(.bottom, 175)
+                        .edgesIgnoringSafeArea(.all)
+                        .gesture(
+                            DragGesture().onChanged({ (val) in
+                                if abs(val.translation.height) > abs(val.translation.width) {
+                                    let percentage: CGFloat = -(val.translation.height / reader.size.height)
+                                    let calc = currentZoomFactor + percentage
+                                    let zoomFactor: CGFloat = min(max(calc, 1), 5)
+
+                                    currentZoomFactor = zoomFactor
+                                    model.zoom(with: zoomFactor)
+                                }
+                            })
+                        )
+                        .onAppear {
+                            model.configure()
+                        }
+                        .alert(isPresented: $model.showAlertError, content: {
+                            Alert(title: Text(model.alertError.title), message: Text(model.alertError.message), dismissButton: .default(Text(model.alertError.primaryButtonTitle), action: {
+                                model.alertError.primaryAction?()
+                            }))
+                        })
+                        .overlay(
+                            Group {
+                                if model.willCapturePhoto {
+                                    Color.black
+                                }
+                            }
+                        )
+
+                    VStack {
+                        if !model.isRecording {
+                            HStack {
+                                closeButton
+
+                                Spacer()
+
+                                HStack {
+                                    flipCameraButton
+                                    toggleFlashButton
+                                }
+                            }
+                            .padding(.horizontal, 20)
+                        }
+
+                        Spacer()
+
+                        HStack(alignment: .center) {
+                            if !model.mediaItems.isEmpty {
+                                NavigationLink(destination: Text(model.mediaItems.map { $0.url.absoluteString }.joined(separator: ", "))) {
+                                    capturedPhotoThumbnail
+                                }
+                                .frame(width: 100, alignment: .leading)
+                            }
+
+                            Spacer()
+
+                            captureButton
+
+                            Spacer()
+
+                            if !model.mediaItems.isEmpty {
+                                Button(action: {
+                                    action(model.mediaItems)
+                                    presentationMode.wrappedValue.dismiss()
+                                    model.stop()
+                                }) {
+                                    Text("Upload")
+                                        .frame(width: 100, height: 40, alignment: .center)
+                                        .foregroundColor(DamusColors.white)
+                                        .overlay {
+                                            RoundedRectangle(cornerRadius: 24)
+                                                .stroke(DamusColors.white, lineWidth: 2)
+                                        }
+                                }
+                            }
+                        }
+                        .frame(height: 100)
+                        .padding([.horizontal, .vertical], 20)
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/damus/Views/PostView.swift b/damus/Views/PostView.swift
index 8c7897087e..d2fde5597f 100644
--- a/damus/Views/PostView.swift
+++ b/damus/Views/PostView.swift
@@ -56,7 +56,7 @@ struct PostView: View {
     @State var newCursorIndex: Int?
     @State var postTextViewCanScroll: Bool = true
 
-    @State var mediaToUpload: MediaUpload? = nil
+    @State var mediaToUpload: [MediaUpload] = []
 
     @StateObject var image_upload: ImageUploadModel = ImageUploadModel()
     @StateObject var tagModel: TagModel = TagModel()
@@ -325,6 +325,15 @@
             pks.append(pk)
         }
     }
+
+    func addToMediaToUpload(mediaItem: MediaItem) {
+        switch mediaItem.type {
+        case .image:
+            mediaToUpload.append(.image(mediaItem.url))
+        case .video:
+            mediaToUpload.append(.video(mediaItem.url))
+        }
+    }
 
     var body: some View {
         GeometryReader { (deviceSize: GeometryProxy) in
@@ -363,36 +372,29 @@
         }
         .sheet(isPresented: $attach_media) {
             ImagePicker(uploader: damus_state.settings.default_media_uploader, sourceType: .photoLibrary, pubkey: damus_state.pubkey, image_upload_confirm: $image_upload_confirm) { img in
-                self.mediaToUpload = .image(img)
+                self.mediaToUpload.append(.image(img))
             } onVideoPicked: { url in
-                self.mediaToUpload = .video(url)
+                self.mediaToUpload.append(.video(url))
             }
             .alert(NSLocalizedString("Are you sure you want to upload this media?", comment: "Alert message asking if the user wants to upload media."), isPresented: $image_upload_confirm) {
                 Button(NSLocalizedString("Upload", comment: "Button to proceed with uploading."), role: .none) {
-                    if let mediaToUpload {
-                        self.handle_upload(media: mediaToUpload)
+                    if !mediaToUpload.isEmpty {
+                        self.handle_upload(media: mediaToUpload[0])
                         self.attach_media = false
                    }
                }
                Button(NSLocalizedString("Cancel", comment: "Button to cancel the upload."), role: .cancel) {}
            }
        }
-        .sheet(isPresented: $attach_camera) {
-            
-            ImagePicker(uploader: damus_state.settings.default_media_uploader, sourceType: .camera, pubkey: damus_state.pubkey, image_upload_confirm: $image_upload_confirm) { img in
-                self.mediaToUpload = .image(img)
-            } onVideoPicked: { url in
-                self.mediaToUpload = .video(url)
-            }
-            .alert(NSLocalizedString("Are you sure you want to upload this media?", comment: "Alert message asking if the user wants to upload media."), isPresented: $image_upload_confirm) {
-                Button(NSLocalizedString("Upload", comment: "Button to proceed with uploading."), role: .none) {
-                    if let mediaToUpload {
-                        self.handle_upload(media: mediaToUpload)
-                        self.attach_camera = false
-                    }
+        .fullScreenCover(isPresented: $attach_camera) {
+            CameraView(damus_state: damus_state, action: { items in
+                for item in items {
+                    addToMediaToUpload(mediaItem: item)
                }
-                Button(NSLocalizedString("Cancel", comment: "Button to cancel the upload."), role: .cancel) {}
-            }
+                for media in mediaToUpload {
+                    self.handle_upload(media: media)
+                }
+            })
        }
        .onAppear() {
            let loaded_draft = load_draft()

From aeb33b48325f12bcf89c2b9ef810e8014721215f Mon Sep 17 00:00:00 2001
From: Suhail Saqan
Date: Thu, 21 Sep 2023 12:25:54 -0700
Subject: [PATCH 13/13] Create Package.resolved

---
 .../xcshareddata/swiftpm/Package.resolved | 39 +++++++++++++++++++
 1 file changed, 39 insertions(+)
 create mode 100644 damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved

diff --git a/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
new file mode 100644
index 0000000000..c8409240c6
--- /dev/null
+++ b/damus.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -0,0 +1,39 @@
+{
+  "pins" : [
+    {
+      "identity" : "gsplayer",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/wxxsw/GSPlayer",
+      "state" : {
+        "revision" : "aa6dad7943d52f5207f7fcc2ad3e4274583443b8",
+        "version" : "0.2.26"
+      }
+    },
+    {
+      "identity" : "kingfisher",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/onevcat/Kingfisher",
+      "state" : {
+        "revision" : "415b1d97fb38bda1e5a6b2dde63354720832110b",
+        "version" : "7.6.1"
+      }
+    },
+    {
+      "identity" : "secp256k1.swift",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/jb55/secp256k1.swift",
+      "state" : {
+        "revision" : "40b4b38b3b1c83f7088c76189a742870e0ca06a9"
+      }
+    },
+    {
+      "identity" : "swift-markdown-ui",
+      "kind" : "remoteSourceControl",
+      "location" : "https://github.com/damus-io/swift-markdown-ui",
+      "state" : {
+        "revision" : "76bb7971da7fbf429de1c84f1244adf657242fee"
+      }
+    }
+  ],
+  "version" : 2
+}