I am following along with this camera tutorial from Apple: https://developer.apple.com/tutorials/sample-apps/capturingphotos-camerapreview
I have a DataModel:
final class DataModel: ObservableObject {
    let camera = Camera()

    @Published var frame: Image?
    var isPhotosLoaded = false

    init() {
        print("DataModel init")
        Task {
            await handleCameraPreviews()
        }
    }

    deinit {
        print("DataModel deinit")
    }

    func handleCameraPreviews() async {
        let imageStream = camera.previewStream
            .map { $0.image }

        for await image in imageStream {
            Task { @MainActor in
                // CIFilters to come...
                frame = image
            }
        }
    }
}
And the Camera:
class Camera: NSObject {
    ...

    deinit {
        print("Camera > deinit")
    }

    private var addToPreviewStream: ((CIImage) -> Void)?

    lazy var previewStream: AsyncStream<CIImage> = {
        AsyncStream { continuation in
            addToPreviewStream = { ciImage in
                if !self.isPreviewPaused {
                    continuation.yield(ciImage)
                }
            }
        }
    }()

    ...
}
extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
        addToPreviewStream?(CIImage(cvPixelBuffer: pixelBuffer))
    }
}
This works well, until I noticed the memory usage climbing every time I switch to a second view and back to the camera again.
Every time I navigate to the second view, I never see "deinit" printed from either the DataModel() or the Camera(). And when I navigate back to the camera view, I see both the DataModel() and the Camera() print "init".
I admit I understand very little about memory management, but I suspect there is a strong retain cycle.
When I comment out the handleCameraPreviews() function, the problem goes away. I suspect that's where the retain cycle or some other leak is coming from, because then both objects init and deinit as expected.
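To check my understanding of the mechanism, here is a minimal, self-contained sketch of what I think is going on (invented names, my two objects condensed into one type, so this is not code from my app): the stored closure captures self strongly, and the Task never finishes because the stream's continuation is never finished, so deinit is never reached.

final class Holder {
    var isPaused = false
    private var addValue: ((Int) -> Void)?

    // Mirrors Camera.previewStream: the stored closure captures self strongly,
    // and continuation.finish() is never called anywhere.
    lazy var stream: AsyncStream<Int> = {
        AsyncStream { continuation in
            self.addValue = { value in
                if !self.isPaused {
                    continuation.yield(value)
                }
            }
        }
    }()

    init() {
        print("Holder init")
        // Mirrors DataModel.init: the Task captures self strongly, and the
        // for-await loop never ends because the stream is never finished.
        Task {
            for await value in self.stream {
                print("received \(value)")
            }
        }
    }

    deinit {
        print("Holder deinit")  // as far as I can tell, this never prints
    }
}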
I tried updating the handleCameraPreviews() to add [weak self]:
init() {
    print("DataModel init")
    cameraTask = Task {
        await camera.start()
    }
    previewTask = Task { [weak self] in
        await self?.handleCameraPreviews()
    }
}

func handleCameraPreviews() async {
    let stream = camera.previewStream

    for await frame in stream {
        if Task.isCancelled { return }
        await MainActor.run { [weak self] in
            guard let self else { return }
            self.frame = frame.image
        }
    }
}
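In case it matters: cameraTask and previewTask are stored as plain Task properties, roughly like this, and nothing ever cancels them (there is no obvious place to, since deinit is never reached):

// Roughly how the two task handles are declared.
private var cameraTask: Task<Void, Never>?
private var previewTask: Task<Void, Never>?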
But it did not help. Sadly, this is where I am out of ideas due to the limits of my knowledge. Any suggestions?
Thanks!
Edit 01:
As requested, here is the full code:
Camera()
import AVFoundation
import CoreImage
import UIKit
import os.log
class Camera: NSObject {
    private let captureSession = AVCaptureSession()
    private var isCaptureSessionConfigured = false
    private var deviceInput: AVCaptureDeviceInput?
    private var photoOutput: AVCapturePhotoOutput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var sessionQueue: DispatchQueue!

    private var captureSessionNotConfigured: Bool {
        !isCaptureSessionConfigured
    }

    private var captureDevice: AVCaptureDevice? {
        didSet {
            guard let captureDevice = captureDevice else { return }
            logger.debug("Using capture device: \(captureDevice.localizedName)")
        }
    }

    var sessionIsRunning: Bool {
        captureSession.isRunning
    }

    var sessionIsNotRunning: Bool {
        !captureSession.isRunning
    }

    var isPreviewPaused = false

    private var addToPreviewStream: ((CIImage) -> Void)?

    lazy var previewStream: AsyncStream<CIImage> = {
        AsyncStream { continuation in
            addToPreviewStream = { ciImage in
                if !self.isPreviewPaused {
                    continuation.yield(ciImage)
                }
            }
        }
    }()

    override init() {
        super.init()
        initialize()
    }

    deinit {
        logger.debug(">>> Camera deinit")
    }

    private func initialize() {
        logger.debug(">>> Camera initialize")
        sessionQueue = DispatchQueue(label: "session queue")
        captureDevice = AVCaptureDevice.default(for: .video)
    }

    func start() async {
        let authorized = await checkAuthorization()
        guard authorized else {
            logger.error("Camera access was not authorized.")
            return
        }

        if isCaptureSessionConfigured && sessionIsNotRunning {
            sessionQueue.async { [weak self] in
                self?.captureSession.startRunning()
            }
            return
        }

        // not configured, so configure now
        sessionQueue.async { [weak self] in
            self?.configureCaptureSession { success in
                guard success else { return }
                self?.captureSession.startRunning()
            }
        }
    }

    func stop() {
        guard isCaptureSessionConfigured else { return }

        if sessionIsRunning {
            sessionQueue.async {
                self.captureSession.stopRunning()
            }
        }
    }

    private func configureCaptureSession(completionHandler: (_ success: Bool) -> Void) {
        var success = false

        self.captureSession.beginConfiguration()

        defer {
            self.captureSession.commitConfiguration()
            completionHandler(success)
        }

        guard
            let captureDevice = captureDevice,
            let deviceInput = try? AVCaptureDeviceInput(device: captureDevice)
        else {
            logger.error("Failed to obtain video input.")
            return
        }

        let photoOutput = AVCapturePhotoOutput()

        captureSession.sessionPreset = AVCaptureSession.Preset.photo

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "VideoDataOutputQueue"))

        guard captureSession.canAddInput(deviceInput) else {
            logger.error("Unable to add device input to capture session.")
            return
        }
        guard captureSession.canAddOutput(photoOutput) else {
            logger.error("Unable to add photo output to capture session.")
            return
        }
        guard captureSession.canAddOutput(videoOutput) else {
            logger.error("Unable to add video output to capture session.")
            return
        }

        captureSession.addInput(deviceInput)
        captureSession.addOutput(photoOutput)
        captureSession.addOutput(videoOutput)

        self.deviceInput = deviceInput
        self.photoOutput = photoOutput
        self.videoOutput = videoOutput

        photoOutput.maxPhotoQualityPrioritization = .speed

        updateVideoOutputConnection()

        isCaptureSessionConfigured = true
        success = true
    }

    private func checkAuthorization() async -> Bool {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            logger.debug("Camera access authorized.")
            return true
        case .notDetermined:
            logger.debug("Camera access not determined.")
            sessionQueue.suspend()
            let status = await AVCaptureDevice.requestAccess(for: .video)
            sessionQueue.resume()
            return status
        case .denied:
            logger.debug("Camera access denied.")
            return false
        case .restricted:
            logger.debug("Camera library access restricted.")
            return false
        @unknown default:
            return false
        }
    }

    private func updateVideoOutputConnection() {
        if let videoOutput = videoOutput,
           let videoOutputConnection = videoOutput.connection(with: .video) {
            let angle = 90.0
            if videoOutputConnection.isVideoRotationAngleSupported(angle) {
                videoOutputConnection.videoRotationAngle = angle
            }
        }
    }

    func takePhoto() {
        guard let photoOutput = self.photoOutput else { return }

        sessionQueue.async {
            var photoSettings = AVCapturePhotoSettings()

            if photoOutput.availablePhotoCodecTypes.contains(.hevc) {
                photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
            }

            photoOutput.capturePhoto(with: photoSettings, delegate: self)
        }
    }
}

extension Camera: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    }
}

extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
        addToPreviewStream?(CIImage(cvPixelBuffer: pixelBuffer))
    }
}

fileprivate let logger = Logger(subsystem: "com.apple.swiftplaygroundscontent.capturingphotos", category: "Camera")
DataModel()
import Combine
import AVFoundation
import SwiftUI
import os.log
final class DataModel: ObservableObject {
    let camera = Camera()

    @Published var frame: Image?
    var isPhotosLoaded = false

    init() {
        print("DataModel init")
        Task {
            await handleCameraPreviews()
        }
    }

    deinit {
        print("DataModel deinit")
    }

    func handleCameraPreviews() async {
        let imageStream = camera.previewStream
            .map { $0.image }

        for await image in imageStream {
            Task { @MainActor in
                frame = image
            }
        }
    }
}

fileprivate extension CIImage {
    var image: Image? {
        let ciContext = CIContext()
        guard let cgImage = ciContext.createCGImage(self, from: self.extent) else { return nil }
        return Image(decorative: cgImage, scale: 1, orientation: .up)
    }
}

fileprivate extension Image.Orientation {
    init(_ cgImageOrientation: CGImagePropertyOrientation) {
        switch cgImageOrientation {
        case .up: self = .up
        case .upMirrored: self = .upMirrored
        case .down: self = .down
        case .downMirrored: self = .downMirrored
        case .left: self = .left
        case .leftMirrored: self = .leftMirrored
        case .right: self = .right
        case .rightMirrored: self = .rightMirrored
        }
    }
}

fileprivate let logger = Logger(subsystem: "com.apple.swiftplaygroundscontent.capturingphotos", category: "DataModel")
ContentView()
struct ContentView: View {
    enum Screen {
        case camera
        case settings
    }

    @State var currentView = Screen.camera

    var body: some View {
        VStack {
            VStack {
                switch currentView {
                case .camera: CameraView()
                case .settings: SettingsView()
                }
            }.frame(maxHeight: .infinity)

            HStack {
                Button("Camera") {
                    currentView = .camera
                }.padding()
                Button("Settings") {
                    currentView = .settings
                }.padding()
            }
        }
    }
}
CameraView()
struct CameraView: View {
    @StateObject private var model = DataModel()

    var body: some View {
        VStack {
            if let frame = model.frame {
                frame
                    .resizable()
                    .scaledToFit()
            }
        }
        .task {
            await model.camera.start()
        }
    }
}
SettingsView()
struct SettingsView: View {
    var body: some View {
        Text("Settings View")
    }
}


