ios – AVFoundation Swift Toggling Front and Back Camera causes bug


I have a video recorder built with AVFoundation in Swift. The Recorder can take multiple clips and connect them into one video. For example you can start the video then stop it, then start it again and stop it; the final video will be both clips combined. If I take the first video using the front camera then stop the clip, toggle to the back camera and take another clip then a final clip is made successfully. However if I take one clip using both the front and back camera then the video fails and the clip doesn’t process. Why does toggling between the camera perspective during a video cause this problem?

import SwiftUI
import SwiftUI
import AVKit
import AVFoundation

class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate {
   @Published var session = AVCaptureSession()
   @Published var alert = false
   @Published var output = AVCaptureMovieFileOutput()
   @Published var preview: AVCaptureVideoPreviewLayer!
   @Published var isRecording: Bool = false
   @Published var recordedURLs: [URL] = []
   @Published var previewURL: URL?
   @Published var showPreview: Bool = false
   @Published var recordedDuration: CGFloat = 0
   @Published var maxDuration: CGFloat = 20
   var currentCameraPosition: AVCaptureDevice.Position = .back
   
   override init() {
       super.init()
       self.checkPermission()
       self.preview = AVCaptureVideoPreviewLayer(session: session)
       self.preview.videoGravity = .resizeAspectFill
   }

   func flipCamera() {
       // Create a discovery session to find all available video devices
       let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .unspecified)

       // Get all available video devices
       let videoDevices = discoverySession.devices

       // Check if there is more than one video device
       guard videoDevices.count > 1 else {
           return // If not, return early
       }

       // Get the current input
       guard let currentVideoInput = session.inputs.first as? AVCaptureDeviceInput else {
           return
       }

       // Get the new camera position
       let newCameraPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back

       // Find the new camera device
       if let newCamera = videoDevices.first(where: { $0.position == newCameraPosition }) {
           // Create a new video input
           do {
               let newVideoInput = try AVCaptureDeviceInput(device: newCamera)

               // Remove the current input
               session.removeInput(currentVideoInput)

               // Add the new input
               if session.canAddInput(newVideoInput) {
                   session.addInput(newVideoInput)
                   currentCameraPosition = newCameraPosition
               } else {
                   // Handle the case where adding the new input fails
                   print("Failed to add new camera input")
               }
           } catch {
               // Handle any errors that occur while creating the new input
               print("Error creating new camera input: \(error.localizedDescription)")
           }
       }
   }
   
   func checkPermission(){
       switch AVCaptureDevice.authorizationStatus(for: .video) {
       case .authorized:
           checkAudioPermission()
           return
       case .notDetermined:
           AVCaptureDevice.requestAccess(for: .video) { (status) in
               if status {
                   self.checkAudioPermission()
               }
           }
       case .denied:
           self.alert.toggle()
           return
       default:
           return
       }
   }
   
   func checkAudioPermission() {
       switch AVCaptureDevice.authorizationStatus(for: .audio) {
       case .authorized:
           setUp()
           return
       case .notDetermined:
           AVCaptureDevice.requestAccess(for: .audio) { (audioStatus) in
               if audioStatus {
                   self.setUp()
               }
           }
       case .denied:
           self.alert.toggle()
           return
       default:
           return
       }
   }
   
   func setUp(){
       do {
           self.session.beginConfiguration()
           let cameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
           let videoInput = try AVCaptureDeviceInput(device: cameraDevice!)
           let audioDevice = AVCaptureDevice.default(for: .audio)
           let audioInput = try AVCaptureDeviceInput(device: audioDevice!)

           if self.session.canAddInput(videoInput) && self.session.canAddInput(audioInput){
               self.session.addInput(videoInput)
               self.session.addInput(audioInput)
           }

           if self.session.canAddOutput(self.output){
               self.session.addOutput(self.output)
           }
           self.session.commitConfiguration()
       }
       catch{
           print(error.localizedDescription)
       }
   }
   
   func startRecording(){
       // MARK: Temporary URL for recording Video
       let tempURL = NSTemporaryDirectory() + "\(Date()).mov"
       output.startRecording(to: URL(fileURLWithPath: tempURL), recordingDelegate: self)
       isRecording = true
   }
   
   func stopRecording(){
       output.stopRecording()
       isRecording = false
   }
   
   func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
       if let error = error {
           print(error.localizedDescription)
           return
       }
       
       // CREATED SUCCESSFULLY
       print(outputFileURL)
       self.recordedURLs.append(outputFileURL)
       if self.recordedURLs.count == 1{
           self.previewURL = outputFileURL
           return
       }
       
       // CONVERTING URLs TO ASSETS
       let assets = recordedURLs.compactMap { url -> AVURLAsset in
           return AVURLAsset(url: url)
       }
       
       self.previewURL = nil
       // MERGING VIDEOS
       Task {
           await mergeVideos(assets: assets) { exporter in
               exporter.exportAsynchronously {
                   if exporter.status == .failed{
                       // HANDLE ERROR
                       print(exporter.error!)
                   }
                   else{
                       if let finalURL = exporter.outputURL{
                           print(finalURL)
                           DispatchQueue.main.async {
                               self.previewURL = finalURL
                           }
                       }
                   }
               }
           }
       }
   }
   
   func mergeVideos(assets: [AVURLAsset],completion: @escaping (_ exporter: AVAssetExportSession)->()) async {
       
       let compostion = AVMutableComposition()
       var lastTime: CMTime = .zero
       
       guard let videoTrack = compostion.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else{return}
       guard let audioTrack = compostion.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else{return}
       
       for asset in assets {
           // Linking Audio and Video
           do {
               try await videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .video)[0], at: lastTime)
               // Safe Check if Video has Audio
               if try await !asset.loadTracks(withMediaType: .audio).isEmpty {
                   try await audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .audio)[0], at: lastTime)
               }
           }
           catch {
               print(error.localizedDescription)
           }
           
           // Updating Last Time
           do {
               lastTime = try await CMTimeAdd(lastTime, asset.load(.duration))
           } catch {
               print(error.localizedDescription)
           }
       }
       
       // MARK: Temp Output URL
       let tempURL = URL(fileURLWithPath: NSTemporaryDirectory() + "Reel-\(Date()).mp4")
       
       // VIDEO IS ROTATED
       // BRINGING BACK TO ORIGNINAL TRANSFORM
       
       let layerInstructions = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
       
       // MARK: Transform
       var transform = CGAffineTransform.identity
       transform = transform.rotated(by: 90 * (.pi / 180))
       transform = transform.translatedBy(x: 0, y: -videoTrack.naturalSize.height)
       layerInstructions.setTransform(transform, at: .zero)
       
       let instructions = AVMutableVideoCompositionInstruction()
       instructions.timeRange = CMTimeRange(start: .zero, duration: lastTime)
       instructions.layerInstructions = [layerInstructions]
       
       let videoComposition = AVMutableVideoComposition()
       videoComposition.renderSize = CGSize(width: videoTrack.naturalSize.height, height: videoTrack.naturalSize.width)
       videoComposition.instructions = [instructions]
       videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
       
       guard let exporter = AVAssetExportSession(asset: compostion, presetName: AVAssetExportPresetHighestQuality) else{return}
       exporter.outputFileType = .mp4
       exporter.outputURL = tempURL
       exporter.videoComposition = videoComposition
       completion(exporter)
   }
}


//IGNORE NOT IMPORTANT TO QUESTION
struct HomeStory: View {
   @StateObject var cameraModel = CameraViewModel()
   
   var body: some View {
       ZStack(alignment: .bottom) {
           CameraStoryView()
               .environmentObject(cameraModel)
               .clipShape(RoundedRectangle(cornerRadius: 30, style: .continuous))
               .padding(.top,10)
               .padding(.bottom,30)
           
           ZStack {
               Button {
                   if cameraModel.isRecording {
                       cameraModel.stopRecording()
                   } else {
                       cameraModel.startRecording()
                   }
               } label: {
                   if cameraModel.isRecording {
                       Circle().frame(width: 95, height: 95).foregroundStyle(.red).opacity(0.7)
                   } else {
                       ZStack {
                           Color.gray.opacity(0.001)
                           Circle().stroke(.white, lineWidth: 7).frame(width: 80, height: 80)
                       }.frame(width: 95, height: 95)
                   }
               }
               
               Button {
                   cameraModel.flipCamera()
               } label: {
                   Image(systemName: "arrow.triangle.2.circlepath.camera")
                       .font(.title)
                       .foregroundColor(.white)
                       .padding()
                       .background(Circle().fill(Color.black.opacity(0.7)))
               }.offset(x: -100)
               
               Button {
                   if let _ = cameraModel.previewURL {
                       cameraModel.showPreview.toggle()
                   }
               } label: {
                   if cameraModel.previewURL == nil && !cameraModel.recordedURLs.isEmpty {
                       ProgressView().tint(.black)
                   } else {
                       HStack {
                           Text("Preview")
                           Image(systemName: "chevron.right")
                       }
                       .padding()
                       .foregroundColor(.black).font(.body)
                       .background {
                           Capsule().foregroundStyle(.ultraThinMaterial)
                       }
                   }
               }
               .padding(.horizontal,20)
               .padding(.vertical,8)
               .frame(maxWidth: .infinity,alignment: .trailing)
               .padding(.trailing)
               .opacity((cameraModel.previewURL == nil && cameraModel.recordedURLs.isEmpty) || cameraModel.isRecording ? 0 : 1)
           }
           .frame(maxHeight: .infinity,alignment: .bottom)
           .padding(.bottom,10)
           .padding(.bottom,30)
           
           Button {
               cameraModel.recordedDuration = 0
               cameraModel.previewURL = nil
               cameraModel.recordedURLs.removeAll()
           } label: {
               Image(systemName: "xmark")
                   .font(.title)
                   .foregroundColor(.white)
           }
           .frame(maxWidth: .infinity,maxHeight: .infinity,alignment: .topLeading)
           .padding()
           .padding(.top)
           .opacity(!cameraModel.recordedURLs.isEmpty && cameraModel.previewURL != nil && !cameraModel.isRecording ? 1 : 0)
       }
       .overlay(content: {
           if let url = cameraModel.previewURL, cameraModel.showPreview {
               FinalPreview(url: url, showPreview: $cameraModel.showPreview)
                   .transition(.move(edge: .trailing))
           }
       })
       .animation(.easeInOut, value: cameraModel.showPreview)
       .preferredColorScheme(.dark)
   }
}
struct FinalPreview: View {
   var url: URL
   @Binding var showPreview: Bool
   
   var body: some View {
       GeometryReader { proxy in
           let size = proxy.size
           
           VideoPlayer(player: AVPlayer(url: url))
               .aspectRatio(contentMode: .fill)
               .frame(width: size.width, height: size.height)
               .clipShape(RoundedRectangle(cornerRadius: 30, style: .continuous))
               .overlay(alignment: .topLeading) {
                   Button {
                       showPreview.toggle()
                   } label: {
                       Label {
                           Text("Back")
                       } icon: {
                           Image(systemName: "chevron.left")
                       }
                       .foregroundColor(.white)
                   }
                   .padding(.leading)
                   .padding(.top,22)
               }
       }
   }
}

struct CameraStoryView: View {
   @EnvironmentObject var cameraModel: CameraViewModel
   var body: some View {
       
       GeometryReader { proxy in
           let size = proxy.size
           
           CameraPreview(size: size).environmentObject(cameraModel)
          
       }
       .onReceive(Timer.publish(every: 0.01, on: .main, in: .common).autoconnect()) { _ in
           if cameraModel.recordedDuration <= cameraModel.maxDuration && cameraModel.isRecording{
               cameraModel.recordedDuration += 0.01
           }
           
           if cameraModel.recordedDuration >= cameraModel.maxDuration && cameraModel.isRecording{
               cameraModel.stopRecording()
               cameraModel.isRecording = false
           }
       }
   }
}

struct CameraPreview: UIViewRepresentable {
   @EnvironmentObject var cameraModel : CameraViewModel
   var size: CGSize
   
   func makeUIView(context: Context) -> UIView {
       let view = UIView(frame: CGRect(origin: .zero, size: size))
       guard let preview = cameraModel.preview else { return view }

       preview.frame = view.bounds
       preview.videoGravity = .resizeAspectFill
       view.layer.addSublayer(preview)

       DispatchQueue.global(qos: .userInitiated).async {
           if !self.cameraModel.session.isRunning {
               self.cameraModel.session.startRunning()
           }
       }
       
       return view
   }
   
   func updateUIView(_ uiView: UIView, context: Context) { }
}

Latest articles

spot_imgspot_img

Related articles

Leave a reply

Please enter your comment!
Please enter your name here

spot_imgspot_img