AVAssetWriter unable to record audio with video | Crashing

Nov 25 2020

I'm trying to capture video/audio frames from CMSampleBuffer, but I'm unable to get a proper video recording.

Expected output: a video file in .mp4 format containing both audio (from the microphone) and video frames.

Current output: an empty directory / a video file with no audio.

It crashes at runtime with: Media type of sample buffer must match receiver's media type ("soun")

I've tried almost everything available online to fix this. A deadline is approaching and I'm pulling my hair out trying to figure out what exactly is going on. Any help or pointers are greatly appreciated.

Below is the source.

CameraController.swift

import UIKit
import AVFoundation

class CameraController: UIViewController, SFrameCaptureDelegate {
    
    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
        
        assetWriter = AssetManager(filename: UUID().uuidString.appending(".mp4"))
        frameBuffer.delegate = self
        frameBuffer.startSession()
        
    }
    
    
    var previewView: PreviewView = {
        let instance = PreviewView()
        return instance
    }()
    
    var frameBuffer = FrameCapture(type: .AudioVideo)
    
    var captureButton: UIButton = {
        let instance = UIButton()
        instance.setTitle("Capture", for: .normal)
        instance.backgroundColor = .white
        return instance
    }()
 
    // whether the user is currently recording frames from the phone
    var frameCaptureRunning = false
    
    var assetWriter : AssetManager!
    
    var videoDirectoryPath = SFileManager.shared.getDocumentDirectory()
    
    func setupUI() {
        
        view.addSubview(previewView)
        previewView.top(to: view)
        previewView.left(to: view)
        previewView.right(to: view)
        previewView.height(view.frame.height)
        
        previewView.session = frameBuffer.session
        
        
        view.addSubview(captureButton)
        captureButton.size(CGSize(width: 100, height: 100))
        captureButton.centerX(to: view)
        captureButton.bottom(to: view, offset: -20)
        captureButton.addTarget(self, action: #selector(startpic), for: .touchDown)
        captureButton.addTarget(self, action: #selector(stopic), for: .touchUpInside)
    }
    
    @objc func startpic() {
        frameCaptureRunning = true
        assetWriter.isRecording = true
    }
    
    @objc func stopic() {
        frameCaptureRunning = false
        assetWriter.isRecording = false
        assetWriter.finish {
            DispatchQueue.main.async {
                let activity = UIActivityViewController(activityItems: [self.assetWriter.url!], applicationActivities: nil)
                self.present(activity, animated: true, completion: nil)
            }
            print("This -- ",self.assetWriter.url.path)
            do {
                let attr = try FileManager.default.attributesOfItem(atPath: self.assetWriter.url.path)
                let fileSize = attr[FileAttributeKey.size] as! UInt64
                print("H264 file size = \(fileSize)")

                DispatchQueue.main.async {
                    let player = AVPlayer(url: self.assetWriter.url)
                    let playerLayer = AVPlayerLayer(player: player)
                    playerLayer.videoGravity = .resizeAspectFill
                    playerLayer.frame = self.view.bounds
                    playerLayer.backgroundColor = UIColor.red.cgColor
                    self.view.layer.addSublayer(playerLayer)
                    player.play()
                }
            } catch {
                print("issues with finishing")
            }
        }
        
        
    }
    
    func capturedFrame(buffers: CMSampleBuffer) {
        
        if !frameCaptureRunning { return }
        assetWriter.write(buffer: buffers)
        
    }
    
}

FrameCapture.swift

import AVFoundation

protocol SFrameCaptureDelegate: AnyObject {
    func capturedFrame(buffers: CMSampleBuffer)
}

class FrameCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    
    init(type: SessionType) {
        super.init()
        print("SFC - Frame Buffers initialized with Config - ", type.self)
        sessionType = type
    }
    
    func startSession() {
        print("SFC - Frame Buffers Session Starting")
        sessionQueue.async {
            self.configureSession(type: self.sessionType)
            self.session.startRunning()
        }
    }
    
    weak var delegate: SFrameCaptureDelegate?
    
    enum SessionSetupResult {
        case success
        case notAuthorized
        case configurationFailed
    }
    
    enum SessionType {
        case Audio
        case Video
        case AudioVideo
    }
    
    let session = AVCaptureSession()
    let sessionQueue = DispatchQueue(label: "sessionQueue", qos: .userInitiated)
    let videoQueue = DispatchQueue(label: "videoQueue", qos: .userInitiated)
    let audioQueue = DispatchQueue(label: "audioQueue", qos: .userInitiated)
    var setupResult: SessionSetupResult = .success
    var sessionType: SessionType = .Video
    
    @objc dynamic var videoDeviceInput: AVCaptureDeviceInput!
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()
    var photoQualityPrioritizationMode: AVCapturePhotoOutput.QualityPrioritization = .balanced
    
// MARK: - SessionConfig
    func configureSession(type: SessionType) {
        
        if setupResult != .success { return }
        
        session.beginConfiguration()
        session.sessionPreset = .high
        
        do {
            var defaultVideoDevice: AVCaptureDevice?
            
            if let dualCameraDevice = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) {
                defaultVideoDevice = dualCameraDevice
            } else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back){
                defaultVideoDevice = backCameraDevice
            } else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front){
                defaultVideoDevice = frontCameraDevice
            }
            
            guard let videoDevice = defaultVideoDevice else {
                print("CAM - Camera unavailable")
                setupResult = .configurationFailed
                session.commitConfiguration()
                return
            }
            
            let videoInputDevice = try AVCaptureDeviceInput(device: videoDevice)
            
            if session.canAddInput(videoInputDevice) {
                session.addInput(videoInputDevice)
                videoDeviceInput = videoInputDevice
            } else {
                print("CAM - Couldn't add input to the session")
                setupResult = .configurationFailed
                session.commitConfiguration()
                return
            }
        } catch {
            print("CAM - Couldn't create device input. Error - ", error.localizedDescription)
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        
        if sessionType == .AudioVideo {
            do {
                let audioDevice = AVCaptureDevice.default(for: .audio)
                let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!)
                print("SFC - in audio device input")
                if session.canAddInput(audioDeviceInput) {
                    session.addInput(audioDeviceInput)
                } else { print("CAM - Couldn't add audio input device to session.") }
            } catch { print("couldn't create audio input device. Error - ",error.localizedDescription) }
        }
        
        
        videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
            photoQualityPrioritizationMode = .balanced
        } else {
            print("Could not add photo output to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        
        if sessionType == .AudioVideo {
            audioOutput.setSampleBufferDelegate(self, queue: audioQueue)
            if session.canAddOutput(audioOutput) {
                session.addOutput(audioOutput)
            } else {
                print("Couldn't add audio output")
                setupResult = .configurationFailed
                session.commitConfiguration()
            }
        }
        
        videoOutput.connections.first?.videoOrientation = .portrait
        videoOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA ]
        videoOutput.alwaysDiscardsLateVideoFrames = true
        
        session.commitConfiguration()

    }
    
// MARK: - CMSampleBufferDelegate
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        self.delegate?.capturedFrame(buffers: sampleBuffer)
    }
    
}

AssetManager.swift

import AVFoundation

class AssetManager: NSObject {
    
    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    var url: URL!
    
    let writerQueue = DispatchQueue(label: "writerQueue", qos: .utility)
    
    var isRecording = false
    var video_frames_written = false
    
    init(filename: String) {
        super.init()
        self.videoDirectory.appendPathComponent(filename)
        self.url = self.videoDirectory
        
    }

    private var videoDirectory = SFileManager.shared.getDocumentDirectory()

    
    private func setupWriter() {
        
        SFileManager.shared.clearPreviousFiles(withPath: videoDirectory.path)
        SFileManager.shared.createNewDirectory(withPath: videoDirectory.path)
        printLog(item: self.videoDirectory)
        
        
        self.assetWriter = try? AVAssetWriter(outputURL: self.videoDirectory, fileType: AVFileType.mp4)
        
        let videoOutputSettings = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoHeightKey: 1280,
            AVVideoWidthKey:720
        ] as [String : Any]
        
        
        self.videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
        self.videoInput?.expectsMediaDataInRealTime = true
        if let videoInput = self.videoInput, (self.assetWriter?.canAdd(videoInput))! {
            self.assetWriter?.add(videoInput)
        }
        
        
        let audioOutputSettings = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44100,
            AVEncoderBitRateKey: 64000
        ] as [String: Any]
        
        
        self.audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        self.audioInput?.expectsMediaDataInRealTime = true
        if let audioInput = self.audioInput, (self.assetWriter?.canAdd(audioInput))! {
            self.assetWriter?.add(audioInput)
            printDone(item: "Asset writer added, \(String(describing: self.audioInput))")
        } else {
            printError(item: "No audio Input")
        }
        
        
    }
    
    
    public func write(buffer: CMSampleBuffer) {
        writerQueue.sync {
            
            if assetWriter == nil { self.setupWriter() }

            if self.assetWriter?.status == .unknown {
                self.assetWriter?.startWriting()
                self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
                printDone(item: "Started AssetWriter")
            }

            if self.assetWriter?.status == .failed {
                printError(item: "Asset Writer Failed with Error: \(String(describing: self.assetWriter?.error))")
            }

            if CMSampleBufferDataIsReady(buffer) {

                if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
                    videoInput.append(buffer)
                }
                
                if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
                    audioInput.append(buffer) // Crashes at this line
                }
            }
        }
    }
    
    public func finish(completion: @escaping (() -> Void)) {
        writerQueue.async {
            self.assetWriter?.finishWriting(completionHandler: { [self] in
                printDone(item: "Finished Writing")
                completion()
            })
        }
    }
}

Answers

1 RhythmicFistman Nov 27 2020 at 06:02

You're appending a video buffer to your audioInput and, depending on how the buffers arrive, you may also be appending an audio buffer to your videoInput.

In your case the CMSampleBuffers contain either audio or video, so you need to append audio buffers to the audioInput and video buffers to the videoInput.
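
A minimal sketch of that fix, assuming the delegate is widened to report which capture output produced the buffer; the isVideo parameter and the two-argument write(buffer:isVideo:) are hypothetical names, not part of the original code:

import AVFoundation

// Hypothetical: the delegate now reports which output the buffer came from.
protocol SFrameCaptureDelegate: AnyObject {
    func capturedFrame(buffers: CMSampleBuffer, isVideo: Bool)
}

// In FrameCapture: compare `output` against the outputs added to the session.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    delegate?.capturedFrame(buffers: sampleBuffer, isVideo: output == videoOutput)
}

// In AssetManager: append each buffer only to the matching writer input.
public func write(buffer: CMSampleBuffer, isVideo: Bool) {
    writerQueue.sync {
        // ... writer setup and startSession(atSourceTime:) unchanged ...
        guard CMSampleBufferDataIsReady(buffer) else { return }
        if isVideo, let videoInput = videoInput, videoInput.isReadyForMoreMediaData {
            videoInput.append(buffer)
        } else if !isVideo, let audioInput = audioInput, audioInput.isReadyForMoreMediaData {
            audioInput.append(buffer)
        }
    }
}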

You can distinguish the two types of buffer by comparing the output argument of captureOutput:didOutput: against your audioOutput and videoOutput, as the sketch above does, or by querying the buffer's CMSampleBufferGetFormatDescription()'s CMFormatDescriptionGetMediaType(), but that's more involved.
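
For completeness, a sketch of that media-type check, written as an assumed adaptation of the appending section of the asker's AssetManager.write(buffer:); only that section is shown:

import AVFoundation
import CoreMedia

// Inside write(buffer:), after the writer has been started:
guard CMSampleBufferDataIsReady(buffer),
      let format = CMSampleBufferGetFormatDescription(buffer) else { return }

// Route the buffer by its media type instead of appending it to both inputs.
switch CMFormatDescriptionGetMediaType(format) {
case kCMMediaType_Video:
    if let videoInput = videoInput, videoInput.isReadyForMoreMediaData {
        videoInput.append(buffer)
    }
case kCMMediaType_Audio:
    if let audioInput = audioInput, audioInput.isReadyForMoreMediaData {
        audioInput.append(buffer)
    }
default:
    break
}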