Convert CMSampleBuffer to .mov while broadcasting with ReplayKit
Aug 19 2020
Please tell me what might be wrong with my code. I am trying to record my screen broadcast into a file (newFile.mov) and then send it, along with some data, to the server, but nothing is saved and the file is empty after the capture stops.
This is my code from BExtensionUpload, the broadcast upload extension that captures the broadcast and writes it to a file in the App Group container, where I then pick it up and send it to the server:
import ReplayKit
import AVFoundation

class SampleHandler: RPBroadcastSampleHandler {

    let appIdentifier = "com.group.CY"

    var videoWriterInput: AVAssetWriterInput!
    var audioWriterInput: AVAssetWriterInput!
    var videoWriter: AVAssetWriter!
    var sessionAtSourceTime: CMTime!
    var outputFileLocation: URL?

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        do {
            outputFileLocation = videoFileLocation()
            videoWriter = try AVAssetWriter(outputURL: outputFileLocation!, fileType: AVFileType.mov)

            videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: [
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoWidthKey: 720,
                AVVideoHeightKey: 1280,
                AVVideoCompressionPropertiesKey: [
                    AVVideoAverageBitRateKey: 2300000,
                ],
            ])
            videoWriterInput.expectsMediaDataInRealTime = true

            if videoWriter.canAdd(videoWriterInput) {
                videoWriter.add(videoWriterInput)
            } else {
                print("no input added")
            }

            audioWriterInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
            audioWriterInput.expectsMediaDataInRealTime = true
            if videoWriter.canAdd(audioWriterInput!) {
                videoWriter.add(audioWriterInput!)
            }

            videoWriter.startWriting()
        } catch let error {
            debugPrint(error.localizedDescription)
        }
    }

    override func broadcastFinished() {
        super.broadcastFinished()
        videoWriterInput.markAsFinished()
        videoWriter.finishWriting {
            ()
        }
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        super.processSampleBuffer(sampleBuffer, with: sampleBufferType)

        // Note: `writable` is not defined anywhere in this snippet.
        if writable,
            sessionAtSourceTime == nil {
            sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            videoWriter.startSession(atSourceTime: sessionAtSourceTime!)
        }

        switch sampleBufferType {
        case .video:
            if videoWriterInput.isReadyForMoreMediaData {
                videoWriterInput.append(sampleBuffer)
            }
        case .audioApp:
            if audioWriterInput.isReadyForMoreMediaData {
                audioWriterInput?.append(sampleBuffer)
            }
        case .audioMic:
            print("mic")
        @unknown default:
            print("unknown")
        }
    }

    func videoFileLocation() -> URL? {
        let fileManager = FileManager.default
        do {
            if let container = fileManager.containerURL(forSecurityApplicationGroupIdentifier: appIdentifier) {
                let videoContainer = container.appendingPathComponent("Video")
                try? fileManager.createDirectory(at: videoContainer, withIntermediateDirectories: false, attributes: nil)
                let videoOutputUrl = videoContainer.appendingPathComponent("newFile").appendingPathExtension("mov")
                if fileManager.fileExists(atPath: videoOutputUrl.path) {
                    try fileManager.removeItem(at: videoOutputUrl)
                }
                fileManager.createFile(atPath: videoOutputUrl.path, contents: nil, attributes: nil)
                return videoOutputUrl
            }
        } catch {
            print(error)
        }
        return nil
    }
}
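For the "send it to the server" step described above, this is a minimal sketch of how the containing app could pick up the finished newFile.mov from the shared App Group container and hand it to URLSession. The uploadRecording() function and the endpoint URL are placeholder assumptions, not taken from the question.
import Foundation

// Sketch: read the finished recording from the shared App Group container and upload it.
// The endpoint "https://example.com/upload" is a placeholder.
func uploadRecording() {
    let appIdentifier = "com.group.CY"
    let fileManager = FileManager.default
    guard let container = fileManager.containerURL(forSecurityApplicationGroupIdentifier: appIdentifier) else { return }

    let fileURL = container
        .appendingPathComponent("Video")
        .appendingPathComponent("newFile")
        .appendingPathExtension("mov")
    guard fileManager.fileExists(atPath: fileURL.path) else { return }

    var request = URLRequest(url: URL(string: "https://example.com/upload")!) // placeholder endpoint
    request.httpMethod = "POST"
    request.setValue("video/quicktime", forHTTPHeaderField: "Content-Type")

    // Stream the file from disk instead of loading it into memory.
    URLSession.shared.uploadTask(with: request, fromFile: fileURL) { _, response, error in
        if let error = error {
            print("Upload failed: \(error)")
        } else {
            print("Upload finished: \(String(describing: response))")
        }
    }.resume()
}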
Answers
1 AntonDanilov Aug 27 2020 at 07:00
This code works for me, but in addition to my directory in the App Group I had to use PHPhotoLibrary. The App Group directory is not the only way to create the file; you can use whatever directory you like. I hope this helps someone.
import ReplayKit
import AVFoundation
import Photos
import UIKit

class SampleHandler: RPBroadcastSampleHandler {

    let appIdentifier = "group.CY"
    let fileManager = FileManager.default

    var videoWriterInput: AVAssetWriterInput!
    var microphoneWriterInput: AVAssetWriterInput!
    var videoWriter: AVAssetWriter!
    var sessionBeginAtSourceTime: CMTime!
    var isRecording = false
    var outputFileLocation: URL!

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        guard !isRecording else { return }
        isRecording = true
        sessionBeginAtSourceTime = nil
        setUpWriter()
    }

    func setUpWriter() {
        let width = UIScreen.main.bounds.width * 2
        let height = UIScreen.main.bounds.height * 2

        self.outputFileLocation = videoFileLocation()

        // Add the video input
        videoWriter = try? AVAssetWriter.init(outputURL: self.outputFileLocation, fileType: AVFileType.mp4)

        let videoCompressionPropertys = [
            AVVideoAverageBitRateKey: width * height * 10.1
        ]
        let videoSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height,
            AVVideoCompressionPropertiesKey: videoCompressionPropertys
        ]

        videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
        videoWriterInput.expectsMediaDataInRealTime = true

        // Add the microphone input
        var acl = AudioChannelLayout()
        memset(&acl, 0, MemoryLayout<AudioChannelLayout>.size)
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono

        let audioOutputSettings: [String: Any] = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVSampleRateKey: 44100,
            AVNumberOfChannelsKey: 1,
            AVEncoderBitRateKey: 64000,
            AVChannelLayoutKey: Data(bytes: &acl, count: MemoryLayout<AudioChannelLayout>.size)
        ]

        microphoneWriterInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioOutputSettings)
        microphoneWriterInput.expectsMediaDataInRealTime = true

        if videoWriter.canAdd(videoWriterInput) {
            videoWriter.add(videoWriterInput)
        }
        if videoWriter.canAdd(microphoneWriterInput) {
            videoWriter.add(microphoneWriterInput)
        }

        videoWriter.startWriting()
    }

    override func broadcastFinished() {
        guard isRecording else { return }
        isRecording = false
        sessionBeginAtSourceTime = nil

        let dispatchGroup = DispatchGroup()
        dispatchGroup.enter()

        videoWriterInput.markAsFinished()
        microphoneWriterInput.markAsFinished()

        var finishedWriting = false
        videoWriter.finishWriting {
            PHPhotoLibrary.shared().performChanges({
                PHAssetCollectionChangeRequest.creationRequestForAssetCollection(withTitle: "xxx")
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.outputFileLocation)
            }) { completed, error in
                if completed {
                    NSLog("Video \(self.outputFileLocation.path ?? "") has been moved to camera roll")
                }
                if error != nil {
                    NSLog("ERROR:::Cannot move the video \(self.outputFileLocation.path ?? "") to camera roll, error: \(error!.localizedDescription)")
                }
                finishedWriting = true
            }
            while finishedWriting == false {
                // NSLog("DEBUG:::Waiting to finish writing...")
            }
            dispatchGroup.leave()
        }

        dispatchGroup.wait() // <= blocks the thread here
    }

    override func finishBroadcastWithError(_ error: Error) {
        let e = error
        print(e)
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        let writable = canWrite()

        if writable, sessionBeginAtSourceTime == nil {
            sessionBeginAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            videoWriter.startSession(atSourceTime: sessionBeginAtSourceTime!)
        }

        if writable {
            switch sampleBufferType {
            case .video:
                if videoWriterInput.isReadyForMoreMediaData {
                    videoWriterInput.append(sampleBuffer)
                }
            case .audioApp:
                print("audio")
            case .audioMic:
                if microphoneWriterInput.isReadyForMoreMediaData {
                    microphoneWriterInput.append(sampleBuffer)
                }
            @unknown default:
                print("unknown")
            }
        }
    }

    func videoFileLocation() -> URL {
        let documentsPath = fileManager.containerURL(forSecurityApplicationGroupIdentifier: appIdentifier)!
        let videoOutputUrl = documentsPath
            .appendingPathComponent("Library")
            .appendingPathComponent("Caches")
            .appendingPathComponent("mobile")
            .appendingPathExtension("mp4")
        do {
            if fileManager.fileExists(atPath: videoOutputUrl.path) {
                try fileManager.removeItem(at: videoOutputUrl)
            }
        } catch { print(error) }
        return videoOutputUrl
    }

    func canWrite() -> Bool {
        return videoWriter != nil && isRecording && videoWriter?.status == .writing
    }
}
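Since the answer saves the finished recording to the photo library through PHPhotoLibrary, the write only succeeds if the extension is allowed to add to the library (NSPhotoLibraryAddUsageDescription in the extension's Info.plist, plus user authorization). Below is an illustrative sketch of requesting that authorization before calling performChanges; the saveToPhotoLibrary helper is an assumption for illustration, not part of the answer, and the exact prompting behavior inside a broadcast extension may vary.
import Photos

// Illustrative helper (not from the answer): request photo library authorization,
// then save the video. Requires NSPhotoLibraryAddUsageDescription in Info.plist.
func saveToPhotoLibrary(_ fileURL: URL, completion: @escaping (Bool) -> Void) {
    PHPhotoLibrary.requestAuthorization { status in
        guard status == .authorized else {
            completion(false)
            return
        }
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
        }) { success, error in
            if let error = error {
                NSLog("Could not save \(fileURL.path): \(error.localizedDescription)")
            }
            completion(success)
        }
    }
}
In the answer above, such a helper would be called from the finishWriting completion in place of the direct performChanges call.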