Open xHeinrich opened 1 month ago
Any update on this? I'm facing a similar issue.
"react": "18.2.0",
"react-native": "0.74.1",
"react-native-vision-camera": "^4.0.3"
I think it must be some timing issue with the asset writer, but I don't know enough Swift to find the actual issue.
same here
This is my naive workaround patch for 3.9.2. Only tested with an iPhone 12 Pro. I'm not an expert on iOS or Swift.
Even though we pause recording, the `captureSession`'s clock keeps going (we cannot stop the `captureSession` because we still need to show the camera preview to the users). It seems that `AVAssetWriter` only considers the timestamps recorded in the `CMSampleBuffer`s. The idea is to adjust the timestamps in the buffer.
Here is a demo video, recorded the same way as the author's.
diff --git a/ios/Core/CameraSession+Video.swift b/ios/Core/CameraSession+Video.swift
index 00ff941b1d4cee15323f1f960a19a14613acab01..69e57e4092d99104793b994e9273a37dd301c18f 100644
--- a/ios/Core/CameraSession+Video.swift
+++ b/ios/Core/CameraSession+Video.swift
@@ -157,11 +157,12 @@ extension CameraSession {
func pauseRecording(promise: Promise) {
CameraQueues.cameraQueue.async {
withPromise(promise) {
- guard self.recordingSession != nil else {
+ guard let recordingSession = self.recordingSession else {
// there's no active recording!
throw CameraError.capture(.noRecordingInProgress)
}
self.isRecording = false
+ try recordingSession.pause(clock: self.captureSession.clock)
return nil
}
}
@@ -173,11 +174,12 @@ extension CameraSession {
func resumeRecording(promise: Promise) {
CameraQueues.cameraQueue.async {
withPromise(promise) {
- guard self.recordingSession != nil else {
+ guard let recordingSession = self.recordingSession else {
// there's no active recording!
throw CameraError.capture(.noRecordingInProgress)
}
self.isRecording = true
+ try recordingSession.resume(clock: self.captureSession.clock)
return nil
}
}
diff --git a/ios/Core/RecordingSession.swift b/ios/Core/RecordingSession.swift
index 85e9c622573143bd38f0b0ab6f81ad2f40e03cc3..8c4836c97b562bbda362c14f314a0ce96f113d2a 100644
--- a/ios/Core/RecordingSession.swift
+++ b/ios/Core/RecordingSession.swift
@@ -33,6 +33,8 @@ class RecordingSession {
private var startTimestamp: CMTime?
private var stopTimestamp: CMTime?
+ private var pauseTimestamp: CMTime?
+ private var pauseTimestampOffset: CMTime?
private var lastWrittenTimestamp: CMTime?
@@ -67,7 +69,12 @@ class RecordingSession {
let startTimestamp = startTimestamp else {
return 0.0
}
- return (lastWrittenTimestamp - startTimestamp).seconds
+
+ if let pauseTimestampOffset = pauseTimestampOffset {
+ return (lastWrittenTimestamp - startTimestamp - pauseTimestampOffset).seconds
+ } else {
+ return (lastWrittenTimestamp - startTimestamp).seconds
+ }
}
init(url: URL,
@@ -158,6 +165,8 @@ class RecordingSession {
// Start the sesssion at the given time. Frames with earlier timestamps (e.g. late frames) will be dropped.
assetWriter.startSession(atSourceTime: currentTime)
startTimestamp = currentTime
+ pauseTimestamp = nil
+ pauseTimestampOffset = nil
ReactLogger.log(level: .info, message: "Started RecordingSession at time: \(currentTime.seconds)")
if audioWriter == nil {
@@ -195,6 +204,56 @@ class RecordingSession {
}
}
+ /**
+ Record pause timestamp to calculate timestamp offset using the current time of the provided synchronization clock.
+ The clock must be the same one that was passed to start() method.
+ */
+ func pause(clock: CMClock) throws {
+ lock.wait()
+ defer {
+ lock.signal()
+ }
+
+ let currentTime = CMClockGetTime(clock)
+ ReactLogger.log(level: .info, message: "Pausing Asset Writer(s)...")
+
+ guard pauseTimestamp == nil else {
+ ReactLogger.log(level: .error, message: "pauseTimestamp is already non-nil")
+ return
+ }
+
+ pauseTimestamp = currentTime
+ }
+
+ /**
+ Update pause timestamp offset using the current time of the provided synchronization clock.
+ The clock must be the same one that was passed to start() method.
+ */
+ func resume(clock: CMClock) throws {
+ lock.wait()
+ defer {
+ lock.signal()
+ }
+
+ let currentTime = CMClockGetTime(clock)
+ ReactLogger.log(level: .info, message: "Resuming Asset Writer(s)...")
+
+ guard let pauseTimestamp = pauseTimestamp else {
+ ReactLogger.log(level: .error, message: "Tried resume but recording has not been paused")
+ return
+ }
+
+ let pauseOffset = currentTime - pauseTimestamp
+ self.pauseTimestamp = nil
+ if let currentPauseTimestampOffset = pauseTimestampOffset {
+ pauseTimestampOffset = currentPauseTimestampOffset + pauseOffset
+ ReactLogger.log(level: .info, message: "Current pause offset is \(pauseTimestampOffset!.seconds)")
+ } else {
+ pauseTimestampOffset = pauseOffset
+ ReactLogger.log(level: .info, message: "Current pause offset is \(pauseTimestampOffset!.seconds)")
+ }
+ }
+
/**
Appends a new CMSampleBuffer to the Asset Writer.
- Use clock to specify the CMClock instance this CMSampleBuffer uses for relative time
@@ -238,12 +297,32 @@ class RecordingSession {
}
// 3. Actually write the Buffer to the AssetWriter
+ let buf: CMSampleBuffer
+ if let pauseTimestampOffset = pauseTimestampOffset {
+ // let newTime = timestamp - pauseTimestampOffset
+ var count: CMItemCount = 0
+ CMSampleBufferGetSampleTimingInfoArray(buffer, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count)
+ var info = [CMSampleTimingInfo](repeating: CMSampleTimingInfo(duration: CMTimeMake(value: 0, timescale: 0), presentationTimeStamp: CMTimeMake(value: 0, timescale: 0), decodeTimeStamp: CMTimeMake(value: 0, timescale: 0)), count: count)
+ CMSampleBufferGetSampleTimingInfoArray(buffer, entryCount: count, arrayToFill: &info, entriesNeededOut: &count)
+
+ for i in 0..<count {
+ info[i].decodeTimeStamp = info[i].decodeTimeStamp - pauseTimestampOffset
+ info[i].presentationTimeStamp = info[i].presentationTimeStamp - pauseTimestampOffset
+ }
+
+ var out: CMSampleBuffer?
+ CMSampleBufferCreateCopyWithNewTiming(allocator: nil, sampleBuffer: buffer, sampleTimingEntryCount: count, sampleTimingArray: &info, sampleBufferOut: &out)
+ buf = out!
+ } else {
+ buf = buffer
+ }
let writer = getAssetWriter(forType: bufferType)
guard writer.isReadyForMoreMediaData else {
ReactLogger.log(level: .warning, message: "\(bufferType) AssetWriter is not ready for more data, dropping this Frame...")
return
}
- writer.append(buffer)
+ writer.append(buf)
+ ReactLogger.log(level: .info, message: "append \(bufferType) Buffer (at \(timestamp.seconds) seconds)...")
lastWrittenTimestamp = timestamp
// 4. If we failed to write the frames, stop the Recording
My concerns about this workaround are:
Found relevant PR but it's closed. https://github.com/mrousavy/react-native-vision-camera/pull/1546
What's happening?
When taking a video and pausing/resuming the video on iOS, the audio and video are chopped into different segments rather than one continuous video. For example, I start recording and say
one two three
pause recording and say four five six
resume recording and say seven eight nine
stop recording
Example video output:
https://github.com/mrousavy/react-native-vision-camera/assets/7674587/c50c74ab-23be-404e-a81f-22128227475a
Reproducible Code
Full repo with a minimal reproduction: https://github.com/xHeinrich/vision-camera-reproduction
Relevant log output
Camera Device
Device
iPhone 13 Pro
VisionCamera Version
4.0.0
Can you reproduce this issue in the VisionCamera Example app?
Yes, I can reproduce the same issue in the Example app here
Additional information