mac-virtualcam: Use IOSurface to share output with virtual cameras

This change updates the virtual camera implementation on macOS to
utilize IOSurface to share the output feed with the virtual cameras.

By using IOSurface, we remove the need for copying the frames across
multiple buffers, since they can be shared across Mach connections using
zero-copy.
This commit is contained in:
Fabian Mastenbroek 2022-02-04 21:32:04 +01:00
parent 9e29be5c79
commit aa3781fde9
No known key found for this signature in database
GPG Key ID: 405FC6F81F0A7B85
10 changed files with 161 additions and 87 deletions

View File

@@ -4,8 +4,9 @@ find_library(COCOA Cocoa)
find_library(COREMEDIA CoreMedia)
find_library(COREMEDIAIO CoreMediaIO)
find_library(COREVIDEO CoreVideo)
find_library(IOSURFACE IOSurface)
mark_as_advanced(COCOA COREMEDIA COREMEDIAIO COREVIDEO)
mark_as_advanced(COCOA COREMEDIA COREMEDIAIO COREVIDEO IOSURFACE)
add_library(mac-dal-plugin MODULE)
add_library(OBS::mac-dal-plugin ALIAS mac-dal-plugin)
@@ -37,8 +38,9 @@ target_include_directories(
target_compile_options(mac-dal-plugin PRIVATE -fobjc-arc -fobjc-weak)
target_link_libraries(mac-dal-plugin PRIVATE ${COCOA} ${COREMEDIA}
${COREMEDIAIO} ${COREVIDEO})
target_link_libraries(
mac-dal-plugin PRIVATE ${COCOA} ${COREMEDIA} ${COREMEDIAIO} ${COREVIDEO}
${IOSURFACE})
set(MACOSX_PLUGIN_BUNDLE_TYPE "BNDL")
target_sources(mac-dal-plugin PRIVATE placeholder.png)

View File

@@ -6,16 +6,16 @@
//
#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
NS_ASSUME_NONNULL_BEGIN
@protocol MachClientDelegate
- (void)receivedFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData;
- (void)receivedPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator;
- (void)receivedStop;
@end

View File

@@ -101,29 +101,37 @@
break;
case MachMsgIdFrame:
VLog(@"Received frame message");
if (components.count >= 6) {
CGFloat width;
[components[0] getBytes:&width length:sizeof(width)];
CGFloat height;
[components[1] getBytes:&height length:sizeof(height)];
if (components.count >= 4) {
NSMachPort *framePort = (NSMachPort *)components[0];
IOSurfaceRef surface = IOSurfaceLookupFromMachPort(
[framePort machPort]);
CVPixelBufferRef frame;
CVPixelBufferCreateWithIOSurface(kCFAllocatorDefault,
surface, NULL, &frame);
uint64_t timestamp;
[components[2] getBytes:&timestamp
[components[1] getBytes:&timestamp
length:sizeof(timestamp)];
VLog(@"Received frame data: %fx%f (%llu)", width,
height, timestamp);
NSData *frameData = components[3];
VLog(@"Received frame data: %zux%zu (%llu)",
CVPixelBufferGetWidth(frame),
CVPixelBufferGetHeight(frame), timestamp);
uint32_t fpsNumerator;
[components[4] getBytes:&fpsNumerator
[components[2] getBytes:&fpsNumerator
length:sizeof(fpsNumerator)];
uint32_t fpsDenominator;
[components[5] getBytes:&fpsDenominator
[components[3] getBytes:&fpsDenominator
length:sizeof(fpsDenominator)];
[self.delegate
receivedFrameWithSize:NSMakeSize(width, height)
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator
frameData:frameData];
[self.delegate receivedPixelBuffer:frame
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator];
CVPixelBufferRelease(frame);
CFRelease(surface);
}
break;
case MachMsgIdStop:

View File

@@ -203,19 +203,21 @@ typedef enum {
#pragma mark - MachClientDelegate
- (void)receivedFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData
- (void)receivedPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
{
size_t width = CVPixelBufferGetWidth(frame);
size_t height = CVPixelBufferGetHeight(frame);
dispatch_sync(_stateQueue, ^{
if (_state == PlugInStateWaitingForServer) {
NSUserDefaults *defaults =
[NSUserDefaults standardUserDefaults];
[defaults setInteger:size.width
[defaults setInteger:(long)width
forKey:kTestCardWidthKey];
[defaults setInteger:size.height
[defaults setInteger:(long)height
forKey:kTestCardHeightKey];
[defaults setDouble:(double)fpsNumerator /
(double)fpsDenominator
@@ -234,11 +236,10 @@ typedef enum {
dispatch_time(DISPATCH_TIME_NOW, 5.0 * NSEC_PER_SEC),
5.0 * NSEC_PER_SEC, (1ull * NSEC_PER_SEC) / 10);
[self.stream queueFrameWithSize:size
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator
frameData:frameData];
[self.stream queuePixelBuffer:frame
timestamp:timestamp
fpsNumerator:fpsNumerator
fpsDenominator:fpsDenominator];
}
- (void)receivedStop

View File

@@ -18,6 +18,7 @@
// along with obs-mac-virtualcam. If not, see <http://www.gnu.org/licenses/>.
#import "OBSDALObjectStore.h"
#import <CoreVideo/CoreVideo.h>
NS_ASSUME_NONNULL_BEGIN
@@ -35,11 +36,10 @@ NS_ASSUME_NONNULL_BEGIN
- (void)stopServingDefaultFrames;
- (void)queueFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData;
- (void)queuePixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator;
@end

View File

@@ -349,11 +349,10 @@
}
}
- (void)queueFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameData:(NSData *)frameData
- (void)queuePixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
{
if (CMSimpleQueueGetFullness(self.queue) >= 1.0) {
DLog(@"Queue is full, bailing out");
@@ -374,9 +373,34 @@
self.sequenceNumber = CMIOGetNextSequenceNumber(self.sequenceNumber);
CMSampleBufferRef sampleBuffer;
CMSampleBufferCreateFromData(size, timingInfo, self.sequenceNumber,
frameData, &sampleBuffer);
CMSimpleQueueEnqueue(self.queue, sampleBuffer);
// Generate the video format description from that pixel buffer
CMVideoFormatDescriptionRef format;
err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault,
frame, &format);
if (err != noErr) {
DLog(@"CMVideoFormatDescriptionCreateForImageBuffer err %d",
err);
return;
}
err = CMIOSampleBufferCreateForImageBuffer(
kCFAllocatorDefault, frame, format, &timingInfo,
self.sequenceNumber, kCMIOSampleBufferNoDiscontinuities,
&sampleBuffer);
CFRelease(format);
if (err != noErr) {
DLog(@"CMIOSampleBufferCreateForImageBuffer err %d", err);
return;
}
err = CMSimpleQueueEnqueue(self.queue, sampleBuffer);
if (err != noErr) {
DLog(@"CMSimpleQueueEnqueue err %d", err);
return;
}
// Inform the clients that the queue has been altered
if (self.alteredProc != NULL) {

View File

@@ -1,8 +1,10 @@
project(mac-virtualcam)
find_library(APPKIT AppKit)
find_library(COREVIDEO CoreVideo)
find_library(IOSURFACE IOSurface)
mark_as_advanced(APPKIT)
mark_as_advanced(APPKIT COREVIDEO IOSURFACE)
add_library(mac-virtualcam MODULE)
add_library(OBS::virtualcam ALIAS mac-virtualcam)
@@ -15,8 +17,9 @@ target_include_directories(
mac-virtualcam
PRIVATE "$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>/../common")
target_link_libraries(mac-virtualcam PRIVATE OBS::libobs OBS::frontend-api
${APPKIT})
target_link_libraries(
mac-virtualcam PRIVATE OBS::libobs OBS::frontend-api ${APPKIT} ${COREVIDEO}
${IOSURFACE})
target_compile_features(mac-virtualcam PRIVATE cxx_deleted_functions
cxx_rvalue_references cxx_std_17)

View File

@@ -6,6 +6,7 @@
//
#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
NS_ASSUME_NONNULL_BEGIN
@@ -16,11 +17,10 @@ NS_ASSUME_NONNULL_BEGIN
/*!
Will eventually be used for sending frames to all connected clients
*/
- (void)sendFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameBytes:(uint8_t *)frameBytes;
- (void)sendPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator;
- (void)stop;

View File

@@ -7,6 +7,7 @@
#import "OBSDALMachServer.h"
#include <obs-module.h>
#include <CoreVideo/CoreVideo.h>
#include "MachProtocol.h"
#include "Defines.h"
@@ -121,23 +122,16 @@
[self.clientPorts minusSet:removedPorts];
}
- (void)sendFrameWithSize:(NSSize)size
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
frameBytes:(uint8_t *)frameBytes
- (void)sendPixelBuffer:(CVPixelBufferRef)frame
timestamp:(uint64_t)timestamp
fpsNumerator:(uint32_t)fpsNumerator
fpsDenominator:(uint32_t)fpsDenominator
{
if ([self.clientPorts count] <= 0) {
return;
}
@autoreleasepool {
CGFloat width = size.width;
NSData *widthData = [NSData dataWithBytes:&width
length:sizeof(width)];
CGFloat height = size.height;
NSData *heightData = [NSData dataWithBytes:&height
length:sizeof(height)];
NSData *timestampData = [NSData
dataWithBytes:&timestamp
length:sizeof(timestamp)];
@@ -148,19 +142,20 @@
dataWithBytes:&fpsDenominator
length:sizeof(fpsDenominator)];
// NOTE: I'm not totally sure about the safety of dataWithBytesNoCopy in this context.
// Seems like there could potentially be an issue if the frameBuffer went away before the
// mach message finished sending. But it seems to be working and avoids a memory copy. Alternately
// we could do something like
// NSData *frameData = [NSData dataWithBytes:(void *)frameBytes length:size.width * size.height * 2];
NSData *frameData = [NSData
dataWithBytesNoCopy:(void *)frameBytes
length:size.width * size.height * 2
freeWhenDone:NO];
NSPort *framePort = [NSMachPort
portWithMachPort:IOSurfaceCreateMachPort(
CVPixelBufferGetIOSurface(
frame))];
if (!framePort) {
blog(LOG_ERROR,
"unable to allocate mach port for pixel buffer");
return;
}
[self sendMessageToClientsWithMsgId:MachMsgIdFrame
components:@[
widthData, heightData,
timestampData, frameData,
framePort, timestampData,
fpsNumeratorData,
fpsDenominatorData
]];

View File

@@ -182,14 +182,55 @@ static void virtualcam_output_raw_video(void *data, struct video_data *frame)
(videoInfo.output_width * 2), frame->linesize[0]);
}
CGFloat width = videoInfo.output_width;
CGFloat height = videoInfo.output_height;
size_t width = videoInfo.output_width;
size_t height = videoInfo.output_height;
[sMachServer sendFrameWithSize:NSMakeSize(width, height)
timestamp:frame->timestamp
fpsNumerator:videoInfo.fps_num
fpsDenominator:videoInfo.fps_den
frameBytes:outData];
NSDictionary *pbAttr = @{(id)kCVPixelBufferIOSurfacePropertiesKey: @{}};
CVPixelBufferRef frameRef = NULL;
CVReturn status = CVPixelBufferCreate(NULL, width, height,
kCVPixelFormatType_422YpCbCr8,
(__bridge CFDictionaryRef)pbAttr,
&frameRef);
if (status != kCVReturnSuccess) {
blog(LOG_ERROR, "unable to allocate pixel buffer (error %d)",
status);
return;
}
// Copy memory into the pixel buffer
CVPixelBufferLockBaseAddress(frameRef, 0);
uint8_t *dest =
(uint8_t *)CVPixelBufferGetBaseAddressOfPlane(frameRef, 0);
uint8_t *src = outData;
size_t destBytesPerRow =
CVPixelBufferGetBytesPerRowOfPlane(frameRef, 0);
size_t srcBytesPerRow = frame->linesize[0];
// Sometimes CVPixelBufferCreate will create a pixel buffer that's a different
// size than necessary to hold the frame (probably for some optimization reason).
// If that is the case, this will do a row-by-row copy into the buffer.
if (destBytesPerRow == srcBytesPerRow) {
memcpy(dest, src, destBytesPerRow * height);
} else {
for (int line = 0; line < height; line++) {
memcpy(dest, src, srcBytesPerRow);
src += srcBytesPerRow;
dest += destBytesPerRow;
}
}
memcpy(dest, outData, srcBytesPerRow * height);
CVPixelBufferUnlockBaseAddress(frameRef, 0);
// Share pixel buffer with clients
[sMachServer sendPixelBuffer:frameRef
timestamp:frame->timestamp
fpsNumerator:videoInfo.fps_num
fpsDenominator:videoInfo.fps_den];
CVPixelBufferRelease(frameRef);
}
struct obs_output_info virtualcam_output_info = {