Learning C++ (Part 13: Opening the Camera from the Command Line) #243

nonocast commented 2 years ago

Following OBS's approach: open the camera by mixing C++ and Objective-C.

main.cpp

extern "C" {
#include "capture.h"
}
#include <iostream>
#include <pthread.h>
#include <unistd.h>

pthread_t capture_thread;
void *worker(void *arg);

int main() {
  std::cout << "start" << std::endl;

  pthread_create(&capture_thread, NULL, &worker, NULL);

  // Keep the process alive; frames arrive on the capture dispatch queue.
  while (true) {
    sleep(1);
  }
  return 0;
}

void *worker(void *arg) {
  init(); // set up the AVFoundation capture session (defined in capture.m)
  return NULL;
}
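
A variation worth noting: instead of a pthread plus a sleep loop, the main thread can park itself in dispatch_main() and the setup can be kicked off on a GCD queue. A minimal sketch, assuming the same capture.h (dispatch_async_f is the plain-C GCD entry point, so no blocks are needed from C++):

extern "C" {
#include "capture.h"
}
#include <dispatch/dispatch.h>

static void start_capture(void *) {
  init();
}

int main() {
  // Run init() off the main thread, then park in dispatch_main()
  // instead of sleeping; frames keep arriving on the capture queue.
  dispatch_async_f(dispatch_get_global_queue(QOS_CLASS_DEFAULT, 0), nullptr, start_capture);
  dispatch_main(); // never returns
}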

capture.h

#ifndef CAPTURE_H
#define CAPTURE_H

void init();

#endif
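
A small cleanup worth considering: put the extern "C" guard inside the header itself, so main.cpp can include it without the wrapper while capture.m sees plain C. The usual pattern:

#ifndef CAPTURE_H
#define CAPTURE_H

#ifdef __cplusplus
extern "C" {
#endif

void init();

#ifdef __cplusplus
}
#endif

#endif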

capture.m

#import <AVFoundation/AVFoundation.h>
#import <AppKit/AppKit.h>
#import <CoreFoundation/CoreFoundation.h>
#import <CoreImage/CoreImage.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#include <stdio.h>

// https://gist.github.com/bellbind/6954679

struct av_capture;

@interface OBSAVCaptureDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
@public
  struct av_capture *capture;
}
- (void)captureOutput:(AVCaptureOutput *)out didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
@end

@implementation OBSAVCaptureDelegate {
  int index;
}

- (void)captureOutput:(AVCaptureOutput *)out didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  // UNUSED_PARAMETER(out);
  // UNUSED_PARAMETER(sampleBuffer);
  // UNUSED_PARAMETER(connection);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  // UNUSED_PARAMETER(captureOutput);
  // UNUSED_PARAMETER(connection);

  NSLog(@"frame: %003d", ++index);

  CMItemCount count = CMSampleBufferGetNumSamples(sampleBuffer);
  if (count < 1 || !capture)
    return;

  if (index < 10) {
    // The data written below is JPEG, so name the file accordingly.
    NSString *path = [NSString stringWithFormat:@"./frame-%03d.jpg", index];
    [self save:sampleBuffer path:path];
  }
}
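
// A hedged sketch (not part of the original file, and not called above): to
// read the raw pixels instead of going through CIImage, lock the pixel
// buffer and inspect its base address. All calls are standard CoreVideo API.
static void dump_frame_info(CMSampleBufferRef sampleBuffer) {
  CVImageBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (frame == NULL)
    return;
  CVPixelBufferLockBaseAddress(frame, kCVPixelBufferLock_ReadOnly);
  size_t width = CVPixelBufferGetWidth(frame);
  size_t height = CVPixelBufferGetHeight(frame);
  size_t stride = CVPixelBufferGetBytesPerRow(frame);
  NSLog(@"pixels: %zux%zu, %zu bytes/row", width, height, stride);
  CVPixelBufferUnlockBaseAddress(frame, kCVPixelBufferLock_ReadOnly);
}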

- (void)save:(CMSampleBufferRef)sampleBuffer path:(NSString *)path {
  CVImageBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
  CIImage *image = [CIImage imageWithCVImageBuffer:frame];
  NSBitmapImageRep *bitmapRep = [[NSBitmapImageRep alloc] initWithCIImage:image];
  // NSJPEGFileType is deprecated; the modern constant is
  // NSBitmapImageFileTypeJPEG (use NSBitmapImageFileTypePNG for PNG output).
  NSData *jpgData = [bitmapRep representationUsingType:NSBitmapImageFileTypeJPEG properties:@{}];
  [jpgData writeToFile:path atomically:NO];
}
@end

struct av_capture {
  AVCaptureSession *session;
  AVCaptureDevice *device;
  AVCaptureDeviceInput *device_input;
  AVCaptureVideoDataOutput *out;

  OBSAVCaptureDelegate *delegate;
  dispatch_queue_t queue;
};

NSError *error;
struct av_capture capture;

void init() {
  NSLog(@"init capture");

  // for (NSScreen *each in [NSScreen screens]) {
  //   NSLog(@"%@", each.localizedName);
  // }

  // AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes : mediaType : position :

  // devicesWithMediaType: is deprecated since macOS 10.15;
  // AVCaptureDeviceDiscoverySession is the modern replacement.
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  // for (AVCaptureDevice *device in devices) {
  //   const char *name = [[device localizedName] UTF8String];
  //   fprintf(stderr, "Device: %s\n", name);
  // }

  // init session
  capture.session = [[AVCaptureSession alloc] init];
  capture.session.sessionPreset = AVCaptureSessionPreset1280x720;

  // init input
  // capture.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  if ([devices count] == 0) {
    NSLog(@"no video capture device found");
    return;
  }
  capture.device = [devices objectAtIndex:0];
  const char *name = [[capture.device localizedName] UTF8String];
  fprintf(stderr, "Device: %s\n", name);
  capture.device_input = [AVCaptureDeviceInput deviceInputWithDevice:capture.device error:&error];
  if (capture.device_input) {
    if ([capture.session canAddInput:capture.device_input]) {
      [capture.session addInput:capture.device_input];
      NSLog(@"add input OK");
    }
  } else {
    NSLog(@"failed to create device input: %@", error);
  }

  // init output
  capture.out = [[AVCaptureVideoDataOutput alloc] init];
  capture.delegate = [[OBSAVCaptureDelegate alloc] init];
  capture.delegate->capture = &capture;
  capture.queue = dispatch_queue_create(NULL, NULL);
  [capture.out setSampleBufferDelegate:capture.delegate queue:capture.queue];
  if (capture.out) {
    if ([capture.session canAddOutput:capture.out]) {
      [capture.session addOutput:capture.out];
      NSLog(@"add output OK");
    }
  }

  [capture.session startRunning];
}
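
One thing the listing glosses over: on macOS 10.14 and later the process must hold camera permission, otherwise startRunning silently delivers no frames. A hedged sketch of a check that could run at the top of init() (the AVCaptureDevice authorization API is real; the wiring here is illustrative):

  AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
  if (status == AVAuthorizationStatusNotDetermined) {
    // Triggers the system prompt; start the session only after this completes.
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
                               NSLog(@"camera access %@", granted ? @"granted" : @"denied");
                             }];
  } else if (status != AVAuthorizationStatusAuthorized) {
    NSLog(@"camera access denied; check System Settings > Privacy & Security");
  }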

CMakeLists.txt

cmake_minimum_required(VERSION 3.10)
project(capture)

find_library(AVFOUNDATION AVFoundation)
find_library(COCOA Cocoa)
find_library(COREFOUNDATION CoreFoundation)
find_library(COREMEDIA CoreMedia)
find_library(COREVIDEO CoreVideo)
find_library(COREIMAGE CoreImage)

add_executable(app main.cpp capture.m)

target_link_libraries(app
    ${AVFOUNDATION}
    ${COCOA}
    ${COREFOUNDATION}
    ${COREMEDIA}
    ${COREVIDEO}
    ${COREIMAGE})
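
As an aside, the find_library calls can be replaced by passing -framework flags straight to the linker, which reads a bit tighter; a sketch that should be equivalent on Apple toolchains:

target_link_libraries(app
    "-framework AVFoundation"
    "-framework Cocoa"
    "-framework CoreFoundation"
    "-framework CoreMedia"
    "-framework CoreVideo"
    "-framework CoreImage")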

This more or less works, but honestly it feels quite clunky.