diff --git a/CMakeLists.txt b/CMakeLists.txt index d385cd1..2ea9717 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -14,17 +14,23 @@ endif (UNIX AND NOT APPLE) if (APPLE) add_subdirectory (opencv) - if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") + if (OpenCV_FOUND) + set(useOpenCV "true") + else() + if(NOT ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")) + message (FATAL_ERROR "clang is required to build libcammacos") + endif() + set(useOpenCV "false") add_subdirectory (macos) - endif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") + endif() endif (APPLE) if (NOT UNIX) message (ERROR "This package only builds on Unix platforms") endif (NOT UNIX) -install_files(/lua/camera init.lua) - SET(src) -SET(luasrc init.lua) +SET(luasrc "${CMAKE_BINARY_DIR}/init.lua") +configure_file(init.lua.in "${luasrc}") +install_files(/lua/camera "${luasrc}") ADD_TORCH_PACKAGE(camera "${src}" "${luasrc}" "Image Processing") diff --git a/init.lua b/init.lua.in similarity index 92% rename from init.lua rename to init.lua.in index 02da20f..227a80a 100644 --- a/init.lua +++ b/init.lua.in @@ -8,6 +8,9 @@ require 'xlua' ---------------------------------- -- load camera driver based on OS ---------------------------------- + +useOpenCV = @useOpenCV@ + if useOpenCV then if not xlua.require 'camopencv' then xlua.error('failed to load camopencv wrapper: verify that camopencv is installed') @@ -17,8 +20,8 @@ elseif sys.OS == 'linux' then xlua.error('failed to load video4linux wrapper: verify that you have v4l2 libs') end elseif sys.OS == 'macos' then - if not xlua.require 'camopencv' then - xlua.error('failed to load camopencv wrapper: verify that camopencv is installed') + if not xlua.require 'cammacos' then + xlua.error('failed to load cammacos wrapper: verify that cammacos is installed') end else xlua.error('no camera driver available for your OS, sorry :-(') diff --git a/macos/camera.h b/macos/camera.h index 1c6fc02..cf5d0ae 100644 --- a/macos/camera.h +++ b/macos/camera.h @@ -3,7 +3,7 @@ // 
#import -#import +#import #define error(...) fprintf(stderr, __VA_ARGS__) #define console(...) (!g_quiet && printf(__VA_ARGS__)) @@ -12,34 +12,34 @@ BOOL g_verbose = NO; BOOL g_quiet = NO; -@interface ImageSnap : NSObject { +@interface ImageSnap : NSObject { - QTCaptureSession *mCaptureSession; - QTCaptureDeviceInput *mCaptureDeviceInput; - QTCaptureDecompressedVideoOutput *mCaptureDecompressedVideoOutput; + AVCaptureSession *mCaptureSession; + AVCaptureDeviceInput *mCaptureDeviceInput; + AVCaptureVideoDataOutput *mCaptureDecompressedVideoOutput; CVImageBufferRef mCurrentImageBuffer; } /** - * Returns all attached QTCaptureDevice objects that have video. - * This includes video-only devices (QTMediaTypeVideo) and - * audio/video devices (QTMediaTypeMuxed). + * Returns all attached AVCaptureDevice objects that have video. + * This includes video-only devices (AVMediaTypeVideo) and + * audio/video devices (AVMediaTypeMuxed). * * @return autoreleased array of video devices */ +(NSArray *)videoDevices; /** - * Returns the default QTCaptureDevice object for video + * Returns the default AVCaptureDevice object for video * or nil if none is found. */ -+(QTCaptureDevice *)defaultVideoDevice; ++(AVCaptureDevice *)defaultVideoDevice; /** - * Returns the QTCaptureDevice with the given name + * Returns the AVCaptureDevice with the given name * or nil if the device cannot be found. 
*/ -+(QTCaptureDevice *)deviceNamed:(NSString *)name; ++(AVCaptureDevice *)deviceNamed:(NSString *)name; /** * Writes an NSImage to disk, formatting it according @@ -58,7 +58,7 @@ BOOL g_quiet = NO; -(id)init; -(void)dealloc; --(BOOL)startSession:(QTCaptureDevice *)device withWidth:(unsigned int)width withHeight:(unsigned int)height; +-(BOOL)startSession:(AVCaptureDevice *)device withWidth:(unsigned int)width withHeight:(unsigned int)height; -(CIImage *)snapshot; -(void)stopSession; diff --git a/macos/camera.m b/macos/camera.m index 41c114c..983e169 100644 --- a/macos/camera.m +++ b/macos/camera.m @@ -25,11 +25,13 @@ #include @interface ImageSnap() +- (void)captureOutput:(AVCaptureOutput *)captureOutput + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection; -- (void)captureOutput:(QTCaptureOutput *)captureOutput - didOutputVideoFrame:(CVImageBufferRef)videoFrame - withSampleBuffer:(QTSampleBuffer *)sampleBuffer - fromConnection:(QTCaptureConnection *)connection; +- (void)captureOutput:(AVCaptureOutput *)captureOutput + didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection; @end @@ -58,28 +60,28 @@ - (void)dealloc{ // Returns an array of video devices attached to this computer. + (NSArray *)videoDevices{ NSMutableArray *results = [NSMutableArray arrayWithCapacity:3]; - [results addObjectsFromArray:[QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]]; - [results addObjectsFromArray:[QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed]]; + [results addObjectsFromArray:[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]]; + [results addObjectsFromArray:[AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed]]; return results; } // Returns the default video device or nil if none found. 
-+ (QTCaptureDevice *)defaultVideoDevice{ - QTCaptureDevice *device = nil; ++ (AVCaptureDevice *)defaultVideoDevice{ + AVCaptureDevice *device = nil; - device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo]; + device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; if( device == nil ){ - device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed]; + device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed]; } return device; } // Returns the named capture device or nil if not found. -+(QTCaptureDevice *)deviceNamed:(NSString *)name{ - QTCaptureDevice *result = nil; ++(AVCaptureDevice *)deviceNamed:(NSString *)name{ + AVCaptureDevice *result = nil; NSArray *devices = [ImageSnap videoDevices]; - for( QTCaptureDevice *device in devices ){ + for( AVCaptureDevice *device in devices ){ if ( [name isEqualToString:[device description]] ){ result = device; } // end if: match @@ -229,7 +231,7 @@ -(void)stopSession{ /** * Begins the capture session. Frames begin coming in. */ --(BOOL)startSession:(QTCaptureDevice *)device +-(BOOL)startSession:(AVCaptureDevice *)device withWidth:(unsigned int)width withHeight:(unsigned int)height { @@ -257,22 +259,14 @@ -(BOOL)startSession:(QTCaptureDevice *)device // Create the capture session - verbose( "\tCreating QTCaptureSession..." ); - mCaptureSession = [[QTCaptureSession alloc] init]; + verbose( "\tCreating AVCaptureSession..." 
); + mCaptureSession = [[AVCaptureSession alloc] init]; verbose( "Done.\n"); - if( ![device open:&error] ){ - error( "\tCould not create capture session.\n" ); - [mCaptureSession release]; - mCaptureSession = nil; - return NO; - } - // Create input object from the device - verbose( "\tCreating QTCaptureDeviceInput with %s...", [[device description] UTF8String] ); - mCaptureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:device]; - verbose( "Done.\n"); - if (![mCaptureSession addInput:mCaptureDeviceInput error:&error]) { + verbose( "\tCreating AVCaptureDeviceInput with %s...", [[device description] UTF8String] ); + mCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error]; + if (!mCaptureDeviceInput) { error( "\tCould not convert device to input device.\n"); [mCaptureSession release]; [mCaptureDeviceInput release]; @@ -280,20 +274,14 @@ -(BOOL)startSession:(QTCaptureDevice *)device mCaptureDeviceInput = nil; return NO; } + [mCaptureSession addInput:mCaptureDeviceInput]; + verbose( "Done.\n"); // Decompressed video output - verbose( "\tCreating QTCaptureDecompressedVideoOutput..."); - mCaptureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc] init]; - [mCaptureDecompressedVideoOutput setDelegate:self]; - - [mCaptureDecompressedVideoOutput setPixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys: - [NSNumber numberWithUnsignedInt:width], (id)kCVPixelBufferWidthKey, - [NSNumber numberWithUnsignedInt:height], (id)kCVPixelBufferHeightKey, - [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey, - nil]]; + verbose( "\tCreating AVCaptureVideoDataOutput..."); + mCaptureDecompressedVideoOutput = [[AVCaptureVideoDataOutput alloc] init]; - verbose( "Done.\n" ); - if (![mCaptureSession addOutput:mCaptureDecompressedVideoOutput error:&error]) { + if (!mCaptureDecompressedVideoOutput) { error( "\tCould not create decompressed output.\n"); [mCaptureSession release]; 
[mCaptureDeviceInput release]; @@ -303,6 +291,15 @@ -(BOOL)startSession:(QTCaptureDevice *)device mCaptureDecompressedVideoOutput = nil; return NO; } + [mCaptureSession addOutput:mCaptureDecompressedVideoOutput]; + + dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL); + [mCaptureDecompressedVideoOutput setSampleBufferDelegate:self queue:queue]; + dispatch_release(queue); + verbose( "Done.\n" ); + + NSDictionary *newSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB) }; + mCaptureDecompressedVideoOutput.videoSettings = newSettings; // Clear old image? verbose("\tEntering synchronized block to clear memory..."); @@ -322,19 +319,15 @@ -(BOOL)startSession:(QTCaptureDevice *)device -// This delegate method is called whenever the QTCaptureDecompressedVideoOutput receives a frame -- (void)captureOutput:(QTCaptureOutput *)captureOutput - didOutputVideoFrame:(CVImageBufferRef)videoFrame - withSampleBuffer:(QTSampleBuffer *)sampleBuffer - fromConnection:(QTCaptureConnection *)connection +// This delegate method is called whenever the AVCaptureVideoDataOutput receives a frame +- (void)captureOutput:(AVCaptureOutput *)captureOutput + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection; { verbose( "." ); - if (videoFrame == nil ) { - verbose( "'nil' Frame captured.\n" ); - return; - } // Swap out old frame for new one + CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer); CVImageBufferRef imageBufferToRelease; CVBufferRetain(videoFrame); @@ -343,7 +336,14 @@ - (void)captureOutput:(QTCaptureOutput *)captureOutput mCurrentImageBuffer = videoFrame; } // end sync CVBufferRelease(imageBufferToRelease); +} +- (void)captureOutput:(AVCaptureOutput *)captureOutput + didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection; +{ + verbose( "." 
); + verbose( "'nil' (dropped) Frame captured.\n" ); } @end @@ -354,7 +354,7 @@ - (void)captureOutput:(QTCaptureOutput *)captureOutput // static vars static int nbcams = 0; static ImageSnap **snap = NULL; -static QTCaptureDevice **device = NULL; +static AVCaptureDevice **device = NULL; static NSAutoreleasePool * pool = NULL; // start up all cameras found @@ -379,7 +379,7 @@ int initCameras(lua_State *L) { int k = 0; if ([deviceName count] > 0) { printf("found %ld video device(s):\n", [deviceName count]); - for( QTCaptureDevice *name in deviceName ){ + for( AVCaptureDevice *name in deviceName ){ printf( "%d: %s\n", k++, [[name description] UTF8String] ); } } else { @@ -393,9 +393,9 @@ int initCameras(lua_State *L) { nbcams = [deviceName count]; printf("only using the first %d camera(s)\n", nbcams); } - device = malloc(sizeof(QTCaptureDevice *)*nbcams); + device = malloc(sizeof(AVCaptureDevice *)*nbcams); int i = 0, j = 0; - for( QTCaptureDevice *dev in deviceName ) { + for( AVCaptureDevice *dev in deviceName ) { // next cam: for (int k=1; k<=nbcams; k++) { lua_rawgeti(L, 1, k); @@ -431,50 +431,51 @@ int initCameras(lua_State *L) { return 1; } -// grab next frames -int grabFrames(lua_State *L) { - - // grab pixels for each camera - for (int i=0; istride[1]; + int m1 = tensor->stride[2]; + int m2 = tensor->stride[0]; + int i, j, k; + const int nChannels = [imageRep samplesPerPixel]; + for (i = 0; i < height; i++) { + for (j = 0, k = 0; j < width; j++, k+= m1) { + dst[k] = bytes[i*bytesPerRow + j*nChannels + 2]/255.; + dst[k+m2] = bytes[i*bytesPerRow + j*nChannels + 1]/255.; + dst[k+2*m2] = bytes[i*bytesPerRow + j*nChannels + 0]/255.; } - } - - // cleanup - [imageRep release]; - [image release]; + dst += m0; } + // cleanup + [imageRep release]; + [image release]; + // done return 0; } @@ -497,7 +498,7 @@ int releaseCameras(lua_State *L) { // Register functions into lua space static const struct luaL_reg cammacos [] = { {"initCameras", initCameras}, - {"grabFrames", 
grabFrames}, + {"grabFrame", grabFrame}, {"releaseCameras", releaseCameras}, {NULL, NULL} /* sentinel */ }; diff --git a/macos/init.lua b/macos/init.lua index cc7cafb..1414baa 100644 --- a/macos/init.lua +++ b/macos/init.lua @@ -38,37 +38,23 @@ function Camera:__init(...) end -- buffers - self.tensorsized = {} - self.buffer = {} - self.tensortyped = {} - for i = 1,#self.idx do - self.tensorsized[i] = torch.FloatTensor(3, height, width) - self.buffer[i] = torch.FloatTensor() - self.tensortyped[i] = torch.Tensor(3, height, width) - end + self.tensorsized = torch.FloatTensor(3, height, width) + self.buffer = torch.FloatTensor() + self.tensortyped = torch.Tensor(3, height, width) end function Camera:forward() - -- grab all frames - libcammacos.grabFrames(self.buffer) - - -- process all frames - for i = 1,#self.idx do - -- resize frames - if self.tensorsized[i]:size(2) ~= self.buffer[i]:size(2) or self.tensorsized[i]:size(3) ~= self.buffer[i]:size(3) then - image.scale(self.tensorsized[i],self.buffer[i]) - else - self.tensorsized[i] = self.buffer[i] - end - -- retype frames - if self.tensortyped[i]:type() ~= self.tensorsized[i]:type() then - self.tensortyped[i]:copy(self.tensorsized[i]) - else - self.tensortyped[i] = self.tensorsized[i] - end + libcammacos.grabFrame(self.idx, self.buffer) + if self.tensorsized:size(2) ~= self.buffer:size(2) or self.tensorsized:size(3) ~= self.buffer:size(3) then + image.scale(self.tensorsized, self.buffer) + else + self.tensorsized = self.buffer + end + if self.tensortyped:type() ~= self.tensorsized:type() then + self.tensortyped:copy(self.tensorsized) + else + self.tensortyped = self.tensorsized end - - -- done return self.tensortyped end diff --git a/opencv/CMakeLists.txt b/opencv/CMakeLists.txt index b75efae..61c0f71 100644 --- a/opencv/CMakeLists.txt +++ b/opencv/CMakeLists.txt @@ -8,3 +8,5 @@ IF(OpenCV_FOUND) ADD_TORCH_PACKAGE(camopencv "${src}" "${luasrc}" "Image Processing") TARGET_LINK_LIBRARIES(camopencv luaT TH ${OpenCV_LIBS}) 
ENDIF(OpenCV_FOUND) + +set(OpenCV_FOUND ${OpenCV_FOUND} PARENT_SCOPE)