mirror of
https://github.com/purplecabbage/phonegap-plugins.git
synced 2026-01-14 00:47:54 -05:00
The AVCaptureSession wasn't being released, and as a result it leaked memory. In our application this wasn't really an issue on iPhones, but it becomes a problem fairly quickly (~80 scans) on low-memory devices such as the third- and fourth-generation iPod Touch.
862 lines
34 KiB
Plaintext
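The change described above boils down to dropping the processor's strong references to the capture session (and its preview layer) as soon as a scan finishes, rather than waiting for a later dealloc. The sketch below is illustrative only, not the committed patch: it reuses names declared in the file that follows (CDVbcsProcessor, barcodeScanDone, the retained captureSession/previewLayer/viewController properties) and assumes manual reference counting, as the file does.

    // Illustrative sketch (MRC): tear the capture session down when the scan ends.
    - (void)barcodeScanDone {
        self.capturing = NO;

        [self.captureSession stopRunning];
        [self.parentViewController dismissModalViewControllerAnimated:YES];

        // nil out the retained properties so the AVCaptureSession, the preview
        // layer, and the view controller are released now rather than piling up
        // scan after scan on low-memory devices
        self.previewLayer   = nil;
        self.captureSession = nil;
        self.viewController = nil;

        // delayed [self release];
        [self performSelector:@selector(release) withObject:nil afterDelay:1];
    }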
/*
 * PhoneGap is available under *either* the terms of the modified BSD license *or* the
 * MIT License (2008). See http://opensource.org/licenses/alphabetical for full text.
 *
 * Copyright 2011 Matt Kane. All rights reserved.
 * Copyright (c) 2011, IBM Corporation
 */

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

//------------------------------------------------------------------------------
// use the all-in-one version of zxing that we built
//------------------------------------------------------------------------------
#import "zxing-all-in-one.h"

#import <Cordova/CDVPlugin.h>


//------------------------------------------------------------------------------
// Delegate to handle orientation functions
//
//------------------------------------------------------------------------------
@protocol CDVBarcodeScannerOrientationDelegate <NSObject>

- (NSUInteger)supportedInterfaceOrientations;
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
- (BOOL)shouldAutorotate;

@end

//------------------------------------------------------------------------------
// Adds a shutter button to the UI, and changes the scan from continuous to
// only performing a scan when you click the shutter button. For testing.
//------------------------------------------------------------------------------
#define USE_SHUTTER 0

//------------------------------------------------------------------------------
@class CDVbcsProcessor;
@class CDVbcsViewController;

//------------------------------------------------------------------------------
// plugin class
//------------------------------------------------------------------------------
@interface CDVBarcodeScanner : CDVPlugin {}
- (NSString*)isScanNotPossible;
- (void)scan:(NSMutableArray*)arguments withDict:(NSMutableDictionary*)options;
- (void)encode:(NSMutableArray*)arguments withDict:(NSMutableDictionary*)options;
- (void)returnSuccess:(NSString*)scannedText format:(NSString*)format cancelled:(BOOL)cancelled callback:(NSString*)callback;
- (void)returnError:(NSString*)message callback:(NSString*)callback;
@end
//------------------------------------------------------------------------------
// class that does the grunt work
//------------------------------------------------------------------------------
@interface CDVbcsProcessor : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {}
@property (nonatomic, retain) CDVBarcodeScanner* plugin;
@property (nonatomic, retain) NSString* callback;
@property (nonatomic, retain) UIViewController* parentViewController;
@property (nonatomic, retain) CDVbcsViewController* viewController;
@property (nonatomic, retain) AVCaptureSession* captureSession;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer* previewLayer;
@property (nonatomic, retain) NSString* alternateXib;
@property (nonatomic) BOOL is1D;
@property (nonatomic) BOOL is2D;
@property (nonatomic) BOOL capturing;

- (id)initWithPlugin:(CDVBarcodeScanner*)plugin callback:(NSString*)callback parentViewController:(UIViewController*)parentViewController alterateOverlayXib:(NSString *)alternateXib;
- (void)scanBarcode;
- (void)barcodeScanSucceeded:(NSString*)text format:(NSString*)format;
- (void)barcodeScanFailed:(NSString*)message;
- (void)barcodeScanCancelled;
- (void)openDialog;
- (NSString*)setUpCaptureSession;
- (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection;
- (NSString*)formatStringFrom:(zxing::BarcodeFormat)format;
- (UIImage*)getImageFromSample:(CMSampleBufferRef)sampleBuffer;
- (zxing::Ref<zxing::LuminanceSource>) getLuminanceSourceFromSample:(CMSampleBufferRef)sampleBuffer imageBytes:(uint8_t**)ptr;
- (UIImage*) getImageFromLuminanceSource:(zxing::LuminanceSource*)luminanceSource;
- (void)dumpImage:(UIImage*)image;
@end

//------------------------------------------------------------------------------
// view controller for the ui
//------------------------------------------------------------------------------
@interface CDVbcsViewController : UIViewController <CDVBarcodeScannerOrientationDelegate> {}
@property (nonatomic, retain) CDVbcsProcessor* processor;
@property (nonatomic, retain) NSString* alternateXib;
@property (nonatomic) BOOL shutterPressed;
@property (nonatomic, retain) IBOutlet UIView* overlayView;
// unsafe_unretained is equivalent to assign - used to prevent retain cycles in the property below
@property (nonatomic, unsafe_unretained) id orientationDelegate;

- (id)initWithProcessor:(CDVbcsProcessor*)processor alternateOverlay:(NSString *)alternateXib;
- (void)startCapturing;
- (UIView*)buildOverlayView;
- (UIImage*)buildReticleImage;
- (void)shutterButtonPressed;
- (IBAction)cancelButtonPressed:(id)sender;

@end
//------------------------------------------------------------------------------
// plugin class
//------------------------------------------------------------------------------
@implementation CDVBarcodeScanner

//--------------------------------------------------------------------------
- (NSString*)isScanNotPossible {
    NSString* result = nil;

    Class aClass = NSClassFromString(@"AVCaptureSession");
    if (aClass == nil) {
        return @"AVFoundation Framework not available";
    }

    return result;
}

//--------------------------------------------------------------------------
- (void)scan:(NSMutableArray*)arguments withDict:(NSMutableDictionary*)options {
    CDVbcsProcessor* processor;
    NSString* callback;
    NSString* capabilityError;

    callback = [arguments objectAtIndex:0];

    // We allow the user to define an alternate xib file for loading the overlay.
    NSString *overlayXib = nil;
    if ( [arguments count] == 2 )
    {
        overlayXib = [arguments objectAtIndex:1];
    }

    capabilityError = [self isScanNotPossible];
    if (capabilityError) {
        [self returnError:capabilityError callback:callback];
        return;
    }

    processor = [[CDVbcsProcessor alloc]
                 initWithPlugin:self
                 callback:callback
                 parentViewController:self.viewController
                 alterateOverlayXib:overlayXib
                 ];

    // queue [processor scanBarcode] to run on the event loop
    [processor performSelector:@selector(scanBarcode) withObject:nil afterDelay:0];
}

//--------------------------------------------------------------------------
- (void)encode:(NSMutableArray*)arguments withDict:(NSMutableDictionary*)options {
    [self returnError:@"encode function not supported" callback:[arguments objectAtIndex:0]];
}

//--------------------------------------------------------------------------
- (void)returnSuccess:(NSString*)scannedText format:(NSString*)format cancelled:(BOOL)cancelled callback:(NSString*)callback {
    NSNumber* cancelledNumber = [NSNumber numberWithInt:(cancelled?1:0)];

    NSMutableDictionary* resultDict = [[[NSMutableDictionary alloc] init] autorelease];
    [resultDict setObject:scannedText forKey:@"text"];
    [resultDict setObject:format forKey:@"format"];
    [resultDict setObject:cancelledNumber forKey:@"cancelled"];

    CDVPluginResult* result = [CDVPluginResult
                               resultWithStatus: CDVCommandStatus_OK
                               messageAsDictionary: resultDict
                               ];

    NSString* js = [result toSuccessCallbackString:callback];

    [self writeJavascript:js];
}

//--------------------------------------------------------------------------
- (void)returnError:(NSString*)message callback:(NSString*)callback {
    CDVPluginResult* result = [CDVPluginResult
                               resultWithStatus: CDVCommandStatus_OK
                               messageAsString: message
                               ];

    NSString* js = [result toErrorCallbackString:callback];

    [self writeJavascript:js];
}

@end
//------------------------------------------------------------------------------
// class that does the grunt work
//------------------------------------------------------------------------------
@implementation CDVbcsProcessor

@synthesize plugin = _plugin;
@synthesize callback = _callback;
@synthesize parentViewController = _parentViewController;
@synthesize viewController = _viewController;
@synthesize captureSession = _captureSession;
@synthesize previewLayer = _previewLayer;
@synthesize alternateXib = _alternateXib;
@synthesize is1D = _is1D;
@synthesize is2D = _is2D;
@synthesize capturing = _capturing;

//--------------------------------------------------------------------------
- (id)initWithPlugin:(CDVBarcodeScanner*)plugin
            callback:(NSString*)callback
            parentViewController:(UIViewController*)parentViewController
            alterateOverlayXib:(NSString *)alternateXib {
    self = [super init];
    if (!self) return self;

    self.plugin = plugin;
    self.callback = callback;
    self.parentViewController = parentViewController;
    self.alternateXib = alternateXib;

    self.is1D = YES;
    self.is2D = YES;
    self.capturing = NO;

    return self;
}

//--------------------------------------------------------------------------
- (void)dealloc {
    self.plugin = nil;
    self.callback = nil;
    self.parentViewController = nil;
    self.viewController = nil;
    self.captureSession = nil;
    self.previewLayer = nil;
    self.alternateXib = nil;

    self.capturing = NO;

    [super dealloc];
}
//--------------------------------------------------------------------------
- (void)scanBarcode {
    NSString* errorMessage = [self setUpCaptureSession];
    if (errorMessage) {
        [self barcodeScanFailed:errorMessage];
        return;
    }

    self.viewController = [[[CDVbcsViewController alloc] initWithProcessor: self alternateOverlay:self.alternateXib] autorelease];
    // here we set the orientation delegate to the MainViewController of the app (orientation is controlled in the project settings)
    self.viewController.orientationDelegate = self.plugin.viewController;

    // delayed [self openDialog];
    [self performSelector:@selector(openDialog) withObject:nil afterDelay:1];
}

//--------------------------------------------------------------------------
- (void)openDialog {
    [self.parentViewController
     presentModalViewController:self.viewController
     animated:YES
     ];
}

//--------------------------------------------------------------------------
- (void)barcodeScanDone {
    self.capturing = NO;
    [self.captureSession stopRunning];
    [self.parentViewController dismissModalViewControllerAnimated: YES];

    // the view controller is holding a reference to us; release it so that
    // it releases us in turn
    self.viewController = nil;

    // delayed [self release];
    [self performSelector:@selector(release) withObject:nil afterDelay:1];
}
//--------------------------------------------------------------------------
- (void)barcodeScanSucceeded:(NSString*)text format:(NSString*)format {
    [self barcodeScanDone];
    [self.plugin returnSuccess:text format:format cancelled:FALSE callback:self.callback];
}

//--------------------------------------------------------------------------
- (void)barcodeScanFailed:(NSString*)message {
    [self barcodeScanDone];
    [self.plugin returnError:message callback:self.callback];
}

//--------------------------------------------------------------------------
- (void)barcodeScanCancelled {
    [self barcodeScanDone];
    [self.plugin returnSuccess:@"" format:@"" cancelled:TRUE callback:self.callback];
}
//--------------------------------------------------------------------------
- (NSString*)setUpCaptureSession {
    NSError* error = nil;

    AVCaptureSession* captureSession = [[[AVCaptureSession alloc] init] autorelease];
    self.captureSession = captureSession;

    AVCaptureDevice* device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!device) return @"unable to obtain video capture device";

    AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) return @"unable to obtain video capture device input";

    AVCaptureVideoDataOutput* output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    if (!output) return @"unable to obtain video capture output";

    NSDictionary* videoOutputSettings = [NSDictionary
                                         dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                         forKey:(id)kCVPixelBufferPixelFormatTypeKey
                                         ];

    output.alwaysDiscardsLateVideoFrames = YES;
    output.videoSettings = videoOutputSettings;

    [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    if (![captureSession canSetSessionPreset:AVCaptureSessionPresetMedium]) {
        return @"unable to preset medium quality video capture";
    }

    captureSession.sessionPreset = AVCaptureSessionPresetMedium;

    if ([captureSession canAddInput:input]) {
        [captureSession addInput:input];
    }
    else {
        return @"unable to add video capture device input to session";
    }

    if ([captureSession canAddOutput:output]) {
        [captureSession addOutput:output];
    }
    else {
        return @"unable to add video capture output to session";
    }

    // setup capture preview layer
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];

    // run on next event loop pass [captureSession startRunning]
    [captureSession performSelector:@selector(startRunning) withObject:nil afterDelay:0];

    return nil;
}
//--------------------------------------------------------------------------
// this method gets sent the captured frames
//--------------------------------------------------------------------------
- (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection {

    if (!self.capturing) return;

#if USE_SHUTTER
    if (!self.viewController.shutterPressed) return;
    self.viewController.shutterPressed = NO;

    UIView* flashView = [[[UIView alloc] initWithFrame:self.viewController.view.frame] autorelease];
    [flashView setBackgroundColor:[UIColor whiteColor]];
    [self.viewController.view.window addSubview:flashView];

    [UIView
     animateWithDuration:.4f
     animations:^{
         [flashView setAlpha:0.f];
     }
     completion:^(BOOL finished){
         [flashView removeFromSuperview];
     }
     ];

    // [self dumpImage: [[self getImageFromSample:sampleBuffer] autorelease]];
#endif


    using namespace zxing;

    // LuminanceSource takes a pointer to a byte array but gives us no way to
    // get it back out again, so we keep our own copy in imageBytes and free
    // it once decoding is done.
    uint8_t* imageBytes = NULL;

    // NSTimeInterval timeStart = [NSDate timeIntervalSinceReferenceDate];

    try {
        DecodeHints decodeHints;
        decodeHints.addFormat(BarcodeFormat_QR_CODE);
        decodeHints.addFormat(BarcodeFormat_DATA_MATRIX);
        decodeHints.addFormat(BarcodeFormat_UPC_E);
        decodeHints.addFormat(BarcodeFormat_UPC_A);
        decodeHints.addFormat(BarcodeFormat_EAN_8);
        decodeHints.addFormat(BarcodeFormat_EAN_13);
        decodeHints.addFormat(BarcodeFormat_CODE_128);
        decodeHints.addFormat(BarcodeFormat_CODE_39);
        // decodeHints.addFormat(BarcodeFormat_ITF);   // disabled: was causing crashes

        // here's the meat of the decode process
        Ref<LuminanceSource> luminanceSource ([self getLuminanceSourceFromSample: sampleBuffer imageBytes:&imageBytes]);
        // [self dumpImage: [[self getImageFromLuminanceSource:luminanceSource] autorelease]];
        Ref<Binarizer> binarizer (new HybridBinarizer(luminanceSource));
        Ref<BinaryBitmap> bitmap (new BinaryBitmap(binarizer));
        Ref<MultiFormatReader> reader (new MultiFormatReader());
        Ref<Result> result (reader->decode(bitmap, decodeHints));
        Ref<String> resultText (result->getText());
        BarcodeFormat formatVal = result->getBarcodeFormat();
        NSString* format = [self formatStringFrom:formatVal];


        const char* cString = resultText->getText().c_str();
        NSString* resultString = [[[NSString alloc] initWithCString:cString encoding:NSUTF8StringEncoding] autorelease];

        [self barcodeScanSucceeded:resultString format:format];

    }
    catch (zxing::ReaderException &rex) {
        // NSString *message = [[[NSString alloc] initWithCString:rex.what() encoding:NSUTF8StringEncoding] autorelease];
        // NSLog(@"decoding: ReaderException: %@", message);
    }
    catch (zxing::IllegalArgumentException &iex) {
        // NSString *message = [[[NSString alloc] initWithCString:iex.what() encoding:NSUTF8StringEncoding] autorelease];
        // NSLog(@"decoding: IllegalArgumentException: %@", message);
    }
    catch (...) {
        // NSLog(@"decoding: unknown exception");
        // [self barcodeScanFailed:@"unknown exception decoding barcode"];
    }

    // NSTimeInterval timeElapsed = [NSDate timeIntervalSinceReferenceDate] - timeStart;
    // NSLog(@"decoding completed in %dms", (int) (timeElapsed * 1000));

    // free the buffer behind the LuminanceSource
    if (imageBytes) {
        free(imageBytes);
    }
}
//--------------------------------------------------------------------------
// convert barcode format to string
//--------------------------------------------------------------------------
- (NSString*)formatStringFrom:(zxing::BarcodeFormat)format {
    if (format == zxing::BarcodeFormat_QR_CODE)     return @"QR_CODE";
    if (format == zxing::BarcodeFormat_DATA_MATRIX) return @"DATA_MATRIX";
    if (format == zxing::BarcodeFormat_UPC_E)       return @"UPC_E";
    if (format == zxing::BarcodeFormat_UPC_A)       return @"UPC_A";
    if (format == zxing::BarcodeFormat_EAN_8)       return @"EAN_8";
    if (format == zxing::BarcodeFormat_EAN_13)      return @"EAN_13";
    if (format == zxing::BarcodeFormat_CODE_128)    return @"CODE_128";
    if (format == zxing::BarcodeFormat_CODE_39)     return @"CODE_39";
    if (format == zxing::BarcodeFormat_ITF)         return @"ITF";
    return @"???";
}
//--------------------------------------------------------------------------
// convert capture's sample buffer (scanned picture) into the thing that
// zxing needs.
//--------------------------------------------------------------------------
- (zxing::Ref<zxing::LuminanceSource>) getLuminanceSourceFromSample:(CMSampleBufferRef)sampleBuffer imageBytes:(uint8_t**)ptr {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    uint8_t* baseAddress = (uint8_t*) CVPixelBufferGetBaseAddress(imageBuffer);

    // only use the center 90% of the min(width,height) of the captured image
    size_t greyWidth = 9 * MIN(width, height) / 10;
    uint8_t* greyData = (uint8_t*) malloc(greyWidth * greyWidth);

    // remember this pointer so we can free it later
    *ptr = greyData;

    if (!greyData) {
        CVPixelBufferUnlockBaseAddress(imageBuffer,0);
        throw new zxing::ReaderException("out of memory");
    }

    size_t offsetX = (width - greyWidth) / 2;
    size_t offsetY = (height - greyWidth) / 2;

    // pixel-by-pixel ...
    for (size_t i=0; i<greyWidth; i++) {
        for (size_t j=0; j<greyWidth; j++) {
            // i,j are the coordinates from the sample buffer
            // ni, nj are the coordinates in the LuminanceSource
            // in this case, there's a rotation taking place
            // (the -1 keeps the index inside the greyWidth * greyWidth buffer)
            size_t ni = greyWidth - j - 1;
            size_t nj = i;

            size_t baseOffset = (j+offsetY)*bytesPerRow + (i + offsetX)*4;

            // convert from color to grayscale
            // http://en.wikipedia.org/wiki/Grayscale#Converting_color_to_grayscale
            size_t value = 0.11 * baseAddress[baseOffset] +
                           0.59 * baseAddress[baseOffset + 1] +
                           0.30 * baseAddress[baseOffset + 2];

            greyData[nj*greyWidth + ni] = value;
        }
    }

    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    using namespace zxing;

    Ref<LuminanceSource> luminanceSource (
        new GreyscaleLuminanceSource(greyData, greyWidth, greyWidth, 0, 0, greyWidth, greyWidth)
    );

    return luminanceSource;
}
//--------------------------------------------------------------------------
// for debugging
//--------------------------------------------------------------------------
- (UIImage*) getImageFromLuminanceSource:(zxing::LuminanceSource*)luminanceSource {
    unsigned char* bytes = luminanceSource->getMatrix();
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    CGContextRef context = CGBitmapContextCreate(
        bytes,
        luminanceSource->getWidth(), luminanceSource->getHeight(), 8, luminanceSource->getWidth(),
        colorSpace,
        kCGImageAlphaNone
    );

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage* image = [[UIImage alloc] initWithCGImage:cgImage];

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(cgImage);
    free(bytes);

    return image;
}

//--------------------------------------------------------------------------
// for debugging
//--------------------------------------------------------------------------
- (UIImage*)getImageFromSample:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    uint8_t* baseAddress = (uint8_t*) CVPixelBufferGetBaseAddress(imageBuffer);
    int length = height * bytesPerRow;
    uint8_t* newBaseAddress = (uint8_t*) malloc(length);
    memcpy(newBaseAddress, baseAddress, length);
    baseAddress = newBaseAddress;

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(
        baseAddress,
        width, height, 8, bytesPerRow,
        colorSpace,
        kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst
    );

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage* image = [[UIImage alloc] initWithCGImage:cgImage];

    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(cgImage);

    free(baseAddress);

    return image;
}
//--------------------------------------------------------------------------
// for debugging
//--------------------------------------------------------------------------
- (void)dumpImage:(UIImage*)image {
    NSLog(@"writing image to library: %dx%d", (int)image.size.width, (int)image.size.height);
    ALAssetsLibrary* assetsLibrary = [[[ALAssetsLibrary alloc] init] autorelease];
    [assetsLibrary
     writeImageToSavedPhotosAlbum:image.CGImage
     orientation:ALAssetOrientationUp
     completionBlock:^(NSURL* assetURL, NSError* error){
         if (error) NSLog(@"   error writing image to library");
         else       NSLog(@"   wrote image to library %@", assetURL);
     }
     ];
}

@end
//------------------------------------------------------------------------------
// view controller for the ui
//------------------------------------------------------------------------------
@implementation CDVbcsViewController
@synthesize processor = _processor;
@synthesize shutterPressed = _shutterPressed;
@synthesize alternateXib = _alternateXib;
@synthesize overlayView = _overlayView;

//--------------------------------------------------------------------------
- (id)initWithProcessor:(CDVbcsProcessor*)processor alternateOverlay:(NSString *)alternateXib {
    self = [super init];
    if (!self) return self;

    self.processor = processor;
    self.shutterPressed = NO;
    self.alternateXib = alternateXib;
    self.overlayView = nil;
    return self;
}

//--------------------------------------------------------------------------
- (void)dealloc {
    self.view = nil;
    self.processor = nil;
    self.shutterPressed = NO;
    self.alternateXib = nil;
    self.overlayView = nil;
    [super dealloc];
}
//--------------------------------------------------------------------------
- (void)loadView {
    self.view = [[[UIView alloc] initWithFrame: self.processor.parentViewController.view.frame] autorelease];

    // setup capture preview layer
    AVCaptureVideoPreviewLayer* previewLayer = self.processor.previewLayer;
    previewLayer.frame = self.view.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    if ([previewLayer isOrientationSupported]) {
        [previewLayer setOrientation:AVCaptureVideoOrientationPortrait];
    }

    [self.view.layer insertSublayer:previewLayer below:[[self.view.layer sublayers] objectAtIndex:0]];

    [self.view addSubview:[self buildOverlayView]];
}
//--------------------------------------------------------------------------
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];

    // set video orientation to what the camera sees
    self.processor.previewLayer.orientation = [[UIApplication sharedApplication] statusBarOrientation];

    // fixes the case where the status bar is landscape but the preview layer
    // starts up in portrait (and doesn't fill the whole view)
    self.processor.previewLayer.frame = self.view.bounds;
}

//--------------------------------------------------------------------------
- (void)viewDidAppear:(BOOL)animated {
    [self startCapturing];

    [super viewDidAppear:animated];
}

//--------------------------------------------------------------------------
- (void)startCapturing {
    self.processor.capturing = YES;
}

//--------------------------------------------------------------------------
- (void)shutterButtonPressed {
    self.shutterPressed = YES;
}

//--------------------------------------------------------------------------
- (IBAction)cancelButtonPressed:(id)sender {
    [self.processor performSelector:@selector(barcodeScanCancelled) withObject:nil afterDelay:0];
}

//--------------------------------------------------------------------------
- (UIView *)buildOverlayViewFromXib
{
    [[NSBundle mainBundle] loadNibNamed:self.alternateXib owner:self options:NULL];

    if ( self.overlayView == nil )
    {
        NSLog(@"%@", @"An error occurred loading the overlay xib. It appears that the overlayView outlet is not set.");
        return nil;
    }

    return self.overlayView;
}
//--------------------------------------------------------------------------
- (UIView*)buildOverlayView {

    if ( nil != self.alternateXib )
    {
        return [self buildOverlayViewFromXib];
    }
    CGRect bounds = self.view.bounds;
    bounds = CGRectMake(0, 0, bounds.size.width, bounds.size.height);

    UIView* overlayView = [[[UIView alloc] initWithFrame:bounds] autorelease];
    overlayView.autoresizesSubviews = YES;
    overlayView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    overlayView.opaque = NO;

    UIToolbar* toolbar = [[[UIToolbar alloc] init] autorelease];
    toolbar.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin;

    id cancelButton = [[[UIBarButtonItem alloc]
                        initWithBarButtonSystemItem:UIBarButtonSystemItemCancel
                        target:(id)self
                        action:@selector(cancelButtonPressed:)
                        ] autorelease];

    id flexSpace = [[[UIBarButtonItem alloc]
                     initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
                     target:nil
                     action:nil
                     ] autorelease];

#if USE_SHUTTER
    id shutterButton = [[[UIBarButtonItem alloc]
                         initWithBarButtonSystemItem:UIBarButtonSystemItemCamera
                         target:(id)self
                         action:@selector(shutterButtonPressed)
                         ] autorelease];

    toolbar.items = [NSArray arrayWithObjects:flexSpace,cancelButton,flexSpace,shutterButton,nil];
#else
    toolbar.items = [NSArray arrayWithObjects:flexSpace,cancelButton,flexSpace,nil];
#endif
    bounds = overlayView.bounds;

    [toolbar sizeToFit];
    CGFloat toolbarHeight = [toolbar frame].size.height;
    CGFloat rootViewHeight = CGRectGetHeight(bounds);
    CGFloat rootViewWidth = CGRectGetWidth(bounds);
    CGRect rectArea = CGRectMake(0, rootViewHeight - toolbarHeight, rootViewWidth, toolbarHeight);
    [toolbar setFrame:rectArea];

    [overlayView addSubview: toolbar];

    UIImage* reticleImage = [self buildReticleImage];
    UIView* reticleView = [[[UIImageView alloc] initWithImage: reticleImage] autorelease];
    CGFloat minAxis = MIN(rootViewHeight, rootViewWidth);

    rectArea = CGRectMake(
        0.5 * (rootViewWidth - minAxis),
        0.5 * (rootViewHeight - minAxis),
        minAxis,
        minAxis
    );

    [reticleView setFrame:rectArea];

    reticleView.opaque = NO;
    reticleView.contentMode = UIViewContentModeScaleAspectFit;
    reticleView.autoresizingMask = 0
        | UIViewAutoresizingFlexibleLeftMargin
        | UIViewAutoresizingFlexibleRightMargin
        | UIViewAutoresizingFlexibleTopMargin
        | UIViewAutoresizingFlexibleBottomMargin
        ;

    [overlayView addSubview: reticleView];

    return overlayView;
}
//--------------------------------------------------------------------------

#define RETICLE_SIZE    500.0f
#define RETICLE_WIDTH    10.0f
#define RETICLE_OFFSET   60.0f
#define RETICLE_ALPHA     0.4f

//-------------------------------------------------------------------------
// builds the green box and red line
//-------------------------------------------------------------------------
- (UIImage*)buildReticleImage {
    UIImage* result;
    UIGraphicsBeginImageContext(CGSizeMake(RETICLE_SIZE, RETICLE_SIZE));
    CGContextRef context = UIGraphicsGetCurrentContext();

    if (self.processor.is1D) {
        UIColor* color = [UIColor colorWithRed:1.0 green:0.0 blue:0.0 alpha:RETICLE_ALPHA];
        CGContextSetStrokeColorWithColor(context, color.CGColor);
        CGContextSetLineWidth(context, RETICLE_WIDTH);
        CGContextBeginPath(context);
        CGFloat lineOffset = RETICLE_OFFSET+(0.5*RETICLE_WIDTH);
        CGContextMoveToPoint(context, lineOffset, RETICLE_SIZE/2);
        CGContextAddLineToPoint(context, RETICLE_SIZE-lineOffset, 0.5*RETICLE_SIZE);
        CGContextStrokePath(context);
    }

    if (self.processor.is2D) {
        UIColor* color = [UIColor colorWithRed:0.0 green:1.0 blue:0.0 alpha:RETICLE_ALPHA];
        CGContextSetStrokeColorWithColor(context, color.CGColor);
        CGContextSetLineWidth(context, RETICLE_WIDTH);
        CGContextStrokeRect(context,
            CGRectMake(
                RETICLE_OFFSET,
                RETICLE_OFFSET,
                RETICLE_SIZE-2*RETICLE_OFFSET,
                RETICLE_SIZE-2*RETICLE_OFFSET
            )
        );
    }

    result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return result;
}
#pragma mark CDVBarcodeScannerOrientationDelegate

- (BOOL)shouldAutorotate
{
    if ((self.orientationDelegate != nil) && [self.orientationDelegate respondsToSelector:@selector(shouldAutorotate)]) {
        return [self.orientationDelegate shouldAutorotate];
    }

    return YES;
}

- (NSUInteger)supportedInterfaceOrientations
{
    if ((self.orientationDelegate != nil) && [self.orientationDelegate respondsToSelector:@selector(supportedInterfaceOrientations)]) {
        return [self.orientationDelegate supportedInterfaceOrientations];
    }

    return UIInterfaceOrientationMaskPortrait;
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if ((self.orientationDelegate != nil) && [self.orientationDelegate respondsToSelector:@selector(shouldAutorotateToInterfaceOrientation:)]) {
        return [self.orientationDelegate shouldAutorotateToInterfaceOrientation:interfaceOrientation];
    }

    return YES;
}

- (void) willAnimateRotationToInterfaceOrientation:(UIInterfaceOrientation)orientation duration:(NSTimeInterval)duration
{
    [CATransaction begin];

    self.processor.previewLayer.orientation = orientation;
    [self.processor.previewLayer layoutSublayers];
    self.processor.previewLayer.frame = self.view.bounds;

    [CATransaction commit];
    [super willAnimateRotationToInterfaceOrientation:orientation duration:duration];
}

@end