There have been great barcode-reading libraries available for Xamarin for some time, but iOS 7 has built-in barcode-recognition support.
There's only one tricky bit: you have to tell the AVCaptureMetadataOutput
what types of barcodes you're interested in after you've added it to the AVCaptureSession
. (I suppose what happens behind the scenes is that the AVCaptureSession registers with the AVCaptureMetadataOutput the various types of barcodes it could recognize.)
UPDATE: This also works for realtime face detection! Just replace AVMetadataMachineReadableCodeObject with AVMetadataFaceObject and set AVMetadataObject.TypeFace as the metadata object type!
Here's a complete program in Xamarin.iOS that recognizes QR and standard Ean13 barcodes:
[code lang="csharp"]
using System;
using System.Collections.Generic;
using System.Linq;
using MonoTouch.Foundation;
using MonoTouch.UIKit;
using System.Drawing;
using MonoTouch.CoreGraphics;
using MonoTouch.AVFoundation;
using MonoTouch.CoreFoundation;
namespace SingleFileSolution
{
public class ContentView : UIView
{
    AVCaptureVideoPreviewLayer previewLayer;

    // Full-screen view hosting the live camera preview, with a label that
    // displays the string payload of the most recently recognized barcode.
    public ContentView(UIColor fillColor, AVCaptureVideoPreviewLayer layer, MyMetadataOutputDelegate metadataSource)
    {
        BackgroundColor = fillColor;

        previewLayer = layer;
        previewLayer.MasksToBounds = true;
        previewLayer.VideoGravity = AVCaptureVideoPreviewLayer.GravityResizeAspectFill;

        var screenBounds = UIScreen.MainScreen.Bounds;
        Frame = screenBounds;
        previewLayer.Frame = screenBounds;
        Layer.AddSublayer(previewLayer);

        var recognizedTextLabel = new UILabel(new RectangleF(40, 80, screenBounds.Width - 80, 80));
        AddSubview(recognizedTextLabel);

        // Show each recognized code's string value as it arrives.
        metadataSource.MetadataFound += (s, e) => recognizedTextLabel.Text = e.StringValue;
    }

    public override void LayoutSubviews()
    {
        base.LayoutSubviews();
        // Keep the preview layer covering the whole view after any resize.
        previewLayer.Frame = Bounds;
    }
}
public class MyMetadataOutputDelegate : AVCaptureMetadataOutputObjectsDelegate
{
    // Raised once for each machine-readable code recognized in a captured frame.
    // FIX: the original declaration contained a stray backslash
    // ("EventHandler\<...>"), a blog-markup escape that is not valid C#.
    public event EventHandler<AVMetadataMachineReadableCodeObject> MetadataFound = delegate {};

    // Called by AVFoundation with every batch of metadata objects detected
    // in the capture stream; forwards only machine-readable codes.
    public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
    {
        foreach (var m in metadataObjects)
        {
            // `as` + null check avoids the redundant double type-test of `is` + cast.
            var code = m as AVMetadataMachineReadableCodeObject;
            if (code != null)
            {
                MetadataFound(this, code);
            }
        }
    }
}
public class SimpleViewController : UIViewController
{
    AVCaptureSession session;
    AVCaptureMetadataOutput metadataOutput;

    public SimpleViewController() : base()
    {
    }

    public override void DidReceiveMemoryWarning()
    {
        // Releases the view if it doesn't have a superview.
        base.DidReceiveMemoryWarning();
    }

    // Builds the capture pipeline: camera input -> session -> metadata output,
    // then installs a preview view wired to the metadata delegate.
    public override void ViewDidLoad()
    {
        base.ViewDidLoad();
        session = new AVCaptureSession();

        // FIX: DefaultDeviceWithMediaType returns null on the simulator or on
        // devices without a camera; the original code crashed in AddInput.
        var camera = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
        if (camera == null)
        {
            Console.WriteLine("No video capture device available.");
            return;
        }
        // FromDevice returns null when the device can't be opened
        // (e.g. camera access denied).
        var input = AVCaptureDeviceInput.FromDevice(camera);
        if (input == null)
        {
            Console.WriteLine("Could not create a capture input for the camera.");
            return;
        }
        session.AddInput(input);

        // Add the metadata output channel
        metadataOutput = new AVCaptureMetadataOutput();
        var metadataDelegate = new MyMetadataOutputDelegate();
        metadataOutput.SetDelegate(metadataDelegate, DispatchQueue.MainQueue);
        session.AddOutput(metadataOutput);

        // Confusing! *After* adding to session, tell output what to recognize...
        // (AvailableMetadataObjectTypes is empty until the output joins a session.)
        foreach (var t in metadataOutput.AvailableMetadataObjectTypes)
        {
            Console.WriteLine(t);
        }
        metadataOutput.MetadataObjectTypes = new NSString[] {
            AVMetadataObject.TypeQRCode,
            AVMetadataObject.TypeEAN13Code
        };

        var previewLayer = new AVCaptureVideoPreviewLayer(session);
        var contentView = new ContentView(UIColor.Blue, previewLayer, metadataDelegate);
        session.StartRunning();
        this.View = contentView;
    }
}
[Register("AppDelegate")]
public class AppDelegate : UIApplicationDelegate
{
    UIWindow mainWindow;
    SimpleViewController rootController;

    // Builds the single-window UI once the application has finished launching.
    public override bool FinishedLaunching(UIApplication app, NSDictionary options)
    {
        rootController = new SimpleViewController();
        mainWindow = new UIWindow(UIScreen.MainScreen.Bounds)
        {
            RootViewController = rootController
        };
        mainWindow.MakeKeyAndVisible();
        return true;
    }
}
public class Application
{
    // Program entry point: hands control to the Cocoa Touch run loop and
    // names "AppDelegate" as the registered application delegate class.
    static void Main(string[] args)
    {
        UIApplication.Main(args, null, "AppDelegate");
    }
}
}
[/code]