本文介绍了iOS利用摄像头获取环境光感参数的方法,分享给大家,具体如下:
不多说,代码如下:
#import "LightSensitiveViewController.h"
@import AVFoundation;
#import <ImageIO/ImageIO.h>
@interface LightSensitiveViewController ()< AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *session;
@end
@implementation LightSensitiveViewController

#pragma mark - Lifecycle

- (void)viewDidLoad {
  [super viewDidLoad];
  // Do any additional setup after loading the view.
  self.view.backgroundColor = [UIColor whiteColor];
  self.navigationItem.title = @"光感";
  [self lightSensitive];
}

- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
  // Dispose of any resources that can be recreated.
}

#pragma mark - Ambient-light capture

/// Configures and starts an AVCaptureSession whose video frames carry EXIF
/// brightness metadata; the sample-buffer delegate below reads that value as
/// a proxy for ambient light.
- (void)lightSensitive {
  // 1. Acquire the default camera. It is nil on the simulator or when no
  //    camera exists — bail out instead of building a useless session.
  AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  if (!device) {
    NSLog(@"No video capture device available");
    return;
  }

  // 2. Create the input, surfacing the error instead of passing error:nil
  //    and silently adding a nil input.
  NSError *inputError = nil;
  AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&inputError];
  if (!input) {
    NSLog(@"Failed to create capture input: %@", inputError);
    return;
  }

  // 3. Create the video-data output. Delivering buffers on the main queue is
  //    acceptable here because the delegate only reads one metadata float.
  AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
  [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

  // 4. The session must be held by a strong property so it outlives this
  //    method; a local would be deallocated and startRunning would be a no-op.
  self.session = [[AVCaptureSession alloc] init];
  // High-quality preset so the EXIF metadata is produced reliably.
  [self.session setSessionPreset:AVCaptureSessionPresetHigh];

  // 5. Wire up input and output, honoring the canAdd checks.
  if ([self.session canAddInput:input]) {
    [self.session addInput:input];
  }
  if ([self.session canAddOutput:output]) {
    [self.session addOutput:output];
  }

  // 6. -startRunning blocks until the session is up; Apple recommends calling
  //    it off the main thread to avoid stalling the UI.
  AVCaptureSession *session = self.session;
  dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    [session startRunning];
  });
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Called for every captured frame. Extracts the EXIF brightness value
/// (roughly -5 … 12 per the article; larger = brighter) and toggles the
/// torch: on when brightness drops below 0, off when it rises above 0.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  CFDictionaryRef metadataDict = CMCopyDictionaryOfAttachments(NULL, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
  if (!metadataDict) {
    // The original code called CFRelease(NULL) here, which crashes.
    return;
  }
  // CFBridgingRelease transfers the Copy-rule +1 reference to ARC, so no
  // manual CFRelease is needed (and no leak if an early return is added later).
  NSDictionary *metadata = CFBridgingRelease(metadataDict);
  NSDictionary *exifMetadata = metadata[(NSString *)kCGImagePropertyExifDictionary];
  float brightnessValue = [exifMetadata[(NSString *)kCGImagePropertyExifBrightnessValue] floatValue];
  NSLog(@"%f", brightnessValue);

  // Drive the torch from the brightness reading.
  AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  if (![device hasTorch]) {
    return; // Device has no torch (e.g. front camera, simulator).
  }
  AVCaptureTorchMode desiredMode =
      (brightnessValue < 0) ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
  if (device.torchMode == desiredMode) {
    return; // Already in the desired state; avoid relocking every frame.
  }
  // lockForConfiguration: can fail (e.g. device in use by another session) —
  // check the BOOL result instead of passing nil and ignoring failure.
  NSError *lockError = nil;
  if ([device lockForConfiguration:&lockError]) {
    device.torchMode = desiredMode;
    [device unlockForConfiguration];
  } else {
    NSLog(@"Could not lock device for torch configuration: %@", lockError);
  }
}

@end
注意点:
- 首先引入AVFoundation框架,并导入ImageIO框架的头文件 &lt;ImageIO/ImageIO.h&gt;
- 遵循AVCaptureVideoDataOutputSampleBufferDelegate协议
- AVCaptureSession对象要定义为属性,确保有对象在一直引用AVCaptureSession对象;否则如果在lightSensitive方法中定义并初始化AVCaptureSession对象,会造成AVCaptureSession对象提前释放, [self.session startRunning];会失效
- 实现AVCaptureVideoDataOutputSampleBufferDelegate的代理方法,参数brightnessValue就是周围环境的亮度参数了,范围大概在 -5 ~ 12 之间,参数数值越大,环境越亮
以上就是本文的全部内容,希望对大家的学习有所帮助,也希望大家多多支持自由互联。
