Obj-c - Getting a color from the camera view?

Problem

I want my users to be able to tap an area of the screen and get back the color they tapped. I can do this successfully with the code below. That said, it returns the background color of self.previewView (white) rather than the color tapped inside its subview (i.e. the camera preview).

How can I get the code below to return the color tapped in self.videoPreviewLayer (a sublayer of self.previewView)?

ViewController.h

#import <AVFoundation/AVFoundation.h>

@interface CaptureViewController : UIViewController <UIGestureRecognizerDelegate>

@property (nonatomic) AVCaptureSession *captureSession;
@property (nonatomic) AVCapturePhotoOutput *stillImageOutput;
@property (nonatomic) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (weak, nonatomic) IBOutlet UIImageView *colorPicked;

@property (nonatomic) IBOutlet UIView *previewView;

@end

ViewController.m

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];

    self.captureSession = [AVCaptureSession new];
    self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;

    AVCaptureDevice *backCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!backCamera) {
        NSLog(@"Unable to access back camera!");
        return;
    }

    NSError *error;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera
                                                                        error:&error];
    if (!error) {
        self.stillImageOutput = [AVCapturePhotoOutput new];

        if ([self.captureSession canAddInput:input] && [self.captureSession canAddOutput:self.stillImageOutput]) {
            [self.captureSession addInput:input];
            [self.captureSession addOutput:self.stillImageOutput];
            [self setupLivePreview];
        }

        // -startRunning blocks, so kick the session off away from the main thread.
        dispatch_queue_t globalQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
        dispatch_async(globalQueue, ^{
            [self.captureSession startRunning];
        });

        dispatch_async(dispatch_get_main_queue(), ^{
            self.videoPreviewLayer.frame = self.previewView.bounds;
        });
    }
    else {
        NSLog(@"Error Unable to initialize back camera: %@", error.localizedDescription);
    }
}


- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.

    UITapGestureRecognizer *tapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapGesture:)];
    [self.previewView addGestureRecognizer:tapRecognizer];
    self.previewView.userInteractionEnabled = YES;
}

- (void)setupLivePreview {
    self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    if (self.videoPreviewLayer) {
        self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
        self.videoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        [self.previewView.layer addSublayer:self.videoPreviewLayer];
    }
}
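
Note that the layer's frame is only set once, in viewDidAppear:, so it can drift if the host view is ever laid out again (rotation, size classes). A common pattern, not part of the question's code, is to keep the two in sync in viewDidLayoutSubviews:

- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];
    // Keep the preview layer matched to its host view whenever layout changes.
    self.videoPreviewLayer.frame = self.previewView.bounds;
}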

- (void)tapGesture:(UITapGestureRecognizer *)recognizer {
    CGPoint location = [recognizer locationInView:recognizer.view];
    CGPoint p = { round(location.x), round(location.y) };

    UIColor *mylovelycolor = [self colorInViewAtPoint:p];
    self.colorPicked.backgroundColor = mylovelycolor;

    NSLog(@"MY LOVELY COLOR %@", mylovelycolor.CGColor);

    // Note: indexing like this assumes an RGB color space (see the safer variant below).
    const CGFloat *components = CGColorGetComponents(mylovelycolor.CGColor);
    NSLog(@"Red: %f", components[0]);
    NSLog(@"Green: %f", components[1]);
    NSLog(@"Blue: %f", components[2]);
    NSLog(@"Alpha: %f", CGColorGetAlpha(mylovelycolor.CGColor));
}
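
As an aside, indexing the result of CGColorGetComponents() is only safe for colors in an RGB color space; a grayscale CGColor has just two components. A sketch of a safer variant that also does the 0-255 conversion (the helper name logColor: is my own):

- (void)logColor:(UIColor *)color {
    // -getRed:green:blue:alpha: converts compatible color spaces for us,
    // so this also works for grayscale colors.
    CGFloat r = 0, g = 0, b = 0, a = 0;
    if ([color getRed:&r green:&g blue:&b alpha:&a]) {
        int red   = (int)lround(r * 255);
        int green = (int)lround(g * 255);
        int blue  = (int)lround(b * 255);
        NSLog(@"RGB: %d %d %d, alpha: %f", red, green, blue, a);
    }
}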


- (UIColor *)colorInViewAtPoint:(CGPoint)p {
    unsigned char pixel[4] = {0};
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedLast);
    // Align the tap location with the 1x1 bitmap's coordinates.
    CGContextTranslateCTM(context, -p.x, -p.y);
    [self.previewView.layer renderInContext:context];
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    return [UIColor colorWithRed:pixel[0]/255.0
                           green:pixel[1]/255.0
                            blue:pixel[2]/255.0
                           alpha:pixel[3]/255.0];
}

Tags: ios, objective-c

Solution


The reason you get white is that AVCaptureVideoPreviewLayer's video content is composited on the GPU, so -renderInContext: only sees the empty layer and you end up sampling previewView's background. Instead, grab the actual frames from the session and sample their pixel data yourself. Try the following code.

Converting the tap point from view coordinates to image coordinates:

- (CGPoint)convertPoint:(CGPoint)viewPoint fromView:(UIView *)view {

    CGPoint imagePoint = viewPoint;

    CGSize imageSize = self.size;
    CGSize viewSize  = view.bounds.size;

    CGFloat ratioX = viewSize.width / imageSize.width;
    CGFloat ratioY = viewSize.height / imageSize.height;

    UIViewContentMode contentMode = view.contentMode;

    switch (contentMode) {
        case UIViewContentModeScaleToFill:
        case UIViewContentModeRedraw:
        {
            imagePoint.x /= ratioX;
            imagePoint.y /= ratioY;
            break;
        }

        case UIViewContentModeScaleAspectFit:
        case UIViewContentModeScaleAspectFill:
        {
            CGFloat scale;

            if (contentMode == UIViewContentModeScaleAspectFit) {
                scale = MIN(ratioX, ratioY);
            }
            else /*if (contentMode == UIViewContentModeScaleAspectFill)*/ {
                scale = MAX(ratioX, ratioY);
            }

            // Remove the x or y margin added in fit mode
            imagePoint.x -= (viewSize.width  - imageSize.width  * scale) / 2.0f;
            imagePoint.y -= (viewSize.height - imageSize.height * scale) / 2.0f;

            imagePoint.x /= scale;
            imagePoint.y /= scale;

            break;
        }

        case UIViewContentModeCenter:
        {
            imagePoint.x -= (viewSize.width  - imageSize.width)  / 2.0f;
            imagePoint.y -= (viewSize.height - imageSize.height) / 2.0f;
            break;
        }

        case UIViewContentModeTop:
        {
            imagePoint.x -= (viewSize.width - imageSize.width) / 2.0f;
            break;
        }

        case UIViewContentModeBottom:
        {
            imagePoint.x -= (viewSize.width  - imageSize.width)  / 2.0f;
            imagePoint.y -= (viewSize.height - imageSize.height);
            break;
        }

        case UIViewContentModeLeft:
        {
            imagePoint.y -= (viewSize.height - imageSize.height) / 2.0f;
            break;
        }

        case UIViewContentModeRight:
        {
            imagePoint.x -= (viewSize.width  - imageSize.width);
            imagePoint.y -= (viewSize.height - imageSize.height) / 2.0f;
            break;
        }

        case UIViewContentModeTopRight:
        {
            imagePoint.x -= (viewSize.width - imageSize.width);
            break;
        }

        case UIViewContentModeBottomLeft:
        {
            imagePoint.y -= (viewSize.height - imageSize.height);
            break;
        }

        case UIViewContentModeBottomRight:
        {
            imagePoint.x -= (viewSize.width  - imageSize.width);
            imagePoint.y -= (viewSize.height - imageSize.height);
            break;
        }

        case UIViewContentModeTopLeft:
        default:
        {
            break;
        }
    }

    return imagePoint;
}
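
This method reads self.size, and the helpers that follow read self.CGImage and self.scale, so they are all evidently meant to live in a UIImage category. A sketch of a matching header, with the category name being my own invention:

#import <UIKit/UIKit.h>
#import <CoreMedia/CoreMedia.h>

// Assumed category grouping the three helpers in this answer.
@interface UIImage (ColorPicker)
- (CGPoint)convertPoint:(CGPoint)viewPoint fromView:(UIView *)view;
+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (UIColor *)pickColorWithPoint:(CGPoint)atPoint pixelFormatType:(UInt32)format;
@end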

+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    // (these flags assume BGRA frames; see the output setup below)
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Note: this rotation has no effect on the image created below, since
    // nothing is drawn into the context afterwards; rotate via the UIImage
    // orientation instead if you need it.
    CGFloat degrees = 90.f;
    CGFloat radians = degrees * (M_PI / 180.f);
    CGContextRotateCTM(context, radians);

    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    //    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1 orientation:UIImageOrientationRight];
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}
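
imageFromSampleBuffer: only ever runs if sample buffers are actually being delivered, and the question's session only has an AVCapturePhotoOutput. A minimal sketch of the missing wiring, assuming the controller adopts AVCaptureVideoDataOutputSampleBufferDelegate and declares a latestFrame property (both names here are assumptions):

- (void)setupVideoDataOutput {
    AVCaptureVideoDataOutput *videoOutput = [AVCaptureVideoDataOutput new];
    // Request BGRA so the bitmap flags in imageFromSampleBuffer: match the data.
    videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey :
                                       @(kCVPixelFormatType_32BGRA) };
    [videoOutput setSampleBufferDelegate:self
                                   queue:dispatch_queue_create("video.frames", NULL)];
    if ([self.captureSession canAddOutput:videoOutput]) {
        [self.captureSession addOutput:videoOutput];
    }
}

- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // Converting every frame to UIImage is wasteful; in production you would
    // convert on demand, but it keeps this sketch simple.
    UIImage *frame = [UIImage imageFromSampleBuffer:sampleBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.latestFrame = frame;
    });
}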

Picking the color at a point:

- (UIColor *)pickColorWithPoint:(CGPoint)atPoint pixelFormatType:(UInt32)format {

    UIColor *color = nil;

    CGImageRef cgImage = self.CGImage;
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    NSUInteger x = (NSUInteger)floor(atPoint.x) * self.scale;
    NSUInteger y = (NSUInteger)floor(atPoint.y) * self.scale;

    if ((x < width) && (y < height)) {
        CGDataProviderRef provider = CGImageGetDataProvider(cgImage);
        CFDataRef bitmapData = CGDataProviderCopyData(provider);

        const UInt8 *data = CFDataGetBytePtr(bitmapData);

        // Assumes 4 bytes per pixel with no row padding; if rows may be padded,
        // use CGImageGetBytesPerRow(cgImage) as the row stride instead.
        size_t offset = ((width * y) + x) * 4;

        if (format == kCVPixelFormatType_32RGBA) {

            UInt8 red   = data[offset];
            UInt8 green = data[offset + 1];
            UInt8 blue  = data[offset + 2];
            UInt8 alpha = data[offset + 3];
            color = [UIColor colorWithRed:red/255.0f green:green/255.0f blue:blue/255.0f alpha:alpha/255.0f];

        } else if (format == kCVPixelFormatType_32BGRA) {

            UInt8 blue  = data[offset + 0];
            UInt8 green = data[offset + 1];
            UInt8 red   = data[offset + 2];

            color = [UIColor colorWithRed:red/255.0f green:green/255.0f blue:blue/255.0f alpha:1.0f];
        }

        CFRelease(bitmapData);
    }

    return color;
}
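
Putting it together, the tap handler might look roughly like the sketch below. One caveat: convertPoint:fromView: keys off the view's contentMode, while the preview layer uses videoGravity, so with AVLayerVideoGravityResizeAspect you would set previewView.contentMode to UIViewContentModeScaleAspectFit (or adapt the switch) for the math to line up. Again, latestFrame is the assumed property from the sketch above:

- (void)tapGesture:(UITapGestureRecognizer *)recognizer {
    UIImage *frame = self.latestFrame;   // assumed property holding the newest frame
    if (!frame) {
        return;
    }

    CGPoint viewPoint  = [recognizer locationInView:self.previewView];
    CGPoint imagePoint = [frame convertPoint:viewPoint fromView:self.previewView];
    UIColor *color = [frame pickColorWithPoint:imagePoint
                               pixelFormatType:kCVPixelFormatType_32BGRA];
    if (color) {
        self.colorPicked.backgroundColor = color;
    }
}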
