
The problem isn't getting the pixel data; I was able to find some source for that:

-(NSArray *)getRGBAtLocationOnImage:(UIImage *)theImage X:(int)x Y:(int)y
{
    // First get the image into your data buffer
    CGImageRef image = [theImage CGImage];
    NSUInteger width = CGImageGetWidth(image);
    NSUInteger height = CGImageGetHeight(image);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = malloc(height * width * 4);
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(rawData, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);

    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGContextRelease(context);

    // Now your rawData contains the image data in the RGBA8888 pixel format.
    int byteIndex = (bytesPerRow * y) + x * bytesPerPixel;
    int red = rawData[byteIndex];
    int green = rawData[byteIndex + 1];
    int blue = rawData[byteIndex + 2];
    //int alpha = rawData[byteIndex + 3];

    NSLog(@"Red: %d   Green: %d    Blue: %d",red,green,blue);

    NSArray *i = [[NSArray alloc] initWithObjects:[NSNumber numberWithInt:red], [NSNumber numberWithInt:green], [NSNumber numberWithInt:blue], nil];

    free(rawData);
    return i;
}

The problem is the location of the pixels I want to get. I have no idea how to figure out where the pixels I want are located. What is a way of figuring that out?

lufthansa747
  • Well, if you don't know which pixels you want, what do you expect? –  Jan 22 '12 at 15:34
    What exactly is it that you want to achieve? In this code example you pass `x` and `y` in so the position is known beforehand. What information do you have that you want to convert to a position? – Dennis Bliefernicht Jan 22 '12 at 15:35
  • The problem is I don't know the x and y beforehand. I get the image from the camera. Right after the picture is taken I print out the height and width; the values are 3264 x 2448. Then I print the values again after I make the CGImageRef and they are reversed: 2448 x 3264. Why is that? – lufthansa747 Jan 22 '12 at 16:07
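
The dimension swap in the last comment is most likely an orientation effect: UIImage's size takes imageOrientation into account, while CGImageGetWidth/CGImageGetHeight report the raw bitmap, which the camera stores sideways and marks with an orientation flag instead of rotating the pixels. A small diagnostic sketch (not from the original post; it assumes theImage is the photo returned by the camera):

    #import <UIKit/UIKit.h>

    // Diagnostic sketch: compare the oriented size with the raw bitmap size.
    // Assumes `theImage` is the UIImage that came back from the camera.
    void LogImageDimensions(UIImage *theImage)
    {
        CGImageRef image = [theImage CGImage];
        // UIImage.size applies the imageOrientation flag.
        NSLog(@"UIImage size: %.0f x %.0f, orientation: %ld",
              theImage.size.width, theImage.size.height, (long)theImage.imageOrientation);
        // CGImageGetWidth/Height read the raw bitmap and ignore the flag,
        // which is why the two prints come out with width and height swapped.
        NSLog(@"CGImage size: %zu x %zu",
              CGImageGetWidth(image), CGImageGetHeight(image));
    }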

1 Answer


I'm not sure I understand your issue, but...

Take a look at your method:

-(NSArray *)getRGBAtLocationOnImage:(UIImage *)theImage X:(int)x Y:(int)y {
    // Your method
}

It takes x and y and returns i, an array containing the RGB data of the point (x, y) you passed in.

Suppose you have a 100x100 pixel image: you have to call your method 10,000 times (once per pixel) if you want to check every pixel in the image.

In that case, you can try something like this:

NSMutableArray *RGBImage = [[NSMutableArray alloc] initWithObjects:nil];
for (int k = 0; k < IMAGE_WIDTH; k++) {
    for (int j = 0; j < IMAGE_HEIGHT; j++) {
        NSArray *RGBPixel = [self getRGBAtLocationOnImage:theImage X:k Y:j];
        [RGBImage addObject:RGBPixel];
    }
}
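
Note that this loop calls your method once per pixel, and every call redraws the whole image into a fresh bitmap, which will be very slow for a 3264 x 2448 photo. One possible variant (just a sketch, reusing the same RGBA8888 layout as your method, with theImage being the source UIImage) draws the image once and then reads every pixel out of the same buffer:

    CGImageRef image = [theImage CGImage];
    NSUInteger width = CGImageGetWidth(image);
    NSUInteger height = CGImageGetHeight(image);
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;

    // Draw the image into an RGBA8888 buffer once.
    unsigned char *rawData = malloc(height * bytesPerRow);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rawData, width, height, 8, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGContextRelease(context);

    // Read every pixel straight out of the buffer instead of redrawing per pixel.
    NSMutableArray *RGBImage = [[NSMutableArray alloc] init];
    for (NSUInteger y = 0; y < height; y++) {
        for (NSUInteger x = 0; x < width; x++) {
            NSUInteger byteIndex = (bytesPerRow * y) + x * bytesPerPixel;
            NSArray *RGBPixel = [NSArray arrayWithObjects:
                                    [NSNumber numberWithInt:rawData[byteIndex]],     // red
                                    [NSNumber numberWithInt:rawData[byteIndex + 1]], // green
                                    [NSNumber numberWithInt:rawData[byteIndex + 2]], // blue
                                    nil];
            [RGBImage addObject:RGBPixel];
        }
    }
    free(rawData);

Keep in mind that storing every pixel of an 8-megapixel photo as an NSArray of NSNumbers uses a lot of memory, so in practice you would probably inspect the bytes inside the loop and only keep the few pixels you care about.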
Giuseppe Garassino
  • I know I only want to grab around 10 specific pixels from the picture. The problem is I don't know where those pixels are. – lufthansa747 Jan 22 '12 at 16:08
  • So you have to figure out what you are looking for... For example, if you are looking for dark pixels, you can check your whole image, like I suggested, and then take only the values you are interested in (see the sketch after these comments). – Giuseppe Garassino Jan 22 '12 at 16:21
  • The problem is I don't know the x and y beforehand. I get the image from the camera. Right after the picture is taken I print out the height and width; the values are 3264 x 2448. Then I print the values again after I make the CGImageRef and they are reversed: 2448 x 3264. Why is that? – lufthansa747 Jan 22 '12 at 16:40
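
A small sketch of the "scan and filter" idea from the comments (IMAGE_WIDTH, IMAGE_HEIGHT and the threshold of 40 are placeholder values, and getRGBAtLocationOnImage:X:Y: is the method from the question): collect the coordinates of every pixel that passes some test, e.g. dark pixels.

    // Sketch only: record the (x, y) coordinates of "dark" pixels.
    NSMutableArray *darkPixelLocations = [[NSMutableArray alloc] init];
    for (int x = 0; x < IMAGE_WIDTH; x++) {
        for (int y = 0; y < IMAGE_HEIGHT; y++) {
            NSArray *rgb = [self getRGBAtLocationOnImage:theImage X:x Y:y];
            // Average the three channels as a rough brightness measure.
            int brightness = ([[rgb objectAtIndex:0] intValue] +
                              [[rgb objectAtIndex:1] intValue] +
                              [[rgb objectAtIndex:2] intValue]) / 3;
            if (brightness < 40) { // threshold chosen arbitrarily for illustration
                [darkPixelLocations addObject:
                    [NSArray arrayWithObjects:[NSNumber numberWithInt:x],
                                              [NSNumber numberWithInt:y], nil]];
            }
        }
    }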