Home>

Using the sample code on the net, the place where you tapped the image
I'm trying an app that gets RGB values,
It doesn't work.

Even if you load the image and tap anywhere
A numerical value different from the color of that place will appear.

Display RGB values ​​with printlabel placed on storyubord,
I am trying to display the UIView of that color with clorview.

Corresponding source code
import UIKit
class TapViewController: UIViewController {

    @IBOutlet var imageView: UIImageView!
    @IBOutlet var printlabel: UILabel!
    @IBOutlet var clorview: UIView!
    var image = UIImage ()
    // Variable for tap coordinates of the displayed image
    var tapPoint = CGPoint (x: 0, y: 0)
    override func viewDidLoad () {
    }
    override func didReceiveMemoryWarning () {
        super.didReceiveMemoryWarning ()
        // Dispose of any resources that can be recreated.
    }
    // Determine the color when tapping imageview
    @IBAction func getImageRGB (_ sender: UITapGestureRecognizer) {
        guard imageView.image! = nil else {return}
        // Get the tapped coordinates
        tapPoint = sender.location (in: imageView)
        let cgImage = imageView.image? .cgImage!
        let pixelData = cgImage? .DataProvider! .Data
        let data: UnsafePointer = CFDataGetBytePtr (pixelData)
        // Number of bytes per pixel
        let bytesPerPixel = (cgImage? .bitsPerPixel)!/8
        // Number of bytes per line
        let bytesPerRow = (cgImage? .BytesPerRow)!
        print ("bytesPerPixel = \ (bytesPerPixel) bytesPerRow = \ (bytesPerRow)")
        // Calculate the address corresponding to the coordinates of the tapped position
        let pixelAd: Int = Int (tapPoint.y) * bytesPerRow + Int (tapPoint.x) * bytesPerPixel
        // Take RGBA values ​​respectively
        let r = CGFloat (Int (CGFloat (data [pixelAd]))) /// CGFloat (255.0) * 100))/100
        let g = CGFloat (Int (CGFloat (data [pixelAd + 1]))) /// CGFloat (255.0) * 100))/100
        let b = CGFloat (Int (CGFloat (data [pixelAd + 2]))) /// CGFloat (255.0) * 100))/100
        let a = CGFloat (Int (CGFloat (data [pixelAd + 3])/CGFloat (255.0) * 100))/100
        print ([r, g, b, a])
        // Display the result in the navigation bar
        let R = "R:" + String (Int (r))
        let G = "G:" + String (Int (g))
        let B = "B:" + String (Int (b))
        let A = "A:" + String (format: "% .1f", a)
        navigationItem.title = R + G + B + A
        printlabel.text = "\ ([r, g, b, a])"
        clorview.backgroundColor = UIColor (red: CGFloat (r/255), green: CGFloat (g/255), blue: CGFloat (b/255), alpha: 1.0)}
}

// Select an image
extension TapViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    // Processing when selecting an image
    func imagePickerController (_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String: Any]) {
        let selectedImage = info [UIImagePickerControllerOriginalImage] as! UIImage
        // compress the size
        // let resizedImage = selectedImage.scale (byFactor: 0.4)
        image = selectedImage
        var imageHeight = image.size.height
        var imageWidth = image.size.width
        let navigationBarHeight = navigationController? .NavigationBar.frame.height
        let width = self.view.frame.width
        let height = self.view.frame.height
        let centerX = self.view.center.x
        let centerY = self.view.center.y
        let widthRatio = imageWidth
        let heightRatio = imageHeight
        // Change the size of iamgeview according to the size of the image
        if imageHeight>self.view.frame.height || imageWidth>self.view.frame.width {
            print ("1")
            imageWidth = width
            imageHeight = width * heightRatio/widthRatio
        } else if imageHeight>self.view.frame.height {
            print ("2")
            imageHeight = height
            imageWidth = height * widthRatio/heightRatio
        } else if imageWidth>self.view.frame.width {
            print ("3")
            imageWidth = width
            imageHeight = width * heightRatio/widthRatio
        } else {
        }
        imageView.contentMode = UIViewContentMode.scaleAspectFill
        imageView.frame.size = CGSize (width: imageWidth, height: imageHeight)
        // Prevent the image from covering the navigation bar
        if imageHeight/2>(height/2 --navigationBarHeight!) {
            print ("4")
            imageView.center = CGPoint (x: centerX, y: centerY + navigationBarHeight!)
        } else {
            print ("5")
            imageView.center = CGPoint (x: centerX, y: centerY)
        }
        imageView.image = image
        picker.dismiss (animated: true, completion: nil)
    }

    // Called when shooting is canceled
    func imagePickerControllerDidCancel (_ picker: UIImagePickerController) {
        picker.dismiss (animated: true, completion: nil)
    }
    func tappedlibrary () {let sourceType: UIImagePickerControllerSourceType =
            UIImagePickerControllerSourceType.photoLibrary
        if UIImagePickerController.isSourceTypeAvailable (
            UIImagePickerControllerSourceType.photoLibrary) {
            // Create an instance
            let cameraPicker = UIImagePickerController ()
            cameraPicker.sourceType = sourceType
            cameraPicker.delegate = self
            self.present (cameraPicker, animated: true, completion: nil)
        }
        else {
            print ("error")
        }
    }
    func tappedcamera () {
        let sourceType: UIImagePickerControllerSourceType =
            UIImagePickerControllerSourceType.camera
        // Check if the camera is available
        if UIImagePickerController.isSourceTypeAvailable (
            UIImagePickerControllerSourceType.camera) {
            // Create an instance
            let cameraPicker = UIImagePickerController ()
            cameraPicker.sourceType = sourceType
            cameraPicker.delegate = self
            self.present (cameraPicker, animated: true, completion: nil)
        }
        else {
            print ("error")
        }
    }
    @IBAction func selecteImageButton (_ sender: UITapGestureRecognizer) {
        // For alert display
        let actionSheet = UIAlertController (title: "", message: "Select Photo", preferredStyle: UIAlertControllerStyle.actionSheet)
        let tappedcamera = UIAlertAction (title: "shoot with camera", style: UIAlertActionStyle.default, handler: {
            (action: UIAlertAction!) In
            self.tappedcamera ()
        })
        let tappedlibrary = UIAlertAction (title: "Select from library", style: UIAlertActionStyle.default, handler: {
            (action: UIAlertAction!) In
            self.tappedlibrary ()
        })
        let cancel = UIAlertAction (title: "Cancel", style: UIAlertActionStyle.cancel, handler: {
            (action: UIAlertAction!) In
            print ("cancel")
        })
        actionSheet.addAction (tappedcamera)
        actionSheet.addAction (tappedlibrary)
        actionSheet.addAction (cancel)
        present (actionSheet, animated: true, completion: nil)
    }
}
Supplementary information (FW/tool version, etc.)

Xcode 11.3.1
Test actual iPhone11 iOS 13.4.1

  • Answer # 1

    I don't think it's wrong as a basic technique, except that there is an extra typecast.

    It seems that the reason why the correct color is not obtained is that the coordinates of the touched part on the screen and the coordinates on the bitmap saved as cgImage in UIImage do not correspond.

    As a trial, how to display the image.topLeftI confirmed that I could get the expected color by changing the pixels of the bitmap so that there is a one-to-one correspondence and examining the colors.

    So, this is a fundamental solution, but this iscgImageIt seems that we need to add more processing such as coordinate conversion based on the number of pixels that can be obtained by the structure and the number of pixels that are displayed.

    CGImage
    A bitmap image or image mask.

    With the code that is referenced, it seems that it can be applied as it is when the number of pixels of the bitmap is clearly smaller than the number of pixels of the device, but when the number of pixels of the bitmap is larger than the number of pixels of the device I think it needs to be calculated accurately based on the number of pixels in the bitmap.

    UIImagesizeI calculate the size of the image with the structure, but I think that it is the size on the display, not the actual number of pixels.

    size
    The logical dimensions, in points, for the image.

    In short

    cgImageofwidthandheightFind the bitmap dimensions as the actual number of pixels from

    If the number of pixels is larger than the display area, resize it appropriately. At that time, be careful not to change the aspect ratio of the actual image.

    UIImage.topLeftSet the display method based on the actual number of pixels, such as

    The coordinates acquired by tapGestureRecognizer,cgImageConvert to coordinates of

    Directly manipulate the pointer to find the RGBA value (this is possible with the current code)

    It means that it is necessary to follow the flow.

    Due to the large amount of work, it is not possible to verify all of the above, but I hope it will be helpful.