Module: Daltonize

Defined in:
lib/daltonize.rb

Class Method Summary

.daltonize(image, simulate, distribute) ⇒ Object
.daltonize_file(source, destination, type) ⇒ Object
.deuteranope(image) ⇒ Object
.protanope(image) ⇒ Object
.tritanope(image) ⇒ Object

Class Method Details

.daltonize(image, simulate, distribute) ⇒ Object



# File 'lib/daltonize.rb', line 31

def self.daltonize(image, simulate, distribute)
    # remove any alpha channel before processing
    alpha = nil
    if image.bands == 4
        alpha = image.extract_band(3)
        image = image.extract_band(0, 3)
    end

    begin
        # import to CIELAB with lcms
        # if there's no profile there, we'll fall back to the thing below
        cielab = image.icc_import_embedded(:relative)
        xyz = cielab.lab_to_xyz()
    rescue VIPS::Error
        # nope .. use the built-in srgb converter instead
        xyz = image.srgb_to_xyz()
        cielab = xyz.xyz_to_lab()
    end

    # pre-multiply our colour matrix
    #
    # reading right to left, we want to take our D65 XYZ to E, then to 
    # Bradford cone space, then through the simulation matrix, then back 
    # to XYZ, then back to D65
    #
    # we need to use E because we will be juggling the colour channels and we
    # want them all to have the same range so as not to disturb the neutral
    # axis
    m = E2D65 * BRAD2XYZ * Matrix.rows(simulate) * XYZ2BRAD * D652E

    # simulate colour-blindness
    xyz2 = xyz.recomb(m.to_a())

    # now find the error in CIELAB
    cielab2 = xyz2.xyz_to_lab()
    err = cielab - cielab2

    # add the error channels back to the original, recombined so as to hit
    # channels the person is sensitive to
    cielab = cielab + err.recomb(distribute)

    # .. and back to sRGB 
    image = cielab.lab_to_xyz().xyz_to_srgb()

    # reattach any alpha we saved above
    if alpha
        image = image.bandjoin(alpha.clip2fmt(image.band_fmt))
    end

    return image
end
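
To illustrate what the simulate and distribute arguments look like, here is a minimal sketch that calls daltonize directly with an identity simulation matrix and an all-zero distribution matrix; since no colour error is generated or redistributed, the output should be essentially the input image apart from the colourspace round-trip. The require names and filenames are assumptions, not part of this file.

require 'vips'
require 'daltonize'

# 3x3 identity: the "simulated" vision is the same as normal vision
identity = [[1, 0, 0],
            [0, 1, 0],
            [0, 0, 1]]

# redistribute none of the (zero) error back into the image
no_shift = [[0, 0, 0],
            [0, 0, 0],
            [0, 0, 0]]

im = VIPS::Image.new("photo.jpg")                  # placeholder input
out = Daltonize.daltonize(im, identity, no_shift)
out.write("photo-roundtrip.jpg")                   # placeholder output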

.daltonize_file(source, destination, type) ⇒ Object



# File 'lib/daltonize.rb', line 128

def self.daltonize_file(source, destination, type)
  im = VIPS::Image.new(source)
  # dispatch by name to one of the correction methods below
  im = self.send(type.to_sym, im)
  im.write(destination)
end
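
A typical call might look like the following sketch; the filenames are placeholders and require 'daltonize' assumes the gem is on the load path. Because type is dispatched with send, it must name one of the methods below: deuteranope, protanope or tritanope.

require 'daltonize'

# read holiday.jpg, recolour it for a deuteranope viewer,
# and write the corrected image to holiday-deut.png
Daltonize.daltonize_file("holiday.jpg", "holiday-deut.png", "deuteranope")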

.deuteranope(image) ⇒ Object



# File 'lib/daltonize.rb', line 83

def self.deuteranope(image)
  # deuteranopes are missing green receptors, so to simulate their vision 
  # we replace the green signal with a 70/30 mix of red and blue
  #
  # to compensate, we put 50% of the red/green error into lightness and 100%
  # into yellow/blue
  self.daltonize(image,
            [[  1,   0,   0],
             [0.7,   0, 0.3],
             [  0,   0,   1]], 
            [[  1, 0.5,   0],
             [  0,   0,   0],
             [  0,   1,   1]])
end

.protanope(image) ⇒ Object



# File 'lib/daltonize.rb', line 98

def self.protanope(image)
  # protanopes are missing red receptors --- we simulate their condition by
  # replacing the red signal with an 80/20 mix of green and blue (since 
  # blue is far less important than green)
  #
  # compensate as for deuts
  self.daltonize(image,
            [[  0, 0.8, 0.2],
             [  0,   1,   0],
             [  0,   0,   1]], 
            [[  1, 0.5,   0],
             [  0,   0,   0],
             [  0,   1,   1]])
end

.tritanope(image) ⇒ Object



# File 'lib/daltonize.rb', line 113

def self.tritanope(image)
  # tritanopes are missing blue receptors --- we replace the blue signal
  # with 30/70 red/green
  #
  # to compensate, we put 50% of the yellow/blue error into lightness, and 
  # 100% into red/green
  self.daltonize(image,
            [[  1,   0,   0],
             [  0,   1,   0],
             [0.3, 0.7,   0]], 
            [[  1,   0, 0.5],
             [  0,   0,   1],
             [  0,   0,   0]])
end
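
Since the three wrappers differ only in their matrices, producing all three corrected variants of one in-memory image is a short loop; this sketch uses the same send dispatch as daltonize_file, and the filenames are placeholders.

require 'vips'
require 'daltonize'

im = VIPS::Image.new("chart.png")          # placeholder input

["deuteranope", "protanope", "tritanope"].each do |type|
  out = Daltonize.send(type.to_sym, im)    # same dispatch daltonize_file uses
  out.write("chart-#{type}.png")
end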