use crate::formats::*;
use super::*;
use super::kernel::Kernel;

#[derive(Default,Clone,Copy,PartialEq,Debug)]
pub struct Pixel {
    pub r: u8,
    pub g: u8,
    pub b: u8,
}

impl Pixel {
    #[allow(dead_code)]
    fn new(src: &[u8]) -> Self {
        Self { r: src[0], g: src[1], b: src[2] }
    }
    fn to_rgb(&self) -> [u8; 3] {
        [self.r, self.g, self.b]
    }
    /// Returns the squared Euclidean distance to another pixel in RGB space.
    fn dist(&self, pix: Pixel) -> u32 {
        let dr = i32::from(self.r) - i32::from(pix.r);
        let dg = i32::from(self.g) - i32::from(pix.g);
        let db = i32::from(self.b) - i32::from(pix.b);
        (dr * dr + dg * dg + db * db) as u32
    }
    /// Returns the component-wise minimum of the two pixels.
    fn min(&self, pix: Pixel) -> Pixel {
        Pixel { r: self.r.min(pix.r), g: self.g.min(pix.g), b: self.b.min(pix.b) }
    }
    /// Returns the component-wise maximum of the two pixels.
    fn max(&self, pix: Pixel) -> Pixel {
        Pixel { r: self.r.max(pix.r), g: self.g.max(pix.g), b: self.b.max(pix.b) }
    }
}

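/// Averages two bytes without intermediate overflow: since `a + b == (a & b) * 2 + (a ^ b)`,
/// `(a & b) + ((a ^ b) >> 1)` equals `(a + b) / 2` rounded down,
/// e.g. `avg_u8(3, 6) == 4` and `avg_u8(255, 255) == 255`.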
#[allow(dead_code)]
fn avg_u8(a: u8, b: u8) -> u8 {
    (a & b) + ((a ^ b) >> 1)
}

mod elbg;
mod mediancut;
mod neuquant;
mod palettise;

//use elbg::ELBG;
//use mediancut::quantise_median_cut;
//use neuquant::NeuQuantQuantiser;

#[derive(Clone,Copy,Debug,PartialEq)]
/// Palette quantisation algorithms.
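///
/// An illustrative sketch of choosing a mode (the `10` is merely an example
/// subsampling factor within the documented 1-30 range):
///
/// ```ignore
/// let fastest = QuantisationMode::NeuQuant(10); // higher factor = faster but rougher palette
/// let default = QuantisationMode::MedianCut;    // same as QuantisationMode::default()
/// let best    = QuantisationMode::ELBG;         // slowest, best quality
/// ```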
pub enum QuantisationMode {
    /// Median cut approach proposed by Paul Heckbert.
    ///
    /// This is moderately fast and moderately good.
    MedianCut,
    /// Enhanced LBG algorithm proposed by Giuseppe Patane and Marco Russo.
    ///
    /// This is a slow but good method.
    ELBG,
    /// NeuQuant algorithm proposed by Anthony Dekker.
    ///
    /// It may operate on a randomly subsampled image with subsampling factors 1-30.
    /// This algorithm is fast, especially with a high subsampling factor, but the output palette may be far from the optimal one.
    NeuQuant(u8),
}

impl Default for QuantisationMode {
    fn default() -> Self { QuantisationMode::MedianCut }
}

#[derive(Clone,Copy,Debug,PartialEq)]
/// Algorithms for searching for an appropriate palette entry for a given pixel.
pub enum PaletteSearchMode {
    /// Full search (the slowest).
    Full,
    /// Local search (faster but may not be as good).
    Local,
    /// k-d tree based search (the fastest but not so accurate).
    KDTree,
}

impl Default for PaletteSearchMode {
    fn default() -> Self { PaletteSearchMode::Local }
}

use crate::scale::palette::elbg::ELBG;
use crate::scale::palette::mediancut::quantise_median_cut;
use crate::scale::palette::neuquant::NeuQuantQuantiser;
use crate::scale::palette::palettise::*;

fn palettise_frame_internal(pic_in: &NABufferType, pic_out: &mut NABufferType, qmode: QuantisationMode, palmode: PaletteSearchMode, pixels: &mut Vec<Pixel>) -> ScaleResult<()> {
    if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
        let ioff = sbuf.get_offset(0);
        let (w, h) = sbuf.get_dimensions(0);
        let istride = sbuf.get_stride(0);
        let ifmt = sbuf.get_info().get_format();
        let sdata1 = sbuf.get_data();
        let sdata = &sdata1[ioff..];

        let doff = dbuf.get_offset(0);
        let paloff = dbuf.get_offset(1);
        let dstride = dbuf.get_stride(0);
        let ofmt = dbuf.get_info().get_format();
        let dst = dbuf.get_data_mut().unwrap();

        // gather all source pixels as RGB triplets
        pixels.clear();
        if !ifmt.is_unpacked() {
            // packed RGB: the components are interleaved within each element
            let esize = ifmt.elem_size as usize;
            let coffs = [ifmt.comp_info[0].unwrap().comp_offs as usize, ifmt.comp_info[1].unwrap().comp_offs as usize, ifmt.comp_info[2].unwrap().comp_offs as usize];
            for src in sdata.chunks(istride).take(h) {
                for chunk in src.chunks_exact(esize).take(w) {
                    let pixel = Pixel{ r: chunk[coffs[0]], g: chunk[coffs[1]], b: chunk[coffs[2]] };
                    pixels.push(pixel);
                }
            }
        } else {
            // planar RGB: each component lives in its own plane
            let mut roff = ioff;
            let mut goff = sbuf.get_offset(1);
            let mut boff = sbuf.get_offset(2);
            let rstride = istride;
            let gstride = sbuf.get_stride(1);
            let bstride = sbuf.get_stride(2);
            for _ in 0..h {
                for x in 0..w {
                    let pixel = Pixel{ r: sdata[roff + x], g: sdata[goff + x], b: sdata[boff + x] };
                    pixels.push(pixel);
                }
                roff += rstride;
                goff += gstride;
                boff += bstride;
            }
        }
        // generate a 256-colour palette with the requested algorithm
        let mut pal = [[0u8; 3]; 256];
        match qmode {
            QuantisationMode::ELBG => {
                let mut elbg = ELBG::new_random();
                elbg.quantise(pixels.as_slice(), &mut pal);
            },
            QuantisationMode::MedianCut => {
                quantise_median_cut(pixels.as_slice(), &mut pal);
            },
            QuantisationMode::NeuQuant(fact) => {
                let mut nq = NeuQuantQuantiser::new(fact as usize);
                nq.learn(pixels.as_slice());
                nq.make_pal(&mut pal);
            },
        };
        // store the palette in the second plane of the output buffer
        let esize = ofmt.elem_size as usize;
        let coffs = [ofmt.comp_info[0].unwrap().comp_offs as usize, ofmt.comp_info[1].unwrap().comp_offs as usize, ofmt.comp_info[2].unwrap().comp_offs as usize];
        for (dpal, spal) in (&mut dst[paloff..]).chunks_mut(esize).zip(pal.iter()) {
            dpal[coffs[0]] = spal[0];
            dpal[coffs[1]] = spal[1];
            dpal[coffs[2]] = spal[2];
        }

        // replace every pixel with the index of its closest palette entry
        let dst = &mut dst[doff..];
        match palmode {
            PaletteSearchMode::Full => {
                for (dline, sline) in dst.chunks_mut(dstride).take(h).zip(pixels.chunks(w)) {
                    for (didx, pix) in dline.iter_mut().take(w).zip(sline.iter()) {
                        let rgb = pix.to_rgb();
                        *didx = find_nearest(&rgb, &pal) as u8;
                    }
                }
            },
            PaletteSearchMode::Local => {
                let ls = LocalSearch::new(&pal);
                for (dline, sline) in dst.chunks_mut(dstride).take(h).zip(pixels.chunks(w)) {
                    for (didx, pix) in dline.iter_mut().take(w).zip(sline.iter()) {
                        *didx = ls.search(pix.to_rgb()) as u8;
                    }
                }
            },
            PaletteSearchMode::KDTree => {
                let kdtree = KDTree::new(&pal);
                for (dline, sline) in dst.chunks_mut(dstride).take(h).zip(pixels.chunks(w)) {
                    for (didx, pix) in dline.iter_mut().take(w).zip(sline.iter()) {
                        *didx = kdtree.search(pix.to_rgb()) as u8;
                    }
                }
            },
        };
        Ok(())
    } else {
        Err(ScaleError::InvalidArgument)
    }
}

/// Converts a packed RGB frame into a palettised one.
///
/// This function can operate in several modes of both palette generation and colour substitution with palette indices.
/// Some modes may work faster but produce a worse output image.
/// See [`QuantisationMode`] and [`PaletteSearchMode`] for details.
/// If you are not sure what to use, there are `QuantisationMode::default()` and `PaletteSearchMode::default()`.
///
/// [`QuantisationMode`]: ./enum.QuantisationMode.html
/// [`PaletteSearchMode`]: ./enum.PaletteSearchMode.html
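///
/// # Example
///
/// A minimal sketch (marked `ignore` as it is not a complete program); it assumes
/// `src_frm` already holds a packed RGB video buffer allocated elsewhere:
///
/// ```ignore
/// let (w, h) = src_frm.get_vbuf().unwrap().get_dimensions(0);
/// let mut dst_frm = alloc_video_buffer(NAVideoInfo::new(w, h, false, PAL8_FORMAT), 0).unwrap();
/// palettise_frame(&src_frm, &mut dst_frm, QuantisationMode::default(), PaletteSearchMode::default()).unwrap();
/// ```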
pub fn palettise_frame(pic_in: &NABufferType, pic_out: &mut NABufferType, qmode: QuantisationMode, palmode: PaletteSearchMode) -> ScaleResult<()> {
    let size;
    if let Some(ref vbuf) = pic_in.get_vbuf() {
        //todo check format for being packed RGB in and pal out
        let (w, h) = vbuf.get_dimensions(0);
        size = w * h;
    } else {
        return Err(ScaleError::InvalidArgument);
    }
    let mut pixels = Vec::with_capacity(size);
    palettise_frame_internal(pic_in, pic_out, qmode, palmode, &mut pixels)
}


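/// Scale pipeline kernel converting a packed RGB frame into a palettised one.
///
/// Recognised options (any other value falls back to the default):
/// * `"pal.quant"` - `"mediancut"`, `"elbg"` or `"neuquant"` (the latter with a fixed subsampling factor of 3);
/// * `"pal.search"` - `"full"`, `"local"` or `"kdtree"`.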
#[derive(Default)]
struct PalettiseKernel {
    pixels: Vec<Pixel>,
    qmode: QuantisationMode,
    palmode: PaletteSearchMode,
}

impl PalettiseKernel {
    fn new() -> Self { Self::default() }
}

impl Kernel for PalettiseKernel {
    fn init(&mut self, in_fmt: &ScaleInfo, _dest_fmt: &ScaleInfo, options: &[(String, String)]) -> ScaleResult<NABufferType> {
        for (name, value) in options.iter() {
            match name.as_str() {
                "pal.quant" => {
                    self.qmode = match value.as_str() {
                        "mediancut" => QuantisationMode::MedianCut,
                        "elbg" => QuantisationMode::ELBG,
                        "neuquant" => QuantisationMode::NeuQuant(3),
                        _ => QuantisationMode::default(),
                    };
                },
                "pal.search" => {
                    self.palmode = match value.as_str() {
                        "full" => PaletteSearchMode::Full,
                        "local" => PaletteSearchMode::Local,
                        "kdtree" => PaletteSearchMode::KDTree,
                        _ => PaletteSearchMode::default(),
                    };
                },
                _ => {},
            };
        }

        self.pixels = Vec::with_capacity(in_fmt.width * in_fmt.height);
        let res = alloc_video_buffer(NAVideoInfo::new(in_fmt.width, in_fmt.height, false, PAL8_FORMAT), 0);
        if res.is_err() { return Err(ScaleError::AllocError); }
        Ok(res.unwrap())
    }
    fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) {
        palettise_frame_internal(pic_in, pic_out, self.qmode, self.palmode, &mut self.pixels).unwrap();
    }
}

pub fn create_palettise() -> Box<dyn Kernel> {
    Box::new(PalettiseKernel::new())
}