//! Image conversion functionality.
//!
//! # Examples
//!
//! Convert an input image into a YUV one and scale it down by a factor of two.
//! ```no_run
//! use nihav_core::scale::*;
//! use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
//!
//! let mut in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, YUV420_FORMAT), 4).unwrap();
//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
//! scaler.convert(&in_pic, &mut out_pic).unwrap();
//! ```
use crate::frame::*;

mod kernel;

mod colorcvt;
mod repack;
#[allow(clippy::module_inception)]
mod scale;

mod palette;

pub use crate::scale::palette::{palettise_frame, QuantisationMode, PaletteSearchMode};

/// Image format information used by the converter.
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
    /// Pixel format description.
    pub fmt:    NAPixelFormaton,
    /// Image width.
    pub width:  usize,
    /// Image height.
    pub height: usize,
}
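
// Illustrative sketch (not from the original sources): a `ScaleInfo` can be
// filled in by hand, e.g. for a 640x480 RGB24 frame:
//
//     let info = ScaleInfo { fmt: RGB24_FORMAT, width: 640, height: 480 };
//
// In practice it is usually derived from an allocated frame buffer via
// `get_scale_fmt_from_pic()` below.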

impl std::fmt::Display for ScaleInfo {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "({}x{}, {})", self.width, self.height, self.fmt)
    }
}

/// A list specifying general image conversion errors.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum ScaleError {
    /// Input or output buffer contains no image data.
    NoFrame,
    /// Allocation failed.
    AllocError,
    /// Invalid argument.
    InvalidArgument,
    /// Feature is not implemented.
    NotImplemented,
    /// Internal implementation bug.
    Bug,
}

/// A specialised `Result` type for image conversion operations.
pub type ScaleResult<T> = Result<T, ScaleError>;

/*trait Kernel {
    fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType>;
    fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType);
}*/

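// Conversion steps are implemented as named kernels; `KernelDesc` binds a
// kernel name to its constructor, and `KernelDesc::find()` looks a kernel up
// in the `KERNELS` registry below.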
struct KernelDesc {
    name:   &'static str,
    create: fn () -> Box<dyn kernel::Kernel>,
}

impl KernelDesc {
    fn find(name: &str) -> ScaleResult<Box<dyn kernel::Kernel>> {
        for kern in KERNELS.iter() {
            if kern.name == name {
                return Ok((kern.create)());
            }
        }
        Err(ScaleError::InvalidArgument)
    }
}

const KERNELS: &[KernelDesc] = &[
    KernelDesc { name: "pack",       create: repack::create_pack },
    KernelDesc { name: "unpack",     create: repack::create_unpack },
    KernelDesc { name: "depal",      create: repack::create_depal },
    KernelDesc { name: "palette",    create: palette::create_palettise },
    KernelDesc { name: "scale",      create: scale::create_scale },
    KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
    KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
];

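// A single step of the conversion pipeline: a worker kernel, the format it
// produces, an intermediate buffer for its output, and an optional link to
// the next stage, forming a simple linked list.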
struct Stage {
    fmt_out: ScaleInfo,
    tmp_pic: NABufferType,
    next:    Option<Box<Stage>>,
    worker:  Box<dyn kernel::Kernel>,
}

/// Converts input picture information into the format used by the scaler.
pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
    let info = pic.get_video_info().unwrap();
    ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
}

impl Stage {
    fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<Self> {
        let mut worker = KernelDesc::find(name)?;
        let tmp_pic = worker.init(in_fmt, dest_fmt)?;
        let fmt_out = get_scale_fmt_from_pic(&tmp_pic);
        Ok(Self { fmt_out, tmp_pic, next: None, worker })
    }
    fn add(&mut self, new: Stage) {
        if let Some(ref mut next) = self.next {
            next.add(new);
        } else {
            self.next = Some(Box::new(new));
        }
    }
    fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
        if let Some(ref mut nextstage) = self.next {
            self.worker.process(pic_in, &mut self.tmp_pic);
            nextstage.process(&self.tmp_pic, pic_out)?;
        } else {
            self.worker.process(pic_in, pic_out);
        }
        Ok(())
    }
    fn drop_last_tmp(&mut self) {
        if let Some(ref mut nextstage) = self.next {
            nextstage.drop_last_tmp();
        } else {
            self.tmp_pic = NABufferType::None;
        }
    }
}

/// Image format converter.
pub struct NAScale {
    fmt_in:       ScaleInfo,
    fmt_out:      ScaleInfo,
    just_convert: bool,
    pipeline:     Option<Stage>,
}

fn check_format(in_fmt: NAVideoInfo, ref_fmt: &ScaleInfo, just_convert: bool) -> ScaleResult<()> {
    if in_fmt.get_format() != ref_fmt.fmt { return Err(ScaleError::InvalidArgument); }
    if !just_convert && (in_fmt.get_width() != ref_fmt.width || in_fmt.get_height() != ref_fmt.height) {
        return Err(ScaleError::InvalidArgument);
    }
    Ok(())
}

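// Plain data copy for the case when no conversion is needed: when strides and
// plane offsets match, the whole buffer is copied at once, otherwise each
// plane is copied line by line.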
fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
{
    if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
        let mut same = true;
        let num_components = sbuf.get_info().get_format().get_num_comp();
        for i in 0..num_components {
            if sbuf.get_stride(i) != dbuf.get_stride(i) {
                same = false;
                break;
            }
            if sbuf.get_offset(i) != dbuf.get_offset(i) {
                same = false;
                break;
            }
        }
        if same {
            let sdata = sbuf.get_data();
            let ddata = dbuf.get_data_mut().unwrap();
            ddata.copy_from_slice(&sdata[0..]);
        } else {
            let sdata = sbuf.get_data();
            for comp in 0..num_components {
                let (_, h) = sbuf.get_dimensions(comp);
                let src = &sdata[sbuf.get_offset(comp)..];
                let sstride = sbuf.get_stride(comp);
                let doff = dbuf.get_offset(comp);
                let dstride = dbuf.get_stride(comp);
                let ddata = dbuf.get_data_mut().unwrap();
                let dst = &mut ddata[doff..];
                let copy_size = sstride.min(dstride);
                for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
                    (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
                }
            }
        }
    } else if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf16(), pic_out.get_vbuf16()) {
        let mut same = true;
        let num_components = sbuf.get_info().get_format().get_num_comp();
        for i in 0..num_components {
            if sbuf.get_stride(i) != dbuf.get_stride(i) {
                same = false;
                break;
            }
            if sbuf.get_offset(i) != dbuf.get_offset(i) {
                same = false;
                break;
            }
        }
        if same {
            let sdata = sbuf.get_data();
            let ddata = dbuf.get_data_mut().unwrap();
            ddata.copy_from_slice(&sdata[0..]);
        } else {
            let sdata = sbuf.get_data();
            for comp in 0..num_components {
                let (_, h) = sbuf.get_dimensions(comp);
                let src = &sdata[sbuf.get_offset(comp)..];
                let sstride = sbuf.get_stride(comp);
                let doff = dbuf.get_offset(comp);
                let dstride = dbuf.get_stride(comp);
                let ddata = dbuf.get_data_mut().unwrap();
                let dst = &mut ddata[doff..];
                let copy_size = sstride.min(dstride);
                for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
                    (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
                }
            }
        }
    } else {
        unimplemented!();
    }
}

macro_rules! add_stage {
    ($head:expr, $new:expr) => {
        if let Some(ref mut h) = $head {
            h.add($new);
        } else {
            $head = Some($new);
        }
    };
}
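// Returns true when format `a` carries at least as much information as `b`
// (no smaller dimensions, deeper samples, or less chroma subsampling); used
// to decide whether scaling happens before or after colourspace conversion.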
fn is_better_fmt(a: &ScaleInfo, b: &ScaleInfo) -> bool {
    if (a.width >= b.width) && (a.height >= b.height) {
        return true;
    }
    if a.fmt.get_max_depth() > b.fmt.get_max_depth() {
        return true;
    }
    if a.fmt.get_max_subsampling() < b.fmt.get_max_subsampling() {
        return true;
    }
    false
}
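// Builds the conversion pipeline as a chain of kernels: unpack (or depal for
// paletted input), an optional scale step, a colourspace conversion step, and
// finally pack or palettise for the output format. Scaling is done before the
// colourspace conversion when the input holds at least as much information as
// the output and the output format is not subsampled; otherwise it is done
// afterwards. As an illustration (an inference from the checks below, not a
// statement from the original sources), the module-level example
// (640x480 RGB24 to 320x240 YUV420) would build: unpack -> rgb_to_yuv -> scale.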
fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool) -> ScaleResult<Option<Stage>> {
    let inname = ifmt.fmt.get_model().get_short_name();
    let outname = ofmt.fmt.get_model().get_short_name();

    println!("convert {} -> {}", ifmt, ofmt);
    let needs_scale = if (ofmt.fmt.get_max_subsampling() > 0) &&
        (ofmt.fmt.get_max_subsampling() != ifmt.fmt.get_max_subsampling()) {
            true
        } else {
            !just_convert
        };
    let needs_unpack = !ifmt.fmt.is_unpacked();
    let needs_pack = !ofmt.fmt.is_unpacked();
    let needs_convert = inname != outname;
    let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
                           && (ofmt.fmt.get_max_subsampling() == 0);
    let needs_palettise = ofmt.fmt.palette;
    //todo stages for model and gamma conversion

    let mut stages: Option<Stage> = None;
    let mut cur_fmt = *ifmt;

    if needs_unpack {
        println!("[adding unpack]");
        let new_stage = if !cur_fmt.fmt.is_paletted() {
                Stage::new("unpack", &cur_fmt, &ofmt)?
            } else {
                Stage::new("depal", &cur_fmt, &ofmt)?
            };
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_scale && scale_before_cvt {
        println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_convert {
        println!("[adding convert]");
        let cvtname = format!("{}_to_{}", inname, outname);
        println!("[{}]", cvtname);
        let new_stage = Stage::new(&cvtname, &cur_fmt, &ofmt)?;
        //todo if fails try converting via RGB or YUV
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
        //todo alpha plane copy/add
    }
    if needs_scale && !scale_before_cvt {
        println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_pack && !needs_palettise {
        println!("[adding pack]");
        let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_palettise {
        println!("[adding palettise]");
        let new_stage = Stage::new("palette", &cur_fmt, &ofmt)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }

    if let Some(ref mut head) = stages {
        head.drop_last_tmp();
    }

    Ok(stages)
}

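// Flips a single plane vertically by swapping mirrored lines, using the two
// provided scratch line buffers.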
fn swap_plane<T:Copy>(data: &mut [T], stride: usize, h: usize, line0: &mut [T], line1: &mut [T]) {
    let mut doff0 = 0;
    let mut doff1 = stride * (h - 1);
    for _ in 0..h/2 {
        line0.copy_from_slice(&data[doff0..][..stride]);
        line1.copy_from_slice(&data[doff1..][..stride]);
        (&mut data[doff1..][..stride]).copy_from_slice(line0);
        (&mut data[doff0..][..stride]).copy_from_slice(line1);
        doff0 += stride;
        doff1 -= stride;
    }
}

/// Flips the picture contents.
pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
    match pic {
        NABufferType::Video(ref mut vb) => {
            let ncomp = vb.get_num_components();
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        NABufferType::Video16(ref mut vb) => {
            let ncomp = vb.get_num_components().max(1);
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        NABufferType::Video32(ref mut vb) => {
            let ncomp = vb.get_num_components().max(1);
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        NABufferType::VideoPacked(ref mut vb) => {
            let ncomp = vb.get_num_components();
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        _ => { return Err(ScaleError::InvalidArgument); },
    };
    Ok(())
}

impl NAScale {
    /// Constructs a new `NAScale` instance.
    pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
        let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
        let pipeline = if fmt_in != fmt_out {
                build_pipeline(&fmt_in, &fmt_out, just_convert)?
            } else {
                None
            };
        Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
    }
    /// Checks whether the requested conversion operation is needed at all.
    pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
    /// Returns the input image format.
    pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
    /// Returns the output image format.
    pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
    /// Performs the image format conversion.
    pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
        let in_info = pic_in.get_video_info();
        let out_info = pic_out.get_video_info();
        if in_info.is_none() || out_info.is_none() { return Err(ScaleError::InvalidArgument); }
        let in_info = in_info.unwrap();
        let out_info = out_info.unwrap();
        if self.just_convert &&
            (in_info.get_width() != out_info.get_width() || in_info.get_height() != out_info.get_height()) {
            return Err(ScaleError::InvalidArgument);
        }
        let needs_flip = in_info.is_flipped() ^ out_info.is_flipped();
        check_format(in_info, &self.fmt_in, self.just_convert)?;
        check_format(out_info, &self.fmt_out, self.just_convert)?;
        let ret = if let Some(ref mut pipe) = self.pipeline {
                pipe.process(pic_in, pic_out)
            } else {
                copy(pic_in, pic_out);
                Ok(())
            };
        if ret.is_ok() && needs_flip {
            flip_picture(pic_out)?;
        }
        ret
    }
}

#[cfg(test)]
mod test {
    use super::*;

    fn fill_pic(pic: &mut NABufferType, val: u8) {
        if let Some(ref mut buf) = pic.get_vbuf() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val; }
        } else if let Some(ref mut buf) = pic.get_vbuf16() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val as u16; }
        } else if let Some(ref mut buf) = pic.get_vbuf32() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = (val as u32) * 0x01010101; }
        }
    }
    #[test]
    fn test_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 0x0);
        assert_eq!(odata[1], 0x4);
        assert_eq!(odata[2], 0x52);

        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 42);
        assert!(((odata[uoff] ^ 0x80) as i8).abs() <= 1);
        assert!(((odata[voff] ^ 0x80) as i8).abs() <= 1);
        let mut scaler = NAScale::new(ofmt, ifmt).unwrap();
        scaler.convert(&out_pic, &mut in_pic).unwrap();
        let obuf = in_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    #[test]
    fn test_scale() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(2, 2, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(3, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf16().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    #[test]
    fn test_scale_and_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 28);
        assert_eq!(odata[uoff], 154);
        assert_eq!(odata[voff], 103);
    }
    #[test]
    fn test_scale_and_convert_to_pal() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 142);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, PAL8_FORMAT), 0).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let dataoff = obuf.get_offset(0);
        let paloff = obuf.get_offset(1);
        let odata = obuf.get_data();
        assert_eq!(odata[dataoff], 0);
        assert_eq!(odata[paloff], 157);
        assert_eq!(odata[paloff + 1], 99);
        assert_eq!(odata[paloff + 2], 170);
    }
}