start work on nihed-cros-libva
[nihav-player.git] / nihed-cros-libva / src / lib.rs
// Copyright 2022 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! Implements a lightweight and safe interface over `libva`.
//!
//! The starting point to using this crate is to open a [`Display`], from which a [`Context`] and
//! [`Surface`]s can be allocated and used for doing actual work.
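//!
//! A rough sketch of that flow, with the calls taken from the MPEG-2 decoding test at the bottom
//! of this file (which is a complete, working example); error handling and the actual buffer
//! submission are omitted here:
//!
//! ```ignore
//! use std::rc::Rc;
//!
//! // Open a libva display.
//! let display = Display::open().unwrap();
//!
//! // Create a decode config for MPEG-2 Main profile after checking the RT format attribute.
//! let profile = VAProfile::VAProfileMPEG2Main;
//! let entrypoint = VAEntrypoint::VAEntrypointVLD;
//! let mut attrs = vec![VAConfigAttrib {
//!     type_: VAConfigAttribType::VAConfigAttribRTFormat,
//!     value: 0,
//! }];
//! display.get_config_attributes(profile, entrypoint, &mut attrs).unwrap();
//! let config = display.create_config(attrs, profile, entrypoint).unwrap();
//!
//! // Allocate output surfaces and a context bound to them.
//! let mut surfaces = display
//!     .create_surfaces(
//!         constants::VA_RT_FORMAT_YUV420,
//!         None,
//!         16,
//!         16,
//!         Some(UsageHint::USAGE_HINT_DECODER),
//!         1,
//!     )
//!     .unwrap();
//! let context = display
//!     .create_context(&config, 16, 16, Some(&surfaces), true)
//!     .unwrap();
//!
//! // Work is submitted through a `Picture` that takes ownership of one of the surfaces.
//! let picture = Picture::new(0, Rc::clone(&context), surfaces.remove(0));
//! ```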

#![deny(missing_docs)]

mod bindings;
pub mod buffer;
mod config;
mod context;
mod display;
mod generic_value;
mod image;
mod picture;
mod status;
mod surface;
mod usage_hint;

pub use bindings::constants;
pub use bindings::VAConfigAttrib;
pub use bindings::VAConfigAttribType;
pub use bindings::VAEntrypoint;
pub use bindings::VAImageFormat;
pub use bindings::VAProfile;
pub use bindings::VASurfaceAttribType;
pub use bindings::VASurfaceID;
pub use bindings::VASurfaceStatus;
pub use buffer::*;
pub use config::*;
pub use context::*;
pub use display::*;
pub use generic_value::*;
pub use image::*;
pub use picture::*;
pub use surface::*;
pub use usage_hint::*;

#[cfg(test)]
mod tests {
    use std::rc::Rc;

    use super::*;

    /// Returns a 32-bit CRC for the visible part of `image`, which must be in NV12 format.
    fn crc_nv12_image(image: &Image) -> u32 {
        let data = image.as_ref();
        let va_image = image.image();
        let offsets = &va_image.offsets;
        let pitches = &va_image.pitches;
        let width = va_image.width as usize;
        let height = va_image.height as usize;

        // We only support NV12 images
        assert_eq!(va_image.format.fourcc, u32::from_ne_bytes(*b"NV12"));
        // Consistency check
        assert_eq!(va_image.num_planes, 2);

        let mut hasher = crc32fast::Hasher::new();

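        // The pitch can be larger than the visible width, so only the first `width` bytes of
        // each line are hashed.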
        let offset = offsets[0] as usize;
        let pitch = pitches[0] as usize;
        let y_plane = data[offset..(offset + pitch * height)]
            .chunks(pitch)
            .map(|line| &line[0..width]);

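        // NV12 stores interleaved U/V samples in a single plane at half vertical resolution, so
        // the chroma plane covers (height + 1) / 2 lines of `width` bytes each.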
        let offset = offsets[1] as usize;
        let pitch = pitches[1] as usize;
        let uv_plane = data[offset..(offset + pitch * ((height + 1) / 2))]
            .chunks(pitch)
            .map(|line| &line[0..width]);

        for line in y_plane.chain(uv_plane) {
            hasher.update(line);
        }

        hasher.finalize()
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn libva_utils_mpeg2vldemo() {
        // Adapted from <https://github.com/intel/libva-utils/blob/master/decode/mpeg2vldemo.cpp>
        let display = Display::open().unwrap();

        assert!(!display.query_vendor_string().unwrap().is_empty());
        let profiles = display.query_config_profiles().unwrap();
        assert!(!profiles.is_empty());

        let profile = bindings::VAProfile::VAProfileMPEG2Main;
        let entrypoints = display.query_config_entrypoints(profile).unwrap();
        assert!(!entrypoints.is_empty());
        assert!(entrypoints
            .iter()
            .any(|e| *e == bindings::VAEntrypoint::VAEntrypointVLD));

        let format = bindings::constants::VA_RT_FORMAT_YUV420;
        let width = 16;
        let height = 16;

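        // Make sure the driver exposes a YUV 4:2:0 render target format for this
        // profile/entrypoint pair before creating the config.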
        let mut attrs = vec![bindings::VAConfigAttrib {
            type_: bindings::VAConfigAttribType::VAConfigAttribRTFormat,
            value: 0,
        }];

        let entrypoint = bindings::VAEntrypoint::VAEntrypointVLD;
        display
            .get_config_attributes(profile, entrypoint, &mut attrs)
            .unwrap();
        assert!(attrs[0].value != bindings::constants::VA_ATTRIB_NOT_SUPPORTED);
        assert!(attrs[0].value & bindings::constants::VA_RT_FORMAT_YUV420 != 0);

        let config = display.create_config(attrs, profile, entrypoint).unwrap();

        let mut surfaces = display
            .create_surfaces(
                format,
                None,
                width,
                height,
                Some(UsageHint::USAGE_HINT_DECODER),
                1,
            )
            .unwrap();
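        // The context height is rounded up to a whole number of 16x16 macroblocks, matching the
        // original demo.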
        let context = display
            .create_context(
                &config,
                width as i32,
                (((height + 15) / 16) * 16) as i32,
                Some(&surfaces),
                true,
            )
            .unwrap();

        // The picture data is adapted from libva-utils at decode/mpeg2vldemo.cpp.
        // Data dump of a 16x16 MPEG2 video clip; it contains a single I frame.
        let mut mpeg2_clip: Vec<u8> = vec![
            0x00, 0x00, 0x01, 0xb3, 0x01, 0x00, 0x10, 0x13, 0xff, 0xff, 0xe0, 0x18, 0x00, 0x00,
            0x01, 0xb5, 0x14, 0x8a, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0xb8, 0x00, 0x08,
            0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x0f, 0xff, 0xf8, 0x00, 0x00, 0x01, 0xb5,
            0x8f, 0xff, 0xf3, 0x41, 0x80, 0x00, 0x00, 0x01, 0x01, 0x13, 0xe1, 0x00, 0x15, 0x81,
            0x54, 0xe0, 0x2a, 0x05, 0x43, 0x00, 0x2d, 0x60, 0x18, 0x01, 0x4e, 0x82, 0xb9, 0x58,
            0xb1, 0x83, 0x49, 0xa4, 0xa0, 0x2e, 0x05, 0x80, 0x4b, 0x7a, 0x00, 0x01, 0x38, 0x20,
            0x80, 0xe8, 0x05, 0xff, 0x60, 0x18, 0xe0, 0x1d, 0x80, 0x98, 0x01, 0xf8, 0x06, 0x00,
            0x54, 0x02, 0xc0, 0x18, 0x14, 0x03, 0xb2, 0x92, 0x80, 0xc0, 0x18, 0x94, 0x42, 0x2c,
            0xb2, 0x11, 0x64, 0xa0, 0x12, 0x5e, 0x78, 0x03, 0x3c, 0x01, 0x80, 0x0e, 0x80, 0x18,
            0x80, 0x6b, 0xca, 0x4e, 0x01, 0x0f, 0xe4, 0x32, 0xc9, 0xbf, 0x01, 0x42, 0x69, 0x43,
            0x50, 0x4b, 0x01, 0xc9, 0x45, 0x80, 0x50, 0x01, 0x38, 0x65, 0xe8, 0x01, 0x03, 0xf3,
            0xc0, 0x76, 0x00, 0xe0, 0x03, 0x20, 0x28, 0x18, 0x01, 0xa9, 0x34, 0x04, 0xc5, 0xe0,
            0x0b, 0x0b, 0x04, 0x20, 0x06, 0xc0, 0x89, 0xff, 0x60, 0x12, 0x12, 0x8a, 0x2c, 0x34,
            0x11, 0xff, 0xf6, 0xe2, 0x40, 0xc0, 0x30, 0x1b, 0x7a, 0x01, 0xa9, 0x0d, 0x00, 0xac,
            0x64,
        ];

        let picture_coding_extension =
            MPEG2PictureCodingExtension::new(0, 3, 0, 1, 0, 0, 0, 0, 0, 1, 1);
        let pic_param = PictureParameterBufferMPEG2::new(
            16,
            16,
            0xffffffff,
            0xffffffff,
            1,
            0xffff,
            &picture_coding_extension,
        );

        let pic_param = BufferType::PictureParameter(PictureParameter::MPEG2(pic_param));

        let iq_matrix = IQMatrixBufferMPEG2::new(
            1,
            1,
            0,
            0,
            [
                8, 16, 16, 19, 16, 19, 22, 22, 22, 22, 22, 22, 26, 24, 26, 27, 27, 27, 26, 26, 26,
                26, 27, 27, 27, 29, 29, 29, 34, 34, 34, 29, 29, 29, 27, 27, 29, 29, 32, 32, 34, 34,
                37, 38, 37, 35, 35, 34, 35, 38, 38, 40, 40, 40, 48, 48, 46, 46, 56, 56, 58, 69, 69,
                83,
            ],
            [
                16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0,
            ],
            [0; 64],
            [0; 64],
        );

        let iq_matrix = BufferType::IQMatrix(IQMatrix::MPEG2(iq_matrix));

        let slice_param = SliceParameterBufferMPEG2::new(150, 0, 0, 38, 0, 0, 2, 0);

        let slice_param = BufferType::SliceParameter(SliceParameter::MPEG2(slice_param));

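        // Everything from the slice start code at byte 47 onwards is submitted as slice data;
        // the headers before it are described by the parameter buffers above.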
        let test_data_offset = 47;
        let slice_data = BufferType::SliceData(mpeg2_clip.drain(test_data_offset..).collect());

        let buffers = vec![
            context.create_buffer(pic_param).unwrap(),
            context.create_buffer(slice_param).unwrap(),
            context.create_buffer(iq_matrix).unwrap(),
            context.create_buffer(slice_data).unwrap(),
        ];

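        // A `Picture` binds one of the surfaces to the context and carries the buffers that
        // describe the frame to decode.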
        let mut picture = Picture::new(0, Rc::clone(&context), surfaces.remove(0));
        for buffer in buffers {
            picture.add_buffer(buffer);
        }

        // Actual client code can just chain the calls.
        let picture = picture.begin().unwrap();
        let picture = picture.render().unwrap();
        let picture = picture.end().unwrap();
        let picture = picture.sync().map_err(|(e, _)| e).unwrap();

        // Test whether we can map the resulting surface to obtain the raw YUV data.
        let image_fmts = display.query_image_formats().unwrap();
        let image_fmt = image_fmts
            .into_iter()
            .find(|f| f.fourcc == bindings::constants::VA_FOURCC_NV12)
            .expect("No valid VAImageFormat found for NV12");

        let image = Image::new(&picture, image_fmt, width, height, false).unwrap();

        assert_eq!(crc_nv12_image(&image), 0xa5713e52);
    }
}