aac: clear M/S flags
[nihav.git] / nihav-acorn / src / codecs / linepack.rs
1 use nihav_core::codecs::*;
2 use nihav_core::io::byteio::*;
3
4 use super::RGB555_FORMAT;
5 use super::yuvtab::YUV2RGB;
6
/// Decoder for the Acorn LinePack video codec.
#[derive(Default)]
struct LinePackDecoder {
    info: NACodecInfoRef,   // codec information reported with decoded frames
    cur_frm: Vec<u16>,      // frame currently being reconstructed (16-bit pixels)
    prev_frm: Vec<u16>,     // previously decoded frame used as reference
    width: usize,           // frame width in pixels
    is_yuv: bool,           // pixels are YUV-coded (detected from extradata) and
                            // need table conversion to RGB on output
}
15
16 impl LinePackDecoder {
17 fn new() -> Self { Self::default() }
18 }
19
impl NADecoder for LinePackDecoder {
    /// Initialises the decoder from the stream parameters.
    ///
    /// Output is reported as RGB555, two width*height pixel buffers are
    /// allocated, and the extradata is scanned for a "YUV" marker that
    /// switches the output stage to table-based YUV->RGB conversion.
    fn init(&mut self, _supp: &mut NADecoderSupport, info: NACodecInfoRef) -> DecoderResult<()> {
        if let NACodecTypeInfo::Video(vinfo) = info.get_properties() {
            // always report RGB555 output regardless of the coded pixel format
            let myinfo = NACodecTypeInfo::Video(NAVideoInfo::new(vinfo.get_width(), vinfo.get_height(), false, RGB555_FORMAT));
            self.info = NACodecInfo::new_ref(info.get_name(), myinfo, info.get_extradata()).into_ref();
            self.cur_frm = vec![0; vinfo.get_width() * vinfo.get_height()];
            self.prev_frm = vec![0; vinfo.get_width() * vinfo.get_height()];
            self.width = vinfo.get_width();
            // the byte sequence "YUV" anywhere in the extradata marks
            // YUV-coded pixels
            if let Some(edata) = info.get_extradata() {
                for triplet in edata.windows(3) {
                    if triplet == b"YUV" {
                        self.is_yuv = true;
                        break;
                    }
                }
            }
            Ok(())
        } else {
            Err(DecoderError::InvalidData)
        }
    }
    /// Decodes a single frame.
    ///
    /// The bitstream is a sequence of 16-bit little-endian words.  A word
    /// with the top bit clear is a literal pixel; otherwise bits 12-14
    /// select an opcode and the low bits carry its parameters:
    ///   0 - copy pixels from the previous frame (skip)
    ///   1 - motion-compensated copy (or previous-line copy for zero MV)
    ///   2 - run of a single colour
    ///   3 - block of raw pixels
    ///   4 - four-colour pattern selected by 2-bit mask indices
    ///   5 - alternating two-colour fill
    /// Frames that reference the previous frame are flagged as inter frames.
    fn decode(&mut self, _supp: &mut NADecoderSupport, pkt: &NAPacket) -> DecoderResult<NAFrameRef> {
        let src = pkt.get_buffer();
        // the input must consist of whole 16-bit words
        validate!(src.len() > 2 && (src.len() & 1) == 0);
        let mut mr = MemoryReader::new_read(&src);
        let mut br = ByteReader::new(&mut mr);

        let mut is_intra = true;
        let mut dpos = 0; // output position in cur_frm, in pixels
        while dpos < self.cur_frm.len() {
            let val = br.read_u16le()?;
            if (val & 0x8000) == 0 { // raw pixel
                self.cur_frm[dpos] = val;
                dpos += 1;
            } else {
                let op = (val >> 12) & 7;
                match op {
                    0 => { // skip: copy `len` pixels from the previous frame
                        let len = (val & 0xFFF) as usize;
                        validate!(dpos + len <= self.cur_frm.len());
                        self.cur_frm[dpos..][..len].copy_from_slice(&self.prev_frm[dpos..][..len]);
                        dpos += len;
                        is_intra = false;
                    },
                    1 => { // motion: bits 0-2 = dx+4, bits 3-5 = dy+4, bits 6-11 = length
                        let dx = (( val & 7) as isize) - 4;
                        let dy = (((val >> 3) & 7) as isize) - 4;
                        let len = ((val >> 6) & 0x3F) as usize;
                        validate!(dpos + len <= self.cur_frm.len());
                        if dx == 0 && dy == 0 { // previous line
                            // zero vector means "copy the line above" in the
                            // current frame; done pixel by pixel because the
                            // source and destination regions may overlap
                            validate!(dpos >= self.width);
                            for _ in 0..len {
                                self.cur_frm[dpos] = self.cur_frm[dpos - self.width];
                                dpos += 1;
                            }
                        } else {
                            // non-zero vectors copy from the previous frame
                            let offset = (dpos as isize) + dx + dy * (self.width as isize);
                            validate!(offset >= 0);
                            let offset = offset as usize;
                            validate!(offset + len <= self.prev_frm.len());
                            self.cur_frm[dpos..][..len].copy_from_slice(&self.prev_frm[offset..][..len]);
                            dpos += len;
                            is_intra = false;
                        }
                    },
                    2 => { // run: repeat the next word `len` times
                        let len = (val & 0xFFF) as usize;
                        validate!(dpos + len <= self.cur_frm.len());
                        let pix = br.read_u16le()?;
                        for _ in 0..len {
                            self.cur_frm[dpos] = pix;
                            dpos += 1;
                        }
                    },
                    3 => { // raw: `len` literal pixel words follow
                        let len = (val & 0xFFF) as usize;
                        validate!(dpos + len <= self.cur_frm.len());
                        for _ in 0..len {
                            self.cur_frm[dpos] = br.read_u16le()?;
                            dpos += 1;
                        }
                    },
                    4 => { // four-colour pattern: 4 colours then packed 2-bit indices
                        let len = (val & 0xFF) as usize;
                        validate!(dpos + len <= self.cur_frm.len());
                        let clrs = [
                            br.read_u16le()?,
                            br.read_u16le()?,
                            br.read_u16le()?,
                            br.read_u16le()?
                        ];
                        let mut mask = 0;
                        let mut pos = 8; // force reading a mask word immediately

                        for _i in 0..len {
                            if pos == 8 { // each 16-bit mask holds eight 2-bit indices
                                mask = br.read_u16le()? as usize;
                                pos = 0;
                            }
                            self.cur_frm[dpos] = clrs[mask & 3];
                            dpos += 1;
                            mask >>= 2;
                            pos += 1;
                        }
                    },
                    5 => { // interleaved: `len` pairs of two alternating colours
                        let len = (val & 0xFFF) as usize;
                        validate!(dpos + len * 2 <= self.cur_frm.len());
                        let clrs = [
                            br.read_u16le()?,
                            br.read_u16le()?
                        ];
                        for _ in 0..len {
                            self.cur_frm[dpos] = clrs[0];
                            dpos += 1;
                            self.cur_frm[dpos] = clrs[1];
                            dpos += 1;
                        }
                    },
                    _ => return Err(DecoderError::NotImplemented),
                }
            }
        }

        // copy the reconstructed frame into an output buffer line by line
        let bufinfo = alloc_video_buffer(self.info.get_properties().get_video_info().unwrap(), 0)?;
        let mut buf = bufinfo.get_vbuf16().unwrap();
        let stride = buf.get_stride(0);
        let data = buf.get_data_mut().unwrap();

        for (dline, sline) in data.chunks_exact_mut(stride)
                .zip(self.cur_frm.chunks_exact(self.width)) {
            dline[..self.width].copy_from_slice(sline);
        }
        // YUV-coded pixels are mapped to RGB via a lookup table only on
        // output; the internal frames keep the original coded values
        if self.is_yuv {
            for el in data.iter_mut() {
                *el = YUV2RGB[(*el as usize) & 0x7FFF];
            }
        }

        // the finished frame becomes the reference for the next one
        std::mem::swap(&mut self.cur_frm, &mut self.prev_frm);

        let mut frm = NAFrame::new_from_pkt(pkt, self.info.clone(), bufinfo);
        frm.set_keyframe(is_intra);
        frm.set_frame_type(if is_intra { FrameType::I } else { FrameType::P });
        Ok(frm.into_ref())
    }
    /// Resets both internal frames to all-zero pixels.
    fn flush(&mut self) {
        for el in self.cur_frm.iter_mut() {
            *el = 0;
        }
        for el in self.prev_frm.iter_mut() {
            *el = 0;
        }
    }
}
175
176 impl NAOptionHandler for LinePackDecoder {
177 fn get_supported_options(&self) -> &[NAOptionDefinition] { &[] }
178 fn set_options(&mut self, _options: &[NAOption]) { }
179 fn query_option_value(&self, _name: &str) -> Option<NAValue> { None }
180 }
181
182 pub fn get_decoder() -> Box<dyn NADecoder + Send> {
183 Box::new(LinePackDecoder::new())
184 }
185
/// Packetiser that splits a raw LinePack byte stream into whole-frame packets.
#[derive(Default)]
struct LinePackPacketiser {
    stream: Option<NAStreamRef>, // stream this packetiser is attached to
    buf: Vec<u8>,                // accumulated input bytes not yet emitted
    end: usize,                  // bytes of `buf` parsed so far for the current frame
    frameno: u32,                // sequential frame number, used as the PTS
    intra: bool,                 // current frame contains only intra opcodes so far
    pos: usize,                  // pixels accounted for in the frame being parsed
    img_size: usize,             // expected frame size in pixels (width * height)
}
196
197 impl LinePackPacketiser {
198 fn new() -> Self { Self::default() }
199 }
200
impl NAPacketiser for LinePackPacketiser {
    /// Stores the stream reference and caches the frame size in pixels.
    fn attach_stream(&mut self, stream: NAStreamRef) {
        let vinfo = stream.get_info().get_properties().get_video_info().unwrap();
        self.img_size = vinfo.width * vinfo.height;
        self.stream = Some(stream);
    }
    /// Buffers more input; keeps requesting data while under 1 KB is queued.
    fn add_data(&mut self, src: &[u8]) -> bool {
        self.buf.extend_from_slice(src);
        self.buf.len() < (1 << 10)
    }
    /// Returns a clone of the attached stream under the requested ID.
    fn parse_stream(&mut self, id: u32) -> DecoderResult<NAStreamRef> {
        if let Some(ref stream) = self.stream {
            let mut stream = NAStream::clone(stream);
            stream.id = id;
            Ok(stream.into_ref())
        } else {
            Err(DecoderError::MissingReference)
        }
    }
    /// Resynchronisation is not supported for this format.
    fn skip_junk(&mut self) -> DecoderResult<usize> {
        Err(DecoderError::NotImplemented)
    }
    /// Scans the buffered data for a complete frame and emits it as a packet.
    ///
    /// Opcode words are parsed (mirroring the decoder's opcode layout) only
    /// to account for how many pixels and payload bytes each one covers; a
    /// packet is produced once a whole frame's worth of pixels is found and
    /// all its bytes are buffered.
    fn get_packet(&mut self, stream: NAStreamRef) -> DecoderResult<Option<NAPacket>> {
        // the previous scan ran past the buffered data - wait for more input
        if self.buf.len() < self.end {
            return Ok(None);
        }

        // starting a new frame - reset the per-frame parsing state
        if self.end == 0 {
            self.intra = true;
            self.pos = 0;
        }

        while self.end + 2 <= self.buf.len() && self.pos < self.img_size {
            // opcode words are 16-bit little-endian
            let val = u16::from(self.buf[self.end + 1]) * 256 + u16::from(self.buf[self.end]);
            self.end += 2;

            if (val & 0x8000) == 0 { // literal pixel
                self.pos += 1;
            } else {
                let op = (val >> 12) & 7;
                let common_len = (val & 0xFFF) as usize;
                // pixels covered by this opcode
                self.pos += match op {
                    0 => common_len, // skip size
                    1 => ((val >> 6) & 0x3F) as usize, // motion size
                    2 => common_len, // run
                    3 => common_len, // raw
                    4 => common_len & 0xFF, // four-colour pattern
                    5 => common_len * 2, // interleaved
                    _ => 0, // ???
                };
                // payload bytes following the opcode word
                self.end += match op {
                    2 => 2, // run value
                    3 => common_len * 2, // raw values
                    4 => 8 + ((common_len & 0xFF) + 7) / 8 * 2, // 4 colours + masks
                    5 => 4, // two values
                    _ => 0,
                };
                // opcodes referencing the previous frame make the frame inter;
                // a zero motion vector (0x24 in the low six bits) only copies
                // from the line above, so it keeps the frame intra.
                // NOTE(review): the decoder's op 2 (run) never reads the
                // previous frame, yet op 2 clears the intra flag here -
                // confirm whether this extra conservatism is intentional.
                if (op == 0) || (op == 1 && (val & 0x3F) != 0x24) || (op == 2) {
                    self.intra = false;
                }
            }
        }

        // a full frame has been accounted for and all of its bytes are present
        if self.pos >= self.img_size && self.end <= self.buf.len() {
            let mut data = Vec::with_capacity(self.end);
            data.extend_from_slice(&self.buf[..self.end]);
            self.buf.drain(..self.end);
            // frames are timestamped by their sequential number
            let ts = NATimeInfo::new(Some(u64::from(self.frameno)), None, None, stream.tb_num, stream.tb_den);
            self.end = 0;
            self.frameno += 1;

            return Ok(Some(NAPacket::new(stream, ts, self.intra, data)));
        }

        Ok(None)
    }
    /// Drops all buffered data and parsing state.
    fn reset(&mut self) {
        self.buf.clear();
        self.end = 0;
    }
    fn bytes_left(&self) -> usize { self.buf.len() }
}
283
284 pub fn get_packetiser() -> Box<dyn NAPacketiser + Send> {
285 Box::new(LinePackPacketiser::new())
286 }
287
#[cfg(test)]
mod test {
    use nihav_core::codecs::{RegisteredDecoders, RegisteredPacketisers};
    use nihav_core::demuxers::RegisteredRawDemuxers;
    use nihav_codec_support::test::dec_video::*;
    use crate::*;
    // Decodes a known sample through demuxer + packetiser + decoder and
    // compares per-frame MD5 checksums against the recorded reference values.
    #[test]
    fn test_linepack() {
        // register everything needed to open and decode the sample
        let mut dmx_reg = RegisteredRawDemuxers::new();
        acorn_register_all_raw_demuxers(&mut dmx_reg);
        let mut pkt_reg = RegisteredPacketisers::new();
        acorn_register_all_packetisers(&mut pkt_reg);
        let mut dec_reg = RegisteredDecoders::new();
        acorn_register_all_decoders(&mut dec_reg);

        // a sample from Cine Clips by Oregan Software Developments
        test_decoding_raw("armovie", "linepack", "assets/Acorn/COLOURPLUS", Some(5),
                          &dmx_reg, &pkt_reg, &dec_reg,
                          ExpectedTestResult::MD5Frames(vec![
                            [0x373eb9d6, 0xc52d7abd, 0xe1f3631b, 0xf509cb16],
                            [0x373eb9d6, 0xc52d7abd, 0xe1f3631b, 0xf509cb16],
                            [0x373eb9d6, 0xc52d7abd, 0xe1f3631b, 0xf509cb16],
                            [0x373eb9d6, 0xc52d7abd, 0xe1f3631b, 0xf509cb16],
                            [0x373eb9d6, 0xc52d7abd, 0xe1f3631b, 0xf509cb16],
                            [0x32033527, 0x3073331b, 0x83942239, 0x57f975ee]]));
    }
}