video: Implement a very basic video decoding loop

Authored by David Wendt on 2020-12-22 21:20:28 -05:00; committed by Mike Welsh
parent 6dc96f5d58
commit 368c1cf3c5
3 changed files with 128 additions and 15 deletions

View File

@@ -15,14 +15,14 @@ pub type Error = Box<dyn std::error::Error>;
/// An encoded video frame of some video codec.
pub struct EncodedFrame<'a> {
/// The codec used to encode the frame.
codec: VideoCodec,
pub codec: VideoCodec,
/// The raw bitstream data to funnel into the codec.
data: &'a [u8],
pub data: &'a [u8],
/// A caller-specified frame ID. Frame IDs must be consistent between
/// subsequent uses of the same data stream.
frame_id: u32,
pub frame_id: u32,
}
impl<'a> EncodedFrame<'a> {
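
With codec, data, and frame_id now public, callers outside the backend can assemble an EncodedFrame themselves. A minimal sketch, assuming a streamdef, a movie whose data() returns the raw SWF bytes, and precomputed start/end offsets (none of these bindings appear in this hunk):

// Hypothetical construction of an EncodedFrame for one embedded video frame.
// Only the struct and field names come from the diff above; the surrounding
// variables are assumptions for illustration.
let encoded = EncodedFrame {
    codec: streamdef.codec,          // the VideoCodec declared by DefineVideoStream
    data: &movie.data()[start..end], // raw bitstream bytes for this frame
    frame_id,                        // must stay stable across uses of the same stream
};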

View File

@@ -982,8 +982,8 @@ pub trait TDisplayObject<'gc>:
if let Some(ratio) = place_object.ratio {
if let Some(mut morph_shape) = self.as_morph_shape() {
morph_shape.set_ratio(context.gc_context, ratio);
} else if let Some(mut video) = self.as_video() {
video.seek(context, ratio);
} else if let Some(video) = self.as_video() {
video.seek(context, ratio.into());
}
}
// Clip events only apply to movie clips.

View File

@@ -1,11 +1,16 @@
//! Video player display object
use crate::avm1::Object as Avm1Object;
use crate::backend::render::BitmapHandle;
use crate::backend::video::{EncodedFrame, VideoStreamHandle};
use crate::bounding_box::BoundingBox;
use crate::context::UpdateContext;
use crate::collect::CollectWrapper;
use crate::context::{RenderContext, UpdateContext};
use crate::display_object::{DisplayObjectBase, TDisplayObject};
use crate::prelude::*;
use crate::tag_utils::{SwfMovie, SwfSlice};
use crate::types::{Degrees, Percent};
use crate::vminterface::Instantiator;
use gc_arena::{Collect, GcCell, MutationContext};
use std::borrow::{Borrow, BorrowMut};
use std::collections::BTreeMap;
@@ -26,7 +31,15 @@ pub struct Video<'gc>(GcCell<'gc, VideoData<'gc>>);
#[collect(no_drop)]
pub struct VideoData<'gc> {
base: DisplayObjectBase<'gc>,
source_stream: GcCell<'gc, VideoSource>,
/// The source of the video data (e.g. an external file, a SWF bitstream)
source: GcCell<'gc, VideoSource>,
/// The decoder stream that this video source is associated to.
stream: Option<CollectWrapper<VideoStreamHandle>>,
/// The last decoded frame in the video stream.
decoded_frame: Option<CollectWrapper<BitmapHandle>>,
}
#[derive(Clone, Debug, Collect)]
@@ -43,7 +56,7 @@ pub enum VideoSource {
///
/// Each frame consists of a start and end parameter which can be used
/// to reconstruct a reference to the embedded bitstream.
frames: BTreeMap<u16, (usize, usize)>,
frames: BTreeMap<u32, (usize, usize)>,
},
}
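
Each entry in the frames map stores only byte offsets into the movie's backing data, so a frame's bitstream can be reconstructed on demand rather than copied per frame. A rough illustration of that lookup, assuming frame_id, movie, and frames bindings like the ones used in seek() further down:

// Sketch only: recover the encoded bitstream for one frame from its stored offsets.
if let Some((start, end)) = frames.get(&frame_id) {
    let bitstream: &[u8] = &movie.data()[*start..*end];
    // `bitstream` is what gets wrapped in an EncodedFrame and handed to the decoder.
}
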
@@ -54,7 +67,7 @@ impl<'gc> Video<'gc> {
streamdef: DefineVideoStream,
mc: MutationContext<'gc, '_>,
) -> Self {
let source_stream = GcCell::allocate(
let source = GcCell::allocate(
mc,
VideoSource::SWF {
movie,
@@ -67,7 +80,9 @@ impl<'gc> Video<'gc> {
mc,
VideoData {
base: Default::default(),
source_stream,
source,
stream: None,
decoded_frame: None,
},
))
}
@@ -80,7 +95,7 @@ impl<'gc> Video<'gc> {
match (*self
.0
.write(context.gc_context)
.source_stream
.source
.write(context.gc_context))
.borrow_mut()
{
@@ -92,7 +107,7 @@ impl<'gc> Video<'gc> {
let subslice = SwfSlice::from(movie.clone()).to_unbounded_subslice(tag.data);
if let Some(subslice) = subslice {
frames.insert(tag.frame_num, (subslice.start, subslice.end));
frames.insert(tag.frame_num.into(), (subslice.start, subslice.end));
} else {
log::warn!("Invalid bitstream subslice on frame {}", tag.frame_num);
}
@@ -101,7 +116,50 @@ impl<'gc> Video<'gc> {
}
/// Seek to a particular frame in the video stream.
pub fn seek(&self, _context: &mut UpdateContext<'_, 'gc, '_>, _frame: u16) {}
pub fn seek(self, context: &mut UpdateContext<'_, 'gc, '_>, frame_id: u32) {
let read = self.0.read();
let source = read.source;
let stream = if let Some(stream) = &read.stream {
stream
} else {
log::error!("Attempted to sync uninstantiated video stream!");
return;
};
let res = match &*source.read() {
VideoSource::SWF {
movie,
streamdef,
frames,
} => match frames.get(&frame_id) {
Some((slice_start, slice_end)) => {
let encframe = EncodedFrame {
codec: streamdef.codec,
data: &movie.data()[*slice_start..*slice_end],
frame_id,
};
context
.video
.decode_video_stream_frame(stream.0, encframe, context.renderer)
}
None => Err(Box::from(format!(
"Attempted to seek to unknown frame {}",
frame_id
))),
},
};
drop(read);
match res {
Ok(bitmapinfo) => {
let bitmap = bitmapinfo.handle;
self.0.write(context.gc_context).decoded_frame = Some(CollectWrapper(bitmap))
}
Err(e) => log::error!("Got error when seeking video: {}", e),
}
}
}
impl<'gc> TDisplayObject<'gc> for Video<'gc> {
@@ -111,8 +169,46 @@ impl<'gc> TDisplayObject<'gc> for Video<'gc> {
Some(self)
}
fn post_instantiation(
&self,
context: &mut UpdateContext<'_, 'gc, '_>,
_display_object: DisplayObject<'gc>,
_init_object: Option<Avm1Object<'gc>>,
_instantiated_by: Instantiator,
run_frame: bool,
) {
let mut write = self.0.write(context.gc_context);
let stream = match &*write.source.read() {
VideoSource::SWF { streamdef, .. } => {
let stream = context.video.register_video_stream(
streamdef.num_frames.into(),
(streamdef.width, streamdef.height),
streamdef.codec,
streamdef.deblocking,
);
if stream.is_err() {
log::error!(
"Got error when post-instantiating video: {}",
stream.unwrap_err()
);
return;
}
Some(CollectWrapper(stream.unwrap()))
}
};
write.stream = stream;
drop(write);
if run_frame {
self.run_frame(context);
}
}
fn id(&self) -> CharacterId {
match (*self.0.read().source_stream.read()).borrow() {
match (*self.0.read().source.read()).borrow() {
VideoSource::SWF { streamdef, .. } => streamdef.id,
}
}
@@ -120,7 +216,7 @@ impl<'gc> TDisplayObject<'gc> for Video<'gc> {
fn self_bounds(&self) -> BoundingBox {
let mut bounding_box = BoundingBox::default();
match (*self.0.read().source_stream.read()).borrow() {
match (*self.0.read().source.read()).borrow() {
VideoSource::SWF { streamdef, .. } => {
bounding_box.set_width(Twips::from_pixels(streamdef.width as f64));
bounding_box.set_height(Twips::from_pixels(streamdef.height as f64));
@@ -129,4 +225,21 @@ impl<'gc> TDisplayObject<'gc> for Video<'gc> {
bounding_box
}
fn render(&self, context: &mut RenderContext) {
if !self.world_bounds().intersects(&context.view_bounds) {
// Off-screen; culled
return;
}
context.transform_stack.push(&*self.transform());
if let Some(ref bitmap) = self.0.read().decoded_frame {
context
.renderer
.render_bitmap(bitmap.0, context.transform_stack.transform(), false);
}
context.transform_stack.pop();
}
}
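
Taken together, the three files form the "very basic" decoding loop named in the commit title: post_instantiation registers a decoder stream with the video backend, seek feeds one encoded frame through decode_video_stream_frame and caches the returned bitmap, and render draws whatever was decoded last. A condensed, hedged sketch of that flow; video_backend, renderer, transform, start, end, and frame_id are assumed bindings, and error handling is elided:

// Illustrative only: the same sequence the Video display object performs internally.
// 1. Register a decoder stream for the DefineVideoStream tag.
let stream = video_backend.register_video_stream(
    streamdef.num_frames.into(),
    (streamdef.width, streamdef.height),
    streamdef.codec,
    streamdef.deblocking,
)?;

// 2. Decode a single frame (normally triggered by a PlaceObject ratio via seek()).
let encoded = EncodedFrame {
    codec: streamdef.codec,
    data: &movie.data()[start..end],
    frame_id,
};
let bitmap_info = video_backend.decode_video_stream_frame(stream, encoded, renderer)?;

// 3. Draw the cached bitmap the next time the object renders.
renderer.render_bitmap(bitmap_info.handle, transform, false);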