avm1: Implement `MovieClip.attachAudio`.

`NetStream`s in AVM1 do not play audio unless attached to a `MovieClip` via `attachAudio`. As far as I can tell, there is no equivalent API in AVM2.
David Wendt 2023-07-20 21:08:50 -04:00 committed by kmeisthax
parent dbee260d79
commit 9b29791eeb
3 changed files with 148 additions and 37 deletions
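
For context, the AVM1-side usage this enables looks roughly like the sketch below. This is illustrative ActionScript 2 only, not taken from this commit; the connection setup, the clip `my_mc`, and the FLV name are placeholder assumptions, while the `attachAudio(false)` detach path mirrors the argument handling in the new `attach_audio` glue further down.

```actionscript
var nc:NetConnection = new NetConnection();
nc.connect(null);
var ns:NetStream = new NetStream(nc);

// Route the stream's audio through this clip; without attachAudio,
// AVM1 NetStream audio stays silent.
my_mc.attachAudio(ns);
ns.play("music.flv"); // placeholder FLV name

// Passing false detaches the audio again (the None case in the Rust glue).
my_mc.attachAudio(false);
```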


@@ -65,6 +65,7 @@ macro_rules! mc_setter {
 }
 
 const PROTO_DECLS: &[Declaration] = declare_properties! {
+    "attachAudio" => method(mc_method!(attach_audio); DONT_ENUM | DONT_DELETE | VERSION_6);
     "attachBitmap" => method(mc_method!(attach_bitmap); DONT_ENUM | DONT_DELETE | VERSION_8);
     "attachMovie" => method(mc_method!(attach_movie); DONT_ENUM | DONT_DELETE);
     "beginFill" => method(mc_method!(begin_fill); DONT_ENUM | DONT_DELETE | VERSION_6);
@@ -286,6 +287,22 @@ fn attach_bitmap<'gc>(
     Ok(Value::Undefined)
 }
+
+fn attach_audio<'gc>(
+    movie_clip: MovieClip<'gc>,
+    activation: &mut Activation<'_, 'gc>,
+    args: &[Value<'gc>],
+) -> Result<Value<'gc>, Error<'gc>> {
+    if let [Value::Object(netstream_obj), ..] = args {
+        if let NativeObject::NetStream(netstream) = netstream_obj.native() {
+            movie_clip.attach_audio(&mut activation.context, Some(netstream));
+        }
+    } else if let [Value::Bool(false), ..] = args {
+        movie_clip.attach_audio(&mut activation.context, None);
+    }
+
+    Ok(Value::Undefined)
+}
 fn line_style<'gc>(
     movie_clip: MovieClip<'gc>,
     activation: &mut Activation<'_, 'gc>,


@@ -34,6 +34,7 @@ use crate::limits::ExecutionLimit;
 use crate::loader;
 use crate::loader::Loader;
 use crate::prelude::*;
+use crate::streams::NetStream;
 use crate::string::{AvmString, SwfStrExt as _, WStr, WString};
 use crate::tag_utils::{self, ControlFlow, DecodeResult, Error, SwfMovie, SwfSlice, SwfStream};
 use crate::vminterface::{AvmObject, Instantiator};
@@ -184,6 +185,9 @@ pub struct MovieClipData<'gc> {
     /// List of tags queued up for the current frame.
     #[collect(require_static)]
     queued_tags: HashMap<Depth, QueuedTagList>,
+
+    /// Attached audio (AVM1)
+    attached_audio: Option<NetStream<'gc>>,
 }
 
 impl<'gc> MovieClip<'gc> {
@@ -217,6 +221,7 @@ impl<'gc> MovieClip<'gc> {
                 #[cfg(feature = "timeline_debug")]
                 tag_frame_boundaries: Default::default(),
                 queued_tags: HashMap::new(),
+                attached_audio: None,
             },
         ))
     }
@@ -256,6 +261,7 @@ impl<'gc> MovieClip<'gc> {
                 #[cfg(feature = "timeline_debug")]
                 tag_frame_boundaries: Default::default(),
                 queued_tags: HashMap::new(),
+                attached_audio: None,
             },
         ))
     }
@@ -299,6 +305,7 @@ impl<'gc> MovieClip<'gc> {
                 #[cfg(feature = "timeline_debug")]
                 tag_frame_boundaries: Default::default(),
                 queued_tags: HashMap::new(),
+                attached_audio: None,
             },
         ))
     }
@@ -364,6 +371,7 @@ impl<'gc> MovieClip<'gc> {
                 #[cfg(feature = "timeline_debug")]
                 tag_frame_boundaries: Default::default(),
                 queued_tags: HashMap::new(),
+                attached_audio: None,
             },
         ));
@@ -2491,6 +2499,25 @@ impl<'gc> MovieClip<'gc> {
         self.replace_with_movie(context, movie, self.is_root(), None);
     }
 
+    pub fn attach_audio(
+        self,
+        context: &mut UpdateContext<'_, 'gc>,
+        netstream: Option<NetStream<'gc>>,
+    ) {
+        let mut write = self.0.write(context.gc_context);
+        if netstream != write.attached_audio {
+            if let Some(old_netstream) = write.attached_audio {
+                old_netstream.was_detached(context);
+            }
+
+            write.attached_audio = netstream;
+
+            if let Some(netstream) = netstream {
+                netstream.was_attached(context, self);
+            }
+        }
+    }
 }
 
 impl<'gc> TDisplayObject<'gc> for MovieClip<'gc> {


@@ -14,6 +14,7 @@ use crate::backend::audio::{DecodeError, SoundInstanceHandle};
 use crate::backend::navigator::Request;
 use crate::buffer::{Buffer, Substream};
 use crate::context::UpdateContext;
+use crate::display_object::MovieClip;
 use crate::loader::Error;
 use crate::string::AvmString;
 use crate::vminterface::AvmObject;
@@ -31,7 +32,30 @@ use ruffle_video::VideoStreamHandle;
 use std::cmp::max;
 use std::io::Seek;
 use swf::{AudioCompression, SoundFormat, SoundStreamHead, VideoCodec, VideoDeblocking};
+use thiserror::Error;
 use url::Url;
 
+#[derive(Debug, Error)]
+enum NetstreamError {
+    #[error("Decoding failed because {0}")]
+    DecodeError(DecodeError),
+
+    #[error("Could not play back audio and no error was given")]
+    NoPlayback,
+
+    #[error("Unknown codec")]
+    UnknownCodec,
+
+    #[error("AVM1 NetStream not attached to MovieClip")]
+    NotAttached,
+}
+
+impl From<DecodeError> for NetstreamError {
+    fn from(err: DecodeError) -> NetstreamError {
+        NetstreamError::DecodeError(err)
+    }
+}
+
 /// Manager for all media streams.
 ///
 /// This does *not* handle data transport; which is delegated to `LoadManager`.
@@ -147,10 +171,6 @@ pub enum NetStreamType {
         /// onto a table of data buffers like `Video` does, so we must maintain
         /// frame IDs ourselves for various API related purposes.
         frame_id: u32,
-
-        /// The currently playing audio track's `Substream` and associated
-        /// audio instance.
-        audio_stream: Option<(Substream, SoundInstanceHandle)>,
     },
 }
@@ -197,6 +217,14 @@ pub struct NetStreamData<'gc> {
     /// The URL of the requested FLV if one exists.
     url: Option<String>,
 
+    /// The currently playing audio track's `Substream` and associated
+    /// audio instance.
+    #[collect(require_static)]
+    audio_stream: Option<(Substream, SoundInstanceHandle)>,
+
+    /// The MovieClip this `NetStream` is attached to.
+    attached_to: Option<MovieClip<'gc>>,
 }
 
 impl<'gc> NetStream<'gc> {
@@ -213,6 +241,8 @@ impl<'gc> NetStream<'gc> {
                 avm_object,
                 avm2_client: None,
                 url: None,
+                audio_stream: None,
+                attached_to: None,
             },
         ))
     }
@@ -298,6 +328,38 @@ impl<'gc> NetStream<'gc> {
         StreamManager::toggle_paused(context, self);
     }
 
+    /// Indicates that this `NetStream`'s audio was detached from a `MovieClip` (AVM1)
+    pub fn was_detached(self, context: &mut UpdateContext<'_, 'gc>) {
+        let mut write = self.0.write(context.gc_context);
+
+        if let Some((_substream, sound_instance)) = &write.audio_stream {
+            context
+                .audio_manager
+                .stop_sound(context.audio, *sound_instance);
+        }
+
+        write.audio_stream = None;
+        write.attached_to = None;
+    }
+
+    /// Indicates that this `NetStream`'s audio was attached to a `MovieClip` (AVM1)
+    pub fn was_attached(self, context: &mut UpdateContext<'_, 'gc>, clip: MovieClip<'gc>) {
+        let mut write = self.0.write(context.gc_context);
+
+        // A `NetStream` cannot be attached to two `MovieClip`s at once.
+        // Stop the old sound; the new one will stream at the next tag read.
+        // TODO: Change this to have `audio_manager` just switch the sound
+        // transforms around
+        if let Some((_substream, sound_instance)) = &write.audio_stream {
+            context
+                .audio_manager
+                .stop_sound(context.audio, *sound_instance);
+        }
+
+        write.audio_stream = None;
+        write.attached_to = Some(clip);
+    }
+
     pub fn tick(self, context: &mut UpdateContext<'_, 'gc>, dt: f64) {
         #![allow(clippy::explicit_auto_deref)] //Erroneous lint
         let mut write = self.0.write(context.gc_context);
@@ -324,7 +386,6 @@ impl<'gc> NetStream<'gc> {
                     header,
                     video_stream: None,
                     frame_id: 0,
-                    audio_stream: None,
                 });
             }
             Err(FlvError::EndOfData) => return,
@@ -409,17 +470,15 @@ impl<'gc> NetStream<'gc> {
                     sound_type,
                     data,
                 }) => {
-                    let mut substream = match &mut write.stream_type {
-                        Some(NetStreamType::Flv {
-                            audio_stream: Some((substream, audio_handle)),
-                            ..
-                        }) if context.audio.is_sound_playing(*audio_handle) => {
+                    let attached_to = write.attached_to;
+                    let mut substream = match &mut write.audio_stream {
+                        Some((substream, audio_handle))
+                            if context.audio.is_sound_playing(*audio_handle) =>
+                        {
                             substream.clone()
                         }
-                        Some(NetStreamType::Flv {
-                            audio_stream, //None or not playing
-                            ..
-                        }) => {
+                        audio_stream => {
+                            //None or not playing
                             let substream = Substream::new(slice.buffer().clone());
                             let audio_handle = (|| {
                                 let swf_format = SoundFormat {
@@ -442,26 +501,18 @@ impl<'gc> NetStream<'gc> {
                 AudioCompression::Nellymoser
             }
             FlvSoundFormat::G711ALawPCM => {
-                return Err(DecodeError::UnhandledCompression(
-                    AudioCompression::Uncompressed,
-                ))
+                return Err(NetstreamError::UnknownCodec)
             }
             FlvSoundFormat::G711MuLawPCM => {
-                return Err(DecodeError::UnhandledCompression(
-                    AudioCompression::Uncompressed,
-                ))
+                return Err(NetstreamError::UnknownCodec)
             }
             FlvSoundFormat::Aac => {
-                return Err(DecodeError::UnhandledCompression(
-                    AudioCompression::Uncompressed,
-                ))
+                return Err(NetstreamError::UnknownCodec)
            }
             FlvSoundFormat::Speex => AudioCompression::Speex,
             FlvSoundFormat::MP38kHz => AudioCompression::Mp3,
             FlvSoundFormat::DeviceSpecific => {
-                return Err(DecodeError::UnhandledCompression(
-                    AudioCompression::Uncompressed,
-                ))
+                return Err(NetstreamError::UnknownCodec)
             }
         },
         sample_rate: match (format, rate) {
@@ -480,19 +531,36 @@ impl<'gc> NetStream<'gc> {
             FlvSoundSize::Bits16 => true,
         },
     };
-    context.audio.start_substream(
-        substream.clone(),
-        &SoundStreamHead {
-            stream_format: swf_format.clone(),
-            playback_format: swf_format,
-            num_samples_per_block: 0,
-            latency_seek: 0,
-        },
-    )
+
+    let sound_stream_head = SoundStreamHead {
+        stream_format: swf_format.clone(),
+        playback_format: swf_format,
+        num_samples_per_block: 0,
+        latency_seek: 0,
+    };
+
+    if context.is_action_script_3() {
+        Ok(context.audio.start_substream(
+            substream.clone(),
+            &sound_stream_head,
+        )?)
+    } else if let Some(mc) = attached_to {
+        context
+            .audio_manager
+            .start_substream(
+                context.audio,
+                substream.clone(),
+                mc,
+                &sound_stream_head,
+            )
+            .ok_or(NetstreamError::NoPlayback)
+    } else {
+        return Err(NetstreamError::NotAttached);
+    }
 })();
 if let Err(e) = audio_handle {
-    tracing::error!("Error encountered appending substream: {}", e);
+    tracing::error!("Error encountered starting stream: {}", e);
 } else {
     *audio_stream =
         Some((substream.clone(), audio_handle.unwrap()));
@@ -500,7 +568,6 @@ impl<'gc> NetStream<'gc> {
             substream
         }
-        _ => unreachable!(),
     };
 
     let result = match data {