diff --git a/core/src/avm1/globals/movie_clip.rs b/core/src/avm1/globals/movie_clip.rs index b612d4bf6..f540b362b 100644 --- a/core/src/avm1/globals/movie_clip.rs +++ b/core/src/avm1/globals/movie_clip.rs @@ -65,6 +65,7 @@ macro_rules! mc_setter { } const PROTO_DECLS: &[Declaration] = declare_properties! { + "attachAudio" => method(mc_method!(attach_audio); DONT_ENUM | DONT_DELETE | VERSION_6); "attachBitmap" => method(mc_method!(attach_bitmap); DONT_ENUM | DONT_DELETE | VERSION_8); "attachMovie" => method(mc_method!(attach_movie); DONT_ENUM | DONT_DELETE); "beginFill" => method(mc_method!(begin_fill); DONT_ENUM | DONT_DELETE | VERSION_6); @@ -286,6 +287,22 @@ fn attach_bitmap<'gc>( Ok(Value::Undefined) } +fn attach_audio<'gc>( + movie_clip: MovieClip<'gc>, + activation: &mut Activation<'_, 'gc>, + args: &[Value<'gc>], +) -> Result<Value<'gc>, Error<'gc>> { + if let [Value::Object(netstream_obj), ..] = args { + if let NativeObject::NetStream(netstream) = netstream_obj.native() { + movie_clip.attach_audio(&mut activation.context, Some(netstream)); + } + } else if let [Value::Bool(false), ..] = args { + movie_clip.attach_audio(&mut activation.context, None); + } + + Ok(Value::Undefined) +} + fn line_style<'gc>( movie_clip: MovieClip<'gc>, activation: &mut Activation<'_, 'gc>, diff --git a/core/src/display_object/movie_clip.rs b/core/src/display_object/movie_clip.rs index b11801988..9eb167dce 100644 --- a/core/src/display_object/movie_clip.rs +++ b/core/src/display_object/movie_clip.rs @@ -34,6 +34,7 @@ use crate::limits::ExecutionLimit; use crate::loader; use crate::loader::Loader; use crate::prelude::*; +use crate::streams::NetStream; use crate::string::{AvmString, SwfStrExt as _, WStr, WString}; use crate::tag_utils::{self, ControlFlow, DecodeResult, Error, SwfMovie, SwfSlice, SwfStream}; use crate::vminterface::{AvmObject, Instantiator}; @@ -184,6 +185,9 @@ pub struct MovieClipData<'gc> { /// List of tags queued up for the current frame. 
#[collect(require_static)] queued_tags: HashMap<Depth, QueuedTagList>, + + /// Attached audio (AVM1) + attached_audio: Option<NetStream<'gc>>, } impl<'gc> MovieClip<'gc> { @@ -217,6 +221,7 @@ impl<'gc> MovieClip<'gc> { #[cfg(feature = "timeline_debug")] tag_frame_boundaries: Default::default(), queued_tags: HashMap::new(), + attached_audio: None, }, )) } @@ -256,6 +261,7 @@ impl<'gc> MovieClip<'gc> { #[cfg(feature = "timeline_debug")] tag_frame_boundaries: Default::default(), queued_tags: HashMap::new(), + attached_audio: None, }, )) } @@ -299,6 +305,7 @@ impl<'gc> MovieClip<'gc> { #[cfg(feature = "timeline_debug")] tag_frame_boundaries: Default::default(), queued_tags: HashMap::new(), + attached_audio: None, }, )) } @@ -364,6 +371,7 @@ impl<'gc> MovieClip<'gc> { #[cfg(feature = "timeline_debug")] tag_frame_boundaries: Default::default(), queued_tags: HashMap::new(), + attached_audio: None, }, )); @@ -2491,6 +2499,25 @@ impl<'gc> MovieClip<'gc> { self.replace_with_movie(context, movie, self.is_root(), None); } + + pub fn attach_audio( + self, + context: &mut UpdateContext<'_, 'gc>, + netstream: Option<NetStream<'gc>>, + ) { + let mut write = self.0.write(context.gc_context); + if netstream != write.attached_audio { + if let Some(old_netstream) = write.attached_audio { + old_netstream.was_detached(context); + } + + write.attached_audio = netstream; + + if let Some(netstream) = netstream { + netstream.was_attached(context, self); + } + } + } } impl<'gc> TDisplayObject<'gc> for MovieClip<'gc> { diff --git a/core/src/streams.rs b/core/src/streams.rs index f19580a01..b7efd8e7a 100644 --- a/core/src/streams.rs +++ b/core/src/streams.rs @@ -14,6 +14,7 @@ use crate::backend::audio::{DecodeError, SoundInstanceHandle}; use crate::backend::navigator::Request; use crate::buffer::{Buffer, Substream}; use crate::context::UpdateContext; +use crate::display_object::MovieClip; use crate::loader::Error; use crate::string::AvmString; use crate::vminterface::AvmObject; @@ -31,7 +32,30 @@ use ruffle_video::VideoStreamHandle; use 
std::cmp::max; use std::io::Seek; use swf::{AudioCompression, SoundFormat, SoundStreamHead, VideoCodec, VideoDeblocking}; +use thiserror::Error; use url::Url; + +#[derive(Debug, Error)] +enum NetstreamError { + #[error("Decoding failed because {0}")] + DecodeError(DecodeError), + + #[error("Could not play back audio and no error was given")] + NoPlayback, + + #[error("Unknown codec")] + UnknownCodec, + + #[error("AVM1 NetStream not attached to MovieClip")] + NotAttached, +} + +impl From<DecodeError> for NetstreamError { + fn from(err: DecodeError) -> NetstreamError { + NetstreamError::DecodeError(err) + } +} + /// Manager for all media streams. /// /// This does *not* handle data transport; which is delegated to `LoadManager`. @@ -147,10 +171,6 @@ pub enum NetStreamType { /// onto a table of data buffers like `Video` does, so we must maintain /// frame IDs ourselves for various API related purposes. frame_id: u32, - - /// The currently playing audio track's `Substream` and associated - /// audio instance. - audio_stream: Option<(Substream, SoundInstanceHandle)>, }, } @@ -197,6 +217,14 @@ pub struct NetStreamData<'gc> { /// The URL of the requested FLV if one exists. url: Option<Url>, + + /// The currently playing audio track's `Substream` and associated + /// audio instance. + #[collect(require_static)] + audio_stream: Option<(Substream, SoundInstanceHandle)>, + + /// The MovieClip this `NetStream` is attached to. 
+ attached_to: Option<MovieClip<'gc>>, } impl<'gc> NetStream<'gc> { @@ -213,6 +241,8 @@ impl<'gc> NetStream<'gc> { avm_object, avm2_client: None, url: None, + audio_stream: None, + attached_to: None, }, )) } @@ -298,6 +328,38 @@ impl<'gc> NetStream<'gc> { StreamManager::toggle_paused(context, self); } + /// Indicates that this `NetStream`'s audio was detached from a `MovieClip` (AVM1) + pub fn was_detached(self, context: &mut UpdateContext<'_, 'gc>) { + let mut write = self.0.write(context.gc_context); + + if let Some((_substream, sound_instance)) = &write.audio_stream { + context + .audio_manager + .stop_sound(context.audio, *sound_instance); + } + + write.audio_stream = None; + write.attached_to = None; + } + + /// Indicates that this `NetStream`'s audio was attached to a `MovieClip` (AVM1) + pub fn was_attached(self, context: &mut UpdateContext<'_, 'gc>, clip: MovieClip<'gc>) { + let mut write = self.0.write(context.gc_context); + + // A `NetStream` cannot be attached to two `MovieClip`s at once. + // Stop the old sound; the new one will stream at the next tag read. + // TODO: Change this to have `audio_manager` just switch the sound + // transforms around + if let Some((_substream, sound_instance)) = &write.audio_stream { + context + .audio_manager + .stop_sound(context.audio, *sound_instance); + } + + write.audio_stream = None; + write.attached_to = Some(clip); + } + pub fn tick(self, context: &mut UpdateContext<'_, 'gc>, dt: f64) { #![allow(clippy::explicit_auto_deref)] //Erroneous lint let mut write = self.0.write(context.gc_context); @@ -324,7 +386,6 @@ impl<'gc> NetStream<'gc> { header, video_stream: None, frame_id: 0, - audio_stream: None, }); } Err(FlvError::EndOfData) => return, @@ -409,17 +470,15 @@ impl<'gc> NetStream<'gc> { sound_type, data, }) => { - let mut substream = match &mut write.stream_type { - Some(NetStreamType::Flv { - audio_stream: Some((substream, audio_handle)), - .. 
- }) if context.audio.is_sound_playing(*audio_handle) => { + let attached_to = write.attached_to; + let mut substream = match &mut write.audio_stream { + Some((substream, audio_handle)) + if context.audio.is_sound_playing(*audio_handle) => + { substream.clone() } - Some(NetStreamType::Flv { - audio_stream, //None or not playing - .. - }) => { + audio_stream => { + //None or not playing let substream = Substream::new(slice.buffer().clone()); let audio_handle = (|| { let swf_format = SoundFormat { @@ -442,26 +501,18 @@ impl<'gc> NetStream<'gc> { AudioCompression::Nellymoser } FlvSoundFormat::G711ALawPCM => { - return Err(DecodeError::UnhandledCompression( - AudioCompression::Uncompressed, - )) + return Err(NetstreamError::UnknownCodec) } FlvSoundFormat::G711MuLawPCM => { - return Err(DecodeError::UnhandledCompression( - AudioCompression::Uncompressed, - )) + return Err(NetstreamError::UnknownCodec) } FlvSoundFormat::Aac => { - return Err(DecodeError::UnhandledCompression( - AudioCompression::Uncompressed, - )) + return Err(NetstreamError::UnknownCodec) } FlvSoundFormat::Speex => AudioCompression::Speex, FlvSoundFormat::MP38kHz => AudioCompression::Mp3, FlvSoundFormat::DeviceSpecific => { - return Err(DecodeError::UnhandledCompression( - AudioCompression::Uncompressed, - )) + return Err(NetstreamError::UnknownCodec) } }, sample_rate: match (format, rate) { @@ -480,19 +531,36 @@ impl<'gc> NetStream<'gc> { FlvSoundSize::Bits16 => true, }, }; - context.audio.start_substream( - substream.clone(), - &SoundStreamHead { - stream_format: swf_format.clone(), - playback_format: swf_format, - num_samples_per_block: 0, - latency_seek: 0, - }, - ) + + let sound_stream_head = SoundStreamHead { + stream_format: swf_format.clone(), + playback_format: swf_format, + num_samples_per_block: 0, + latency_seek: 0, + }; + + if context.is_action_script_3() { + Ok(context.audio.start_substream( + substream.clone(), + &sound_stream_head, + )?) 
+ } else if let Some(mc) = attached_to { + context + .audio_manager + .start_substream( + context.audio, + substream.clone(), + mc, + &sound_stream_head, + ) + .ok_or(NetstreamError::NoPlayback) + } else { + return Err(NetstreamError::NotAttached); + } })(); if let Err(e) = audio_handle { - tracing::error!("Error encountered appending substream: {}", e); + tracing::error!("Error encountered starting stream: {}", e); } else { *audio_stream = Some((substream.clone(), audio_handle.unwrap())); @@ -500,7 +568,6 @@ impl<'gc> NetStream<'gc> { substream } - _ => unreachable!(), }; let result = match data {