chore: Add rustfmt.toml and rustfmt pass

Mike Welsh 2019-08-26 16:38:37 -07:00
parent 06d9f39c0e
commit 6a5c5ab1df
21 changed files with 6159 additions and 6094 deletions
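The rustfmt.toml itself is among the 21 changed files but is not visible in this truncated view. As a rough, hypothetical sketch only (the contents below are assumed, not taken from the commit), a minimal project-wide rustfmt config might look like:

    # Hypothetical example; the actual rustfmt.toml added by this commit is not shown here.
    edition = "2018"
    newline_style = "Unix"

The reformatted sources that follow would then simply be the result of running `cargo fmt` across the workspace with such a config in place.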

File diff suppressed because it is too large

View File

@@ -1,2 +1,2 @@
pub mod audio;
pub mod render;

View File

@ -1,73 +1,73 @@
use generational_arena::{Arena, Index}; use generational_arena::{Arena, Index};
pub mod decoders; pub mod decoders;
pub mod swf { pub mod swf {
pub use swf::{read, AudioCompression, CharacterId, Sound, SoundFormat, SoundStreamHead}; pub use swf::{read, AudioCompression, CharacterId, Sound, SoundFormat, SoundStreamHead};
} }
pub type AudioStreamHandle = Index; pub type AudioStreamHandle = Index;
pub type SoundHandle = Index; pub type SoundHandle = Index;
type Error = Box<dyn std::error::Error>; type Error = Box<dyn std::error::Error>;
pub trait AudioBackend { pub trait AudioBackend {
fn prime_audio(&mut self) {} fn prime_audio(&mut self) {}
fn register_sound(&mut self, swf_sound: &swf::Sound) -> Result<SoundHandle, Error>; fn register_sound(&mut self, swf_sound: &swf::Sound) -> Result<SoundHandle, Error>;
fn preload_sound_stream_head( fn preload_sound_stream_head(
&mut self, &mut self,
_clip_id: swf::CharacterId, _clip_id: swf::CharacterId,
_stream_info: &swf::SoundStreamHead, _stream_info: &swf::SoundStreamHead,
) { ) {
} }
fn preload_sound_stream_block(&mut self, _clip_id: swf::CharacterId, _audio_data: &[u8]) {} fn preload_sound_stream_block(&mut self, _clip_id: swf::CharacterId, _audio_data: &[u8]) {}
fn preload_sound_stream_end(&mut self, _clip_id: swf::CharacterId) {} fn preload_sound_stream_end(&mut self, _clip_id: swf::CharacterId) {}
fn play_sound(&mut self, sound: SoundHandle); fn play_sound(&mut self, sound: SoundHandle);
fn start_stream( fn start_stream(
&mut self, &mut self,
clip_id: crate::prelude::CharacterId, clip_id: crate::prelude::CharacterId,
clip_data: crate::tag_utils::SwfSlice, clip_data: crate::tag_utils::SwfSlice,
handle: &swf::SoundStreamHead, handle: &swf::SoundStreamHead,
) -> AudioStreamHandle; ) -> AudioStreamHandle;
// TODO: Eventually remove this/move it to library. // TODO: Eventually remove this/move it to library.
fn is_loading_complete(&self) -> bool { fn is_loading_complete(&self) -> bool {
true true
} }
fn tick(&mut self) {} fn tick(&mut self) {}
} }
pub struct NullAudioBackend { pub struct NullAudioBackend {
sounds: Arena<()>, sounds: Arena<()>,
streams: Arena<()>, streams: Arena<()>,
} }
impl NullAudioBackend { impl NullAudioBackend {
pub fn new() -> NullAudioBackend { pub fn new() -> NullAudioBackend {
NullAudioBackend { NullAudioBackend {
streams: Arena::new(), streams: Arena::new(),
sounds: Arena::new(), sounds: Arena::new(),
} }
} }
} }
impl AudioBackend for NullAudioBackend { impl AudioBackend for NullAudioBackend {
fn register_sound(&mut self, _sound: &swf::Sound) -> Result<SoundHandle, Error> { fn register_sound(&mut self, _sound: &swf::Sound) -> Result<SoundHandle, Error> {
Ok(self.sounds.insert(())) Ok(self.sounds.insert(()))
} }
fn play_sound(&mut self, _sound: SoundHandle) {} fn play_sound(&mut self, _sound: SoundHandle) {}
fn start_stream( fn start_stream(
&mut self, &mut self,
_clip_id: crate::prelude::CharacterId, _clip_id: crate::prelude::CharacterId,
_clip_data: crate::tag_utils::SwfSlice, _clip_data: crate::tag_utils::SwfSlice,
_handle: &swf::SoundStreamHead, _handle: &swf::SoundStreamHead,
) -> AudioStreamHandle { ) -> AudioStreamHandle {
self.streams.insert(()) self.streams.insert(())
} }
} }
impl Default for NullAudioBackend { impl Default for NullAudioBackend {
fn default() -> Self { fn default() -> Self {
NullAudioBackend::new() NullAudioBackend::new()
} }
} }

View File

@@ -1,76 +1,76 @@
mod adpcm;
mod mp3;
pub use adpcm::AdpcmDecoder;
pub use mp3::Mp3Decoder;
pub trait Decoder: Iterator<Item = i16> {
    fn num_channels(&self) -> u8;
    fn sample_rate(&self) -> u16;
}
pub fn stream_tag_reader(
    swf_data: crate::tag_utils::SwfSlice,
) -> IterRead<impl Iterator<Item = u8>> {
    use std::io::{Cursor, Read};
    use swf::TagCode;
    let mut reader = swf::read::Reader::new(Cursor::new(swf_data), 8);
    let mut audio_data = vec![];
    let mut cur_byte = 0;
    let mut frame = 1;
    let iter = std::iter::from_fn(move || {
        if cur_byte >= audio_data.len() {
            cur_byte = 0;
            let tag_callback =
                |reader: &mut swf::read::Reader<Cursor<crate::tag_utils::SwfSlice>>,
                 tag_code,
                 tag_len| match tag_code {
                    TagCode::ShowFrame => {
                        frame += 1;
                        Ok(())
                    }
                    TagCode::SoundStreamBlock => {
                        audio_data.clear();
                        let mut data = vec![];
                        reader
                            .get_mut()
                            .take(tag_len as u64)
                            .read_to_end(&mut data)?;
                        audio_data.extend(data[4..].iter());
                        Ok(())
                    }
                    _ => Ok(()),
                };
            let _ =
                crate::tag_utils::decode_tags(&mut reader, tag_callback, TagCode::SoundStreamBlock);
        }
        if cur_byte < audio_data.len() {
            let byte = audio_data[cur_byte];
            cur_byte += 1;
            Some(byte)
        } else {
            None
        }
    });
    IterRead(iter)
}
pub struct IterRead<I: Iterator<Item = u8>>(I);
impl<I: Iterator<Item = u8>> std::io::Read for IterRead<I> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let mut n = 0;
        for out in buf {
            if let Some(v) = self.0.next() {
                *out = v;
                n += 1;
            } else {
                break;
            }
        }
        Ok(n)
    }
}

View File

@@ -1,172 +1,172 @@
use super::Decoder;
use bitstream_io::{BigEndian, BitReader};
use std::io::Read;
pub struct AdpcmDecoder<R: Read> {
    inner: BitReader<R, BigEndian>,
    sample_rate: u16,
    is_stereo: bool,
    bits_per_sample: usize,
    sample_num: u16,
    left_sample: i32,
    left_step_index: i16,
    left_step: i32,
    right_sample: i32,
    right_step_index: i16,
    right_step: i32,
    cur_channel: u8,
}
impl<R: Read> AdpcmDecoder<R> {
    const INDEX_TABLE: [&'static [i16]; 4] = [
        &[-1, 2],
        &[-1, -1, 2, 4],
        &[-1, -1, -1, -1, 2, 4, 6, 8],
        &[-1, -1, -1, -1, -1, -1, -1, -1, 1, 2, 4, 6, 8, 10, 13, 16],
    ];
    const STEP_TABLE: [i32; 89] = [
        7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 19, 21, 23, 25, 28, 31, 34, 37, 41, 45, 50, 55, 60,
        66, 73, 80, 88, 97, 107, 118, 130, 143, 157, 173, 190, 209, 230, 253, 279, 307, 337, 371,
        408, 449, 494, 544, 598, 658, 724, 796, 876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878,
        2066, 2272, 2499, 2749, 3024, 3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484, 7132, 7845,
        8630, 9493, 10442, 11487, 12635, 13899, 15289, 16818, 18500, 20350, 22385, 24623, 27086,
        29794, 32767,
    ];
    pub fn new(inner: R, is_stereo: bool, sample_rate: u16) -> Result<Self, std::io::Error> {
        let mut reader = BitReader::new(inner);
        let bits_per_sample = reader.read::<u8>(2)? as usize + 2;
        let left_sample = 0;
        let left_step_index = 0;
        let left_step = 0;
        let right_sample = 0;
        let right_step_index = 0;
        let right_step = 0;
        Ok(Self {
            inner: reader,
            sample_rate,
            is_stereo,
            bits_per_sample,
            sample_num: 0,
            left_sample,
            left_step,
            left_step_index,
            right_sample,
            right_step,
            right_step_index,
            cur_channel: 2,
        })
    }
    pub fn next_sample(&mut self) -> Result<(), std::io::Error> {
        self.cur_channel = 0;
        if self.sample_num == 0 {
            // The initial sample values are NOT byte-aligned.
            self.left_sample = self.inner.read_signed(16)?;
            self.left_step_index = self.inner.read::<u16>(6)? as i16;
            self.left_step = Self::STEP_TABLE[self.left_step_index as usize];
            if self.is_stereo {
                self.right_sample = self.inner.read_signed(16)?;
                self.right_step_index = self.inner.read::<u16>(6)? as i16;
                self.right_step = Self::STEP_TABLE[self.right_step_index as usize];
            }
        }
        self.sample_num = (self.sample_num + 1) % 4095;
        let data: i32 = self.inner.read::<u32>(self.bits_per_sample as u32)? as i32;
        self.left_step = Self::STEP_TABLE[self.left_step_index as usize];
        // (data + 0.5) * step / 2^(bits_per_sample - 2)
        // Data is sign-magnitude, NOT two's complement.
        // TODO(Herschel): Other implementations use some bit-tricks for this.
        let sign_mask = 1 << (self.bits_per_sample - 1);
        let magnitude = data & !sign_mask;
        let delta = (2 * magnitude + 1) * self.left_step / sign_mask;
        if (data & sign_mask) != 0 {
            self.left_sample -= delta;
        } else {
            self.left_sample += delta;
        }
        if self.left_sample < -32768 {
            self.left_sample = 32768;
        } else if self.left_sample > 32767 {
            self.left_sample = 32767;
        }
        let i = magnitude as usize;
        self.left_step_index += Self::INDEX_TABLE[self.bits_per_sample - 2][i];
        if self.left_step_index < 0 {
            self.left_step_index = 0;
        } else if self.left_step_index >= Self::STEP_TABLE.len() as i16 {
            self.left_step_index = Self::STEP_TABLE.len() as i16 - 1;
        }
        if self.is_stereo {
            let data = self.inner.read::<u32>(self.bits_per_sample as u32)? as i32;
            self.right_step = Self::STEP_TABLE[self.right_step_index as usize];
            let sign_mask = 1 << (self.bits_per_sample - 1);
            let magnitude = data & !sign_mask;
            let delta = (2 * magnitude + 1) * self.right_step / sign_mask;
            if (data & sign_mask) != 0 {
                self.right_sample -= delta;
            } else {
                self.right_sample += delta;
            }
            if self.right_sample < -32768 {
                self.right_sample = 32768;
            } else if self.right_sample > 32767 {
                self.right_sample = 32767;
            }
            let i = magnitude as usize;
            self.right_step_index += Self::INDEX_TABLE[self.bits_per_sample - 2][i];
            if self.right_step_index < 0 {
                self.right_step_index = 0;
            } else if self.right_step_index >= Self::STEP_TABLE.len() as i16 {
                self.right_step_index = Self::STEP_TABLE.len() as i16 - 1;
            }
        }
        Ok(())
    }
}
impl<R: Read> Iterator for AdpcmDecoder<R> {
    type Item = i16;
    fn next(&mut self) -> Option<i16> {
        if self.cur_channel >= if self.is_stereo { 2 } else { 1 } {
            self.next_sample().ok()?;
        }
        let sample = if self.cur_channel == 0 {
            self.left_sample
        } else {
            self.right_sample
        };
        self.cur_channel += 1;
        Some(sample as i16)
    }
}
impl<R: std::io::Read> Decoder for AdpcmDecoder<R> {
    #[inline]
    fn num_channels(&self) -> u8 {
        if self.is_stereo {
            2
        } else {
            1
        }
    }
    #[inline]
    fn sample_rate(&self) -> u16 {
        self.sample_rate
    }
}

View File

@ -1,124 +1,124 @@
#[cfg(feature = "minimp3")] #[cfg(feature = "minimp3")]
#[allow(dead_code)] #[allow(dead_code)]
pub struct Mp3Decoder<R: std::io::Read> { pub struct Mp3Decoder<R: std::io::Read> {
decoder: minimp3::Decoder<R>, decoder: minimp3::Decoder<R>,
sample_rate: u32, sample_rate: u32,
num_channels: u16, num_channels: u16,
cur_frame: minimp3::Frame, cur_frame: minimp3::Frame,
cur_sample: usize, cur_sample: usize,
num_samples: usize, num_samples: usize,
} }
#[cfg(feature = "minimp3")] #[cfg(feature = "minimp3")]
impl<R: std::io::Read> Mp3Decoder<R> { impl<R: std::io::Read> Mp3Decoder<R> {
pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self { pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self {
Mp3Decoder { Mp3Decoder {
decoder: minimp3::Decoder::new(reader), decoder: minimp3::Decoder::new(reader),
num_channels, num_channels,
sample_rate, sample_rate,
cur_frame: unsafe { std::mem::zeroed::<minimp3::Frame>() }, cur_frame: unsafe { std::mem::zeroed::<minimp3::Frame>() },
cur_sample: 0, cur_sample: 0,
num_samples: 0, num_samples: 0,
} }
} }
fn next_frame(&mut self) { fn next_frame(&mut self) {
if let Ok(frame) = self.decoder.next_frame() { if let Ok(frame) = self.decoder.next_frame() {
self.num_samples = frame.data.len(); self.num_samples = frame.data.len();
self.cur_frame = frame; self.cur_frame = frame;
} else { } else {
self.num_samples = 0; self.num_samples = 0;
} }
self.cur_sample = 0; self.cur_sample = 0;
} }
} }
#[cfg(feature = "minimp3")] #[cfg(feature = "minimp3")]
impl<R: std::io::Read> Iterator for Mp3Decoder<R> { impl<R: std::io::Read> Iterator for Mp3Decoder<R> {
type Item = i16; type Item = i16;
#[inline] #[inline]
fn next(&mut self) -> Option<i16> { fn next(&mut self) -> Option<i16> {
if self.cur_sample >= self.num_samples { if self.cur_sample >= self.num_samples {
self.next_frame(); self.next_frame();
} }
if self.num_samples > 0 { if self.num_samples > 0 {
let sample = self.cur_frame.data[self.cur_sample]; let sample = self.cur_frame.data[self.cur_sample];
self.cur_sample += 1; self.cur_sample += 1;
Some(sample) Some(sample)
} else { } else {
None None
} }
} }
} }
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))] #[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
pub struct Mp3Decoder<R: std::io::Read> { pub struct Mp3Decoder<R: std::io::Read> {
decoder: puremp3::Mp3Decoder<R>, decoder: puremp3::Mp3Decoder<R>,
sample_rate: u32, sample_rate: u32,
num_channels: u16, num_channels: u16,
cur_frame: puremp3::Frame, cur_frame: puremp3::Frame,
cur_sample: usize, cur_sample: usize,
cur_channel: usize, cur_channel: usize,
} }
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))] #[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
impl<R: std::io::Read> Mp3Decoder<R> { impl<R: std::io::Read> Mp3Decoder<R> {
pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self { pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self {
Mp3Decoder { Mp3Decoder {
decoder: puremp3::Mp3Decoder::new(reader), decoder: puremp3::Mp3Decoder::new(reader),
num_channels, num_channels,
sample_rate, sample_rate,
cur_frame: unsafe { std::mem::zeroed::<puremp3::Frame>() }, cur_frame: unsafe { std::mem::zeroed::<puremp3::Frame>() },
cur_sample: 0, cur_sample: 0,
cur_channel: 0, cur_channel: 0,
} }
} }
fn next_frame(&mut self) { fn next_frame(&mut self) {
if let Ok(frame) = self.decoder.next_frame() { if let Ok(frame) = self.decoder.next_frame() {
self.cur_frame = frame; self.cur_frame = frame;
} else { } else {
self.cur_frame.num_samples = 0; self.cur_frame.num_samples = 0;
} }
self.cur_sample = 0; self.cur_sample = 0;
self.cur_channel = 0; self.cur_channel = 0;
} }
} }
impl<R: std::io::Read> super::Decoder for Mp3Decoder<R> { impl<R: std::io::Read> super::Decoder for Mp3Decoder<R> {
#[inline] #[inline]
fn num_channels(&self) -> u8 { fn num_channels(&self) -> u8 {
self.num_channels as u8 self.num_channels as u8
} }
#[inline] #[inline]
fn sample_rate(&self) -> u16 { fn sample_rate(&self) -> u16 {
self.sample_rate as u16 self.sample_rate as u16
} }
} }
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))] #[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
impl<R: std::io::Read> Iterator for Mp3Decoder<R> { impl<R: std::io::Read> Iterator for Mp3Decoder<R> {
type Item = i16; type Item = i16;
#[inline] #[inline]
fn next(&mut self) -> Option<i16> { fn next(&mut self) -> Option<i16> {
if self.cur_sample >= self.cur_frame.num_samples { if self.cur_sample >= self.cur_frame.num_samples {
self.next_frame(); self.next_frame();
} }
if self.cur_frame.num_samples > 0 { if self.cur_frame.num_samples > 0 {
let sample = self.cur_frame.samples[self.cur_channel][self.cur_sample]; let sample = self.cur_frame.samples[self.cur_channel][self.cur_sample];
self.cur_channel += 1; self.cur_channel += 1;
if self.cur_channel >= usize::from(self.num_channels) { if self.cur_channel >= usize::from(self.num_channels) {
self.cur_channel = 0; self.cur_channel = 0;
self.cur_sample += 1; self.cur_sample += 1;
} }
Some((sample * 32767.0) as i16) Some((sample * 32767.0) as i16)
} else { } else {
None None
} }
} }
} }

View File

@@ -1,278 +1,278 @@
pub use crate::{transform::Transform, Color};
use std::io::Read;
pub use swf;
pub trait RenderBackend {
    fn set_viewport_dimensions(&mut self, width: u32, height: u32);
    fn register_shape(&mut self, shape: &swf::Shape) -> ShapeHandle;
    fn register_glyph_shape(&mut self, shape: &swf::Glyph) -> ShapeHandle;
    fn register_bitmap_jpeg(
        &mut self,
        id: swf::CharacterId,
        data: &[u8],
        jpeg_tables: &[u8],
    ) -> BitmapHandle;
    fn register_bitmap_jpeg_2(&mut self, id: swf::CharacterId, data: &[u8]) -> BitmapHandle;
    fn register_bitmap_jpeg_3(
        &mut self,
        id: swf::CharacterId,
        jpeg_data: &[u8],
        alpha_data: &[u8],
    ) -> BitmapHandle;
    fn register_bitmap_png(&mut self, swf_tag: &swf::DefineBitsLossless) -> BitmapHandle;
    fn begin_frame(&mut self);
    fn clear(&mut self, color: Color);
    fn render_shape(&mut self, shape: ShapeHandle, transform: &Transform);
    fn end_frame(&mut self);
    fn draw_pause_overlay(&mut self);
    fn draw_letterbox(&mut self, letterbox: Letterbox);
}
#[derive(Copy, Clone, Debug)]
pub struct ShapeHandle(pub usize);
#[derive(Copy, Clone, Debug)]
pub struct BitmapHandle(pub usize);
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum Letterbox {
    None,
    Letterbox(f32),
    Pillarbox(f32),
}
pub struct NullRenderer;
impl RenderBackend for NullRenderer {
    fn set_viewport_dimensions(&mut self, _width: u32, _height: u32) {}
    fn register_shape(&mut self, _shape: &swf::Shape) -> ShapeHandle {
        ShapeHandle(0)
    }
    fn register_glyph_shape(&mut self, _shape: &swf::Glyph) -> ShapeHandle {
        ShapeHandle(0)
    }
    fn register_bitmap_jpeg(
        &mut self,
        _id: swf::CharacterId,
        _data: &[u8],
        _jpeg_tables: &[u8],
    ) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_jpeg_2(&mut self, _id: swf::CharacterId, _data: &[u8]) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_jpeg_3(
        &mut self,
        _id: swf::CharacterId,
        _data: &[u8],
        _alpha_data: &[u8],
    ) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_png(&mut self, _swf_tag: &swf::DefineBitsLossless) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn begin_frame(&mut self) {}
    fn end_frame(&mut self) {}
    fn clear(&mut self, _color: Color) {}
    fn render_shape(&mut self, _shape: ShapeHandle, _transform: &Transform) {}
    fn draw_pause_overlay(&mut self) {}
    fn draw_letterbox(&mut self, _letterbox: Letterbox) {}
}
pub fn glue_swf_jpeg_to_tables(jpeg_tables: &[u8], jpeg_data: &[u8]) -> Vec<u8> {
    let mut full_jpeg = Vec::with_capacity(jpeg_tables.len() + jpeg_data.len() - 4);
    full_jpeg.extend_from_slice(&jpeg_tables[..jpeg_tables.len() - 2]);
    full_jpeg.extend_from_slice(&jpeg_data[2..]);
    full_jpeg
}
/// Removes potential invalid JPEG data from SWF DefineBitsJPEG tags.
///
/// SWF19 p.138:
/// "Before version 8 of the SWF file format, SWF files could contain an erroneous header of 0xFF, 0xD9, 0xFF, 0xD8 before the JPEG SOI marker."
/// These bytes need to be removed for the JPEG to decode properly.
pub fn remove_invalid_jpeg_data(mut data: &[u8]) -> std::borrow::Cow<[u8]> {
    // TODO: Might be better to return an Box<Iterator<Item=u8>> instead of a Cow here,
    // where the spliced iter is a data[..n].chain(data[n+4..])?
    if data[..4] == [0xFF, 0xD9, 0xFF, 0xD8] {
        data = &data[4..];
    }
    if let Some(pos) = (0..data.len() - 4).find(|&n| data[n..n + 4] == [0xFF, 0xD9, 0xFF, 0xD8]) {
        let mut out_data = Vec::with_capacity(data.len() - 4);
        out_data.extend_from_slice(&data[..pos]);
        out_data.extend_from_slice(&data[pos + 4..]);
        std::borrow::Cow::from(out_data)
    } else {
        std::borrow::Cow::Borrowed(data)
    }
}
/// Decodes a JPEG with optional alpha data.
///
pub fn define_bits_jpeg_to_rgba(
    jpeg_data: &[u8],
    alpha_data: &[u8],
) -> Result<(u32, u32, Vec<u8>), Box<dyn std::error::Error>> {
    let jpeg_data = remove_invalid_jpeg_data(jpeg_data);
    let mut decoder = jpeg_decoder::Decoder::new(&jpeg_data[..]);
    decoder.read_info().unwrap();
    let metadata = decoder.info().unwrap();
    let decoded_data = decoder.decode().expect("failed to decode image");
    // Decompress the alpha data (DEFLATE compression).
    let alpha_data = {
        let mut data = vec![];
        let mut decoder = libflate::zlib::Decoder::new(alpha_data)?;
        decoder.read_to_end(&mut data)?;
        data
    };
    let mut rgba = Vec::with_capacity((decoded_data.len() / 3) * 4);
    let mut i = 0;
    let mut a = 0;
    while i < decoded_data.len() {
        rgba.push(decoded_data[i]);
        rgba.push(decoded_data[i + 1]);
        rgba.push(decoded_data[i + 2]);
        rgba.push(alpha_data[a]);
        i += 3;
        a += 1;
    }
    Ok((metadata.width.into(), metadata.height.into(), rgba))
}
/// Decodes the bitmap data in DefineBitsLossless tag into RGBA.
/// DefineBitsLossless is Zlib encoded pixel data (similar to PNG), possibly
/// palletized.
pub fn define_bits_lossless_to_rgba(
    swf_tag: &swf::DefineBitsLossless,
) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    // Decompress the image data (DEFLATE compression).
    let mut decoded_data = {
        let mut data = vec![];
        let mut decoder = libflate::zlib::Decoder::new(&swf_tag.data[..])?;
        decoder.read_to_end(&mut data)?;
        data
    };
    // Swizzle/de-palettize the bitmap.
    let out_data = match (swf_tag.version, swf_tag.format) {
        (1, swf::BitmapFormat::Rgb15) => unimplemented!("15-bit PNG"),
        (1, swf::BitmapFormat::Rgb32) => {
            let mut i = 0;
            while i < decoded_data.len() {
                decoded_data[i] = decoded_data[i + 1];
                decoded_data[i + 1] = decoded_data[i + 2];
                decoded_data[i + 2] = decoded_data[i + 3];
                decoded_data[i + 3] = 0xff;
                i += 4;
            }
            decoded_data
        }
        (2, swf::BitmapFormat::Rgb32) => {
            let mut i = 0;
            while i < decoded_data.len() {
                let alpha = decoded_data[i];
                decoded_data[i] = decoded_data[i + 1];
                decoded_data[i + 1] = decoded_data[i + 2];
                decoded_data[i + 2] = decoded_data[i + 3];
                decoded_data[i + 3] = alpha;
                i += 4;
            }
            decoded_data
        }
        (1, swf::BitmapFormat::ColorMap8) => {
            let mut i = 0;
            let padded_width = (swf_tag.width + 0b11) & !0b11;
            let mut palette = Vec::with_capacity(swf_tag.num_colors as usize + 1);
            for _ in 0..=swf_tag.num_colors {
                palette.push(Color {
                    r: decoded_data[i],
                    g: decoded_data[i + 1],
                    b: decoded_data[i + 2],
                    a: 255,
                });
                i += 3;
            }
            let mut out_data = vec![];
            for _ in 0..swf_tag.height {
                for _ in 0..swf_tag.width {
                    let entry = decoded_data[i] as usize;
                    if entry < palette.len() {
                        let color = &palette[entry];
                        out_data.push(color.r);
                        out_data.push(color.g);
                        out_data.push(color.b);
                        out_data.push(color.a);
                    } else {
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(255);
                    }
                    i += 1;
                }
                i += (padded_width - swf_tag.width) as usize;
            }
            out_data
        }
        (2, swf::BitmapFormat::ColorMap8) => {
            let mut i = 0;
            let padded_width = (swf_tag.width + 0b11) & !0b11;
            let mut palette = Vec::with_capacity(swf_tag.num_colors as usize + 1);
            for _ in 0..=swf_tag.num_colors {
                palette.push(Color {
                    r: decoded_data[i],
                    g: decoded_data[i + 1],
                    b: decoded_data[i + 2],
                    a: decoded_data[i + 3],
                });
                i += 4;
            }
            let mut out_data = vec![];
            for _ in 0..swf_tag.height {
                for _ in 0..swf_tag.width {
                    let entry = decoded_data[i] as usize;
                    if entry < palette.len() {
                        let color = &palette[entry];
                        out_data.push(color.r);
                        out_data.push(color.g);
                        out_data.push(color.b);
                        out_data.push(color.a);
                    } else {
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                    }
                    i += 1;
                }
                i += (padded_width - swf_tag.width) as usize;
            }
            out_data
        }
        _ => unimplemented!("{:?} {:?}", swf_tag.version, swf_tag.format),
    };
    Ok(out_data)
}
/// Images in SWFs are stored with premultiplied alpha.
/// Converts RGBA premultiplied alpha to standard RBGA.
pub fn unmultiply_alpha_rgba(rgba: &mut [u8]) {
    rgba.chunks_exact_mut(4).for_each(|rgba| {
        if rgba[3] > 0 {
            let a = f32::from(rgba[3]) / 255.0;
            rgba[0] = (f32::from(rgba[0]) / a) as u8;
            rgba[1] = (f32::from(rgba[1]) / a) as u8;
            rgba[2] = (f32::from(rgba[2]) / a) as u8;
        }
    })
}

View File

@@ -1,74 +1,74 @@
use crate::matrix::Matrix;
use swf::Twips;
#[derive(Clone, Debug)]
pub struct BoundingBox {
    pub x_min: Twips,
    pub y_min: Twips,
    pub x_max: Twips,
    pub y_max: Twips,
    pub valid: bool,
}
impl BoundingBox {
    pub fn transform(&self, matrix: &Matrix) -> Self {
        if !self.valid {
            return Self::default();
        }
        use std::cmp::{max, min};
        let pt0 = *matrix * (self.x_min, self.y_min);
        let pt1 = *matrix * (self.x_min, self.y_max);
        let pt2 = *matrix * (self.x_max, self.y_min);
        let pt3 = *matrix * (self.x_max, self.y_max);
        BoundingBox {
            x_min: min(pt0.0, min(pt1.0, min(pt2.0, pt3.0))),
            y_min: min(pt0.1, min(pt1.1, min(pt2.1, pt3.1))),
            x_max: max(pt0.0, max(pt1.0, max(pt2.0, pt3.0))),
            y_max: max(pt0.1, max(pt1.1, max(pt2.1, pt3.1))),
            valid: true,
        }
    }
    pub fn intersects(&self, other: &BoundingBox) -> bool {
        if !self.valid || !other.valid {
            return false;
        }
        use std::cmp::{max, min};
        let x_min = max(self.x_min, other.x_min);
        let y_min = max(self.y_min, other.y_min);
        let x_max = min(self.x_max, other.x_max);
        let y_max = min(self.y_max, other.y_max);
        x_min <= x_max && y_min <= y_max
    }
    pub fn contains(&self, (x, y): (Twips, Twips)) -> bool {
        self.valid && x >= self.x_min && x <= self.x_max && y >= self.y_min && y <= self.y_max
    }
}
impl Default for BoundingBox {
    fn default() -> Self {
        Self {
            x_min: Default::default(),
            y_min: Default::default(),
            x_max: Default::default(),
            y_max: Default::default(),
            valid: false,
        }
    }
}
impl From<swf::Rectangle> for BoundingBox {
    fn from(rect: swf::Rectangle) -> Self {
        Self {
            x_min: rect.x_min,
            y_min: rect.y_min,
            x_max: rect.x_max,
            y_max: rect.y_max,
            valid: true,
        }
    }
}

View File

@@ -1,301 +1,301 @@
use crate::display_object::{DisplayObject, DisplayObjectBase};
use crate::events::ButtonEvent;
use crate::player::{RenderContext, UpdateContext};
use crate::prelude::*;
use std::collections::BTreeMap;
#[derive(Clone)]
pub struct Button<'gc> {
    base: DisplayObjectBase<'gc>,
    static_data: gc_arena::Gc<'gc, ButtonStatic>,
    state: ButtonState,
    children: [BTreeMap<Depth, DisplayNode<'gc>>; 4],
    tracking: ButtonTracking,
}
const UP_STATE: usize = 0;
const OVER_STATE: usize = 1;
const DOWN_STATE: usize = 2;
const HIT_STATE: usize = 3;
impl<'gc> Button<'gc> {
    pub fn from_swf_tag(
        button: &swf::Button,
        library: &crate::library::Library<'gc>,
        gc_context: gc_arena::MutationContext<'gc, '_>,
    ) -> Self {
        use swf::ButtonState;
        let mut children = [
            BTreeMap::new(),
            BTreeMap::new(),
            BTreeMap::new(),
            BTreeMap::new(),
        ];
        for record in &button.records {
            match library.instantiate_display_object(record.id, gc_context) {
                Ok(child) => {
                    child
                        .write(gc_context)
                        .set_matrix(&record.matrix.clone().into());
                    child
                        .write(gc_context)
                        .set_color_transform(&record.color_transform.clone().into());
                    for state in &record.states {
                        let i = match state {
                            ButtonState::Up => UP_STATE,
                            ButtonState::Over => OVER_STATE,
                            ButtonState::Down => DOWN_STATE,
                            ButtonState::HitTest => HIT_STATE,
                        };
                        children[i].insert(record.depth, child);
                    }
                }
                Err(error) => {
                    log::error!(
                        "Button ID {}: could not instantiate child ID {}: {}",
                        button.id,
                        record.id,
                        error
                    );
                }
            }
        }
        let mut actions = vec![];
        for action in &button.actions {
            let action_data = crate::tag_utils::SwfSlice {
                data: std::sync::Arc::new(action.action_data.clone()),
                start: 0,
                end: action.action_data.len(),
            };
            for condition in &action.conditions {
                let button_action = ButtonAction {
                    action_data: action_data.clone(),
                    condition: *condition,
                    key_code: action.key_code,
                };
                actions.push(button_action);
            }
        }
        let static_data = ButtonStatic {
            id: button.id,
            actions,
        };
        Button {
            base: Default::default(),
            static_data: gc_arena::Gc::allocate(gc_context, static_data),
            children,
            state: self::ButtonState::Up,
            tracking: if button.is_track_as_menu {
                ButtonTracking::Menu
            } else {
                ButtonTracking::Push
            },
        }
    }
    fn children_in_state(
        &self,
        state: ButtonState,
    ) -> impl std::iter::DoubleEndedIterator<Item = &DisplayNode<'gc>> {
        let i = match state {
            ButtonState::Up => UP_STATE,
            ButtonState::Over => OVER_STATE,
            ButtonState::Down => DOWN_STATE,
            ButtonState::Hit => HIT_STATE,
        };
        self.children[i].values()
    }
    fn children_in_state_mut(
        &mut self,
        state: ButtonState,
    ) -> impl std::iter::DoubleEndedIterator<Item = &mut DisplayNode<'gc>> {
        let i = match state {
            ButtonState::Up => UP_STATE,
            ButtonState::Over => OVER_STATE,
            ButtonState::Down => DOWN_STATE,
            ButtonState::Hit => HIT_STATE,
        };
        self.children[i].values_mut()
    }
    pub fn handle_button_event(
        &mut self,
        context: &mut crate::player::UpdateContext<'_, 'gc, '_>,
        event: ButtonEvent,
    ) {
        let new_state = match event {
            ButtonEvent::RollOut => ButtonState::Up,
            ButtonEvent::RollOver => ButtonState::Over,
            ButtonEvent::Press => ButtonState::Down,
            ButtonEvent::Release => ButtonState::Over,
            ButtonEvent::KeyPress(key) => {
                self.run_actions(context, swf::ButtonActionCondition::KeyPress, Some(key));
                self.state
            }
        };
        match (self.state, new_state) {
            (ButtonState::Up, ButtonState::Over) => {
                self.run_actions(context, swf::ButtonActionCondition::IdleToOverUp, None);
            }
            (ButtonState::Over, ButtonState::Up) => {
                self.run_actions(context, swf::ButtonActionCondition::OverUpToIdle, None);
            }
            (ButtonState::Over, ButtonState::Down) => {
                self.run_actions(context, swf::ButtonActionCondition::OverUpToOverDown, None);
            }
            (ButtonState::Down, ButtonState::Over) => {
                self.run_actions(context, swf::ButtonActionCondition::OverDownToOverUp, None);
            }
            _ => (),
        }
        self.state = new_state;
    }
    fn run_actions(
        &mut self,
        context: &mut UpdateContext<'_, 'gc, '_>,
        condition: swf::ButtonActionCondition,
        key_code: Option<u8>,
    ) {
        if let Some(parent) = self.parent() {
            for action in &self.static_data.actions {
                if action.condition == condition && action.key_code == key_code {
                    // Note that AVM1 buttons run actions relative to their parent, not themselves.
                    context.actions.push((parent, action.action_data.clone()));
                }
            }
        }
    }
}
impl<'gc> DisplayObject<'gc> for Button<'gc> {
    impl_display_object!(base);
    fn run_frame(&mut self, context: &mut UpdateContext<'_, 'gc, '_>) {
        // TODO: Set parent for all children. Yuck... Do this on creation instead.
        for state in &mut self.children {
            for child in state.values_mut() {
                child
                    .write(context.gc_context)
                    .set_parent(Some(context.active_clip));
            }
        }
        for child in self.children_in_state_mut(self.state) {
            child
                .write(context.gc_context)
                .set_parent(Some(context.active_clip));
            context.active_clip = *child;
            child.write(context.gc_context).run_frame(context);
        }
    }
    fn run_post_frame(&mut self, context: &mut UpdateContext<'_, 'gc, '_>) {
        for child in self.children_in_state_mut(self.state) {
            context.active_clip = *child;
            child.write(context.gc_context).run_post_frame(context);
        }
    }
    fn render(&self, context: &mut RenderContext<'_, 'gc>) {
        context.transform_stack.push(self.transform());
        for child in self.children_in_state(self.state) {
            child.read().render(context);
        }
        context.transform_stack.pop();
    }
    fn hit_test(&self, point: (Twips, Twips)) -> bool {
        // Use hit state to determine hit area; otherwise use current state.
        let hit_state = if !self.children[HIT_STATE].is_empty() {
            ButtonState::Hit
        } else {
            self.state
        };
        for child in self.children_in_state(hit_state).rev() {
            if child.read().world_bounds().contains(point) {
                return true;
            }
        }
        false
    }
    fn mouse_pick(
        &self,
        self_node: DisplayNode<'gc>,
        point: (Twips, Twips),
    ) -> Option<DisplayNode<'gc>> {
        // The button is hovered if the mouse is over any child nodes.
        if self.hit_test(point) {
            Some(self_node)
        } else {
            None
        }
    }
    fn as_button(&self) -> Option<&Self> {
        Some(self)
    }
    fn as_button_mut(&mut self) -> Option<&mut Self> {
        Some(self)
    }
}
unsafe impl<'gc> gc_arena::Collect for Button<'gc> {
    #[inline]
    fn trace(&self, cc: gc_arena::CollectionContext) {
        for state in &self.children {
            for child in state.values() {
                child.trace(cc);
            }
        }
        self.base.trace(cc);
        self.static_data.trace(cc);
    }
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
enum ButtonState {
    Up,
    Over,
    Down,
    Hit,
}
#[derive(Clone)]
struct ButtonAction {
    action_data: crate::tag_utils::SwfSlice,
    condition: swf::ButtonActionCondition,
    key_code: Option<u8>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ButtonTracking {
    Push,
    Menu,
}
/// Static data shared between all instances of a button.
#[allow(dead_code)]
#[derive(Clone)]
struct ButtonStatic {
    id: CharacterId,
    actions: Vec<ButtonAction>,
}
unsafe impl<'gc> gc_arena::Collect for ButtonStatic {
    #[inline]
    fn needs_trace() -> bool {
        false
    }
}

View File

@@ -1,88 +1,88 @@
#[derive(Copy, Clone, Debug)]
pub struct ColorTransform {
    pub r_mult: f32,
    pub g_mult: f32,
    pub b_mult: f32,
    pub a_mult: f32,
    pub r_add: f32,
    pub g_add: f32,
    pub b_add: f32,
    pub a_add: f32,
}
impl From<swf::ColorTransform> for ColorTransform {
    fn from(color_transform: swf::ColorTransform) -> ColorTransform {
        ColorTransform {
            r_mult: color_transform.r_multiply,
            g_mult: color_transform.g_multiply,
            b_mult: color_transform.b_multiply,
            a_mult: color_transform.a_multiply,
            r_add: f32::from(color_transform.r_add) / 255.0,
            g_add: f32::from(color_transform.g_add) / 255.0,
            b_add: f32::from(color_transform.b_add) / 255.0,
            a_add: f32::from(color_transform.a_add) / 255.0,
        }
    }
}
impl ColorTransform {
    #[allow(clippy::float_cmp)]
    pub fn is_identity(&self) -> bool {
        self.r_mult == 1.0
            && self.g_mult == 1.0
            && self.b_mult == 1.0
            && self.a_mult == 1.0
            && self.r_add == 0.0
            && self.g_add == 0.0
            && self.b_add == 0.0
            && self.a_add == 0.0
    }
}
impl std::default::Default for ColorTransform {
    fn default() -> ColorTransform {
        ColorTransform {
            r_mult: 1.0,
            b_mult: 1.0,
            g_mult: 1.0,
            a_mult: 1.0,
            r_add: 0.0,
            b_add: 0.0,
            g_add: 0.0,
            a_add: 0.0,
        }
    }
}
impl std::ops::Mul for ColorTransform {
    type Output = Self;
    fn mul(self, rhs: Self) -> Self {
        ColorTransform {
            r_mult: self.r_mult * rhs.r_mult,
            g_mult: self.g_mult * rhs.g_mult,
            b_mult: self.b_mult * rhs.b_mult,
            a_mult: self.a_mult * rhs.a_mult,
            r_add: self.r_mult * rhs.r_add + self.r_add,
            g_add: self.g_mult * rhs.g_add + self.g_add,
            b_add: self.b_mult * rhs.b_add + self.b_add,
            a_add: self.a_mult * rhs.a_add + self.a_add,
        }
    }
}
impl std::ops::MulAssign for ColorTransform {
    fn mul_assign(&mut self, rhs: Self) {
        *self = ColorTransform {
            r_mult: self.r_mult * rhs.r_mult,
            g_mult: self.g_mult * rhs.g_mult,
            b_mult: self.b_mult * rhs.b_mult,
            a_mult: self.a_mult * rhs.a_mult,
            r_add: self.r_mult * rhs.r_add + self.r_add,
            g_add: self.g_mult * rhs.b_add + self.g_add,
            b_add: self.b_mult * rhs.g_add + self.b_add,
            a_add: self.a_mult * rhs.a_add + self.a_add,
        }
    }
}

View File

@ -1,29 +1,29 @@
#[allow(clippy::enum_variant_names)] #[allow(clippy::enum_variant_names)]
#[derive(Debug)] #[derive(Debug)]
pub enum PlayerEvent { pub enum PlayerEvent {
MouseMove { x: f64, y: f64 }, MouseMove { x: f64, y: f64 },
MouseUp { x: f64, y: f64 }, MouseUp { x: f64, y: f64 },
MouseDown { x: f64, y: f64 }, MouseDown { x: f64, y: f64 },
MouseLeft, MouseLeft,
} }
/// The events that an AVM1 button can fire. /// The events that an AVM1 button can fire.
/// ///
/// In Flash, these are created using `on` code on the button instance: /// In Flash, these are created using `on` code on the button instance:
/// ```ignore /// ```ignore
/// on(release) { /// on(release) {
/// trace("Button clicked"); /// trace("Button clicked");
/// } /// }
/// ``` /// ```
#[derive(Debug)] #[derive(Debug)]
pub enum ButtonEvent { pub enum ButtonEvent {
Press, Press,
Release, Release,
RollOut, RollOut,
RollOver, RollOver,
KeyPress(KeyCode), KeyPress(KeyCode),
} }
/// Flash virtual keycode. /// Flash virtual keycode.
/// TODO: This will eventually move to a separate module. /// TODO: This will eventually move to a separate module.
pub type KeyCode = u8; pub type KeyCode = u8;

View File

@ -1,411 +1,411 @@
use crate::backend::render::{RenderBackend, ShapeHandle}; use crate::backend::render::{RenderBackend, ShapeHandle};
use crate::display_object::{DisplayObject, DisplayObjectBase}; use crate::display_object::{DisplayObject, DisplayObjectBase};
use crate::player::{RenderContext, UpdateContext}; use crate::player::{RenderContext, UpdateContext};
use crate::prelude::*; use crate::prelude::*;
use std::collections::HashMap; use std::collections::HashMap;
use swf::Twips; use swf::Twips;
#[derive(Clone)] #[derive(Clone)]
pub struct MorphShape<'gc> { pub struct MorphShape<'gc> {
base: DisplayObjectBase<'gc>, base: DisplayObjectBase<'gc>,
start: swf::MorphShape, start: swf::MorphShape,
end: swf::MorphShape, end: swf::MorphShape,
frames: HashMap<u16, ShapeHandle>, frames: HashMap<u16, ShapeHandle>,
ratio: u16, ratio: u16,
} }
impl<'gc> MorphShape<'gc> { impl<'gc> MorphShape<'gc> {
pub fn from_swf_tag(swf_tag: &swf::DefineMorphShape, renderer: &mut dyn RenderBackend) -> Self { pub fn from_swf_tag(swf_tag: &swf::DefineMorphShape, renderer: &mut dyn RenderBackend) -> Self {
// Convert the MorphShape into a normal Shape. // Convert the MorphShape into a normal Shape.
// TODO(Herschel): impl From in swf crate? // TODO(Herschel): impl From in swf crate?
let mut morph_shape = Self { let mut morph_shape = Self {
start: swf_tag.start.clone(), start: swf_tag.start.clone(),
end: swf_tag.end.clone(), end: swf_tag.end.clone(),
base: Default::default(), base: Default::default(),
frames: HashMap::new(), frames: HashMap::new(),
ratio: 0, ratio: 0,
}; };
morph_shape.register_ratio(renderer, 0); morph_shape.register_ratio(renderer, 0);
morph_shape.register_ratio(renderer, 65535); morph_shape.register_ratio(renderer, 65535);
morph_shape morph_shape
} }
pub fn register_ratio(&mut self, renderer: &mut dyn RenderBackend, ratio: u16) { pub fn register_ratio(&mut self, renderer: &mut dyn RenderBackend, ratio: u16) {
if self.frames.contains_key(&ratio) { if self.frames.contains_key(&ratio) {
// Already registered. // Already registered.
return; return;
} }
// Interpolate MorphShapes into a Shape. // Interpolate MorphShapes into a Shape.
use swf::{FillStyle, Gradient, LineStyle, ShapeRecord, ShapeStyles}; use swf::{FillStyle, Gradient, LineStyle, ShapeRecord, ShapeStyles};
// Ratio 0 shows the start shape; ratio 65535 shows the end shape. // Ratio 0 shows the start shape; ratio 65535 shows the end shape.
let b = f32::from(ratio) / 65535.0; let b = f32::from(ratio) / 65535.0;
let a = 1.0 - b; let a = 1.0 - b;
let fill_styles: Vec<FillStyle> = self let fill_styles: Vec<FillStyle> = self
.start .start
.fill_styles .fill_styles
.iter() .iter()
.zip(self.end.fill_styles.iter()) .zip(self.end.fill_styles.iter())
.map(|(start, end)| match (start, end) { .map(|(start, end)| match (start, end) {
(FillStyle::Color(start), FillStyle::Color(end)) => FillStyle::Color(Color { (FillStyle::Color(start), FillStyle::Color(end)) => FillStyle::Color(Color {
r: (a * f32::from(start.r) + b * f32::from(end.r)) as u8, r: (a * f32::from(start.r) + b * f32::from(end.r)) as u8,
g: (a * f32::from(start.g) + b * f32::from(end.g)) as u8, g: (a * f32::from(start.g) + b * f32::from(end.g)) as u8,
b: (a * f32::from(start.b) + b * f32::from(end.b)) as u8, b: (a * f32::from(start.b) + b * f32::from(end.b)) as u8,
a: (a * f32::from(start.a) + b * f32::from(end.a)) as u8, a: (a * f32::from(start.a) + b * f32::from(end.a)) as u8,
}), }),
(FillStyle::LinearGradient(start), FillStyle::LinearGradient(end)) => { (FillStyle::LinearGradient(start), FillStyle::LinearGradient(end)) => {
let records: Vec<swf::GradientRecord> = start let records: Vec<swf::GradientRecord> = start
.records .records
.iter() .iter()
.zip(end.records.iter()) .zip(end.records.iter())
.map(|(start, end)| swf::GradientRecord { .map(|(start, end)| swf::GradientRecord {
ratio: (f32::from(start.ratio) * a + f32::from(end.ratio) * b) as u8, ratio: (f32::from(start.ratio) * a + f32::from(end.ratio) * b) as u8,
color: Color { color: Color {
r: (a * f32::from(start.color.r) + b * f32::from(end.color.r)) r: (a * f32::from(start.color.r) + b * f32::from(end.color.r))
as u8, as u8,
g: (a * f32::from(start.color.g) + b * f32::from(end.color.g)) g: (a * f32::from(start.color.g) + b * f32::from(end.color.g))
as u8, as u8,
b: (a * f32::from(start.color.b) + b * f32::from(end.color.b)) b: (a * f32::from(start.color.b) + b * f32::from(end.color.b))
as u8, as u8,
a: (a * f32::from(start.color.a) + b * f32::from(end.color.a)) a: (a * f32::from(start.color.a) + b * f32::from(end.color.a))
as u8, as u8,
}, },
}) })
.collect(); .collect();
FillStyle::LinearGradient(Gradient { FillStyle::LinearGradient(Gradient {
matrix: start.matrix.clone(), matrix: start.matrix.clone(),
spread: start.spread, spread: start.spread,
interpolation: start.interpolation, interpolation: start.interpolation,
records, records,
}) })
} }
_ => { _ => {
log::info!("Unhandled morph shape combination: {:?} {:?}", start, end); log::info!("Unhandled morph shape combination: {:?} {:?}", start, end);
start.clone() start.clone()
} }
}) })
.collect(); .collect();
let line_styles: Vec<LineStyle> = self let line_styles: Vec<LineStyle> = self
.start .start
.line_styles .line_styles
.iter() .iter()
.zip(self.end.line_styles.iter()) .zip(self.end.line_styles.iter())
.map(|(start, end)| LineStyle { .map(|(start, end)| LineStyle {
width: Twips::new( width: Twips::new(
((start.width.get() as f32) * a + (end.width.get() as f32) * b) as i32, ((start.width.get() as f32) * a + (end.width.get() as f32) * b) as i32,
), ),
color: Color { color: Color {
r: (a * f32::from(start.color.r) + b * f32::from(end.color.r)) as u8, r: (a * f32::from(start.color.r) + b * f32::from(end.color.r)) as u8,
g: (a * f32::from(start.color.g) + b * f32::from(end.color.g)) as u8, g: (a * f32::from(start.color.g) + b * f32::from(end.color.g)) as u8,
b: (a * f32::from(start.color.b) + b * f32::from(end.color.b)) as u8, b: (a * f32::from(start.color.b) + b * f32::from(end.color.b)) as u8,
a: (a * f32::from(start.color.a) + b * f32::from(end.color.a)) as u8, a: (a * f32::from(start.color.a) + b * f32::from(end.color.a)) as u8,
}, },
start_cap: start.start_cap, start_cap: start.start_cap,
end_cap: start.end_cap, end_cap: start.end_cap,
join_style: start.join_style, join_style: start.join_style,
fill_style: None, fill_style: None,
allow_scale_x: start.allow_scale_x, allow_scale_x: start.allow_scale_x,
allow_scale_y: start.allow_scale_y, allow_scale_y: start.allow_scale_y,
is_pixel_hinted: start.is_pixel_hinted, is_pixel_hinted: start.is_pixel_hinted,
allow_close: start.allow_close, allow_close: start.allow_close,
}) })
.collect(); .collect();
let mut shape = Vec::with_capacity(self.start.shape.len()); let mut shape = Vec::with_capacity(self.start.shape.len());
let mut start_iter = self.start.shape.iter(); let mut start_iter = self.start.shape.iter();
let mut end_iter = self.end.shape.iter(); let mut end_iter = self.end.shape.iter();
let mut start = start_iter.next(); let mut start = start_iter.next();
let mut end = end_iter.next(); let mut end = end_iter.next();
let mut start_x = Twips::new(0); let mut start_x = Twips::new(0);
let mut start_y = Twips::new(0); let mut start_y = Twips::new(0);
let mut end_x = Twips::new(0); let mut end_x = Twips::new(0);
let mut end_y = Twips::new(0); let mut end_y = Twips::new(0);
// TODO: Feels like this could be cleaned up a bit. // TODO: Feels like this could be cleaned up a bit.
// We step through both the start records and end records, interpolating edges pairwise. // We step through both the start records and end records, interpolating edges pairwise.
// Fill style/line style changes should only appear in the start records. // Fill style/line style changes should only appear in the start records.
// However, StyleChangeRecord move_to can appear in both start and end records, // However, StyleChangeRecord move_to can appear in both start and end records,
// and not necessarily in matching pairs; therefore, we have to keep track of the pen position // and not necessarily in matching pairs; therefore, we have to keep track of the pen position
// in case one side is missing a move_to; it will implicitly use the last pen position. // in case one side is missing a move_to; it will implicitly use the last pen position.
while let (Some(s), Some(e)) = (start, end) { while let (Some(s), Some(e)) = (start, end) {
match (s, e) { match (s, e) {
(ShapeRecord::StyleChange(start_change), ShapeRecord::StyleChange(end_change)) => { (ShapeRecord::StyleChange(start_change), ShapeRecord::StyleChange(end_change)) => {
let mut style_change = start_change.clone(); let mut style_change = start_change.clone();
if let Some((s_x, s_y)) = start_change.move_to { if let Some((s_x, s_y)) = start_change.move_to {
if let Some((e_x, e_y)) = end_change.move_to { if let Some((e_x, e_y)) = end_change.move_to {
start_x = s_x; start_x = s_x;
start_y = s_y; start_y = s_y;
end_x = e_x; end_x = e_x;
end_y = e_y; end_y = e_y;
style_change.move_to = Some(( style_change.move_to = Some((
Twips::new( Twips::new(
(start_x.get() as f32 * a + end_x.get() as f32 * b) as i32, (start_x.get() as f32 * a + end_x.get() as f32 * b) as i32,
), ),
Twips::new( Twips::new(
(start_y.get() as f32 * a + end_y.get() as f32 * b) as i32, (start_y.get() as f32 * a + end_y.get() as f32 * b) as i32,
), ),
)); ));
} else { } else {
panic!("Expected move_to for morph shape") panic!("Expected move_to for morph shape")
} }
} }
shape.push(ShapeRecord::StyleChange(style_change)); shape.push(ShapeRecord::StyleChange(style_change));
start = start_iter.next(); start = start_iter.next();
end = end_iter.next(); end = end_iter.next();
} }
(ShapeRecord::StyleChange(start_change), _) => { (ShapeRecord::StyleChange(start_change), _) => {
let mut style_change = start_change.clone(); let mut style_change = start_change.clone();
if let Some((s_x, s_y)) = start_change.move_to { if let Some((s_x, s_y)) = start_change.move_to {
start_x = s_x; start_x = s_x;
start_y = s_y; start_y = s_y;
style_change.move_to = Some(( style_change.move_to = Some((
Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32), Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32),
Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32), Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32),
)); ));
} }
shape.push(ShapeRecord::StyleChange(style_change)); shape.push(ShapeRecord::StyleChange(style_change));
Self::update_pos(&mut start_x, &mut start_y, s); Self::update_pos(&mut start_x, &mut start_y, s);
start = start_iter.next(); start = start_iter.next();
} }
(_, ShapeRecord::StyleChange(end_change)) => { (_, ShapeRecord::StyleChange(end_change)) => {
let mut style_change = end_change.clone(); let mut style_change = end_change.clone();
if let Some((e_x, e_y)) = end_change.move_to { if let Some((e_x, e_y)) = end_change.move_to {
end_x = e_x; end_x = e_x;
end_y = e_y; end_y = e_y;
style_change.move_to = Some(( style_change.move_to = Some((
Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32), Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32),
Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32), Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32),
)); ));
} }
shape.push(ShapeRecord::StyleChange(style_change)); shape.push(ShapeRecord::StyleChange(style_change));
Self::update_pos(&mut end_x, &mut end_y, e); Self::update_pos(&mut end_x, &mut end_y, e);
end = end_iter.next(); end = end_iter.next();
continue; continue;
} }
_ => { _ => {
shape.push(Self::interpolate_edges(s, e, a)); shape.push(Self::interpolate_edges(s, e, a));
Self::update_pos(&mut start_x, &mut start_y, s); Self::update_pos(&mut start_x, &mut start_y, s);
Self::update_pos(&mut end_x, &mut end_y, e); Self::update_pos(&mut end_x, &mut end_y, e);
start = start_iter.next(); start = start_iter.next();
end = end_iter.next(); end = end_iter.next();
} }
} }
} }
let styles = ShapeStyles { let styles = ShapeStyles {
fill_styles, fill_styles,
line_styles, line_styles,
}; };
let bounds = crate::shape_utils::calculate_shape_bounds(&shape[..]); let bounds = crate::shape_utils::calculate_shape_bounds(&shape[..]);
let shape = swf::Shape { let shape = swf::Shape {
version: 4, version: 4,
id: 0, id: 0,
shape_bounds: bounds.clone(), shape_bounds: bounds.clone(),
edge_bounds: bounds, edge_bounds: bounds,
has_fill_winding_rule: false, has_fill_winding_rule: false,
has_non_scaling_strokes: false, has_non_scaling_strokes: false,
has_scaling_strokes: true, has_scaling_strokes: true,
styles, styles,
shape, shape,
}; };
let shape_handle = renderer.register_shape(&shape); let shape_handle = renderer.register_shape(&shape);
self.frames.insert(ratio, shape_handle); self.frames.insert(ratio, shape_handle);
} }
fn update_pos(x: &mut Twips, y: &mut Twips, record: &swf::ShapeRecord) { fn update_pos(x: &mut Twips, y: &mut Twips, record: &swf::ShapeRecord) {
use swf::ShapeRecord; use swf::ShapeRecord;
match record { match record {
ShapeRecord::StraightEdge { delta_x, delta_y } => { ShapeRecord::StraightEdge { delta_x, delta_y } => {
*x += *delta_x; *x += *delta_x;
*y += *delta_y; *y += *delta_y;
} }
ShapeRecord::CurvedEdge { ShapeRecord::CurvedEdge {
control_delta_x, control_delta_x,
control_delta_y, control_delta_y,
anchor_delta_x, anchor_delta_x,
anchor_delta_y, anchor_delta_y,
} => { } => {
*x += *control_delta_x + *anchor_delta_x; *x += *control_delta_x + *anchor_delta_x;
*y += *control_delta_y + *anchor_delta_y; *y += *control_delta_y + *anchor_delta_y;
} }
ShapeRecord::StyleChange(ref style_change) => { ShapeRecord::StyleChange(ref style_change) => {
if let Some((move_x, move_y)) = style_change.move_to { if let Some((move_x, move_y)) = style_change.move_to {
*x = move_x; *x = move_x;
*y = move_y; *y = move_y;
} }
} }
} }
} }
fn interpolate_edges( fn interpolate_edges(
start: &swf::ShapeRecord, start: &swf::ShapeRecord,
end: &swf::ShapeRecord, end: &swf::ShapeRecord,
a: f32, a: f32,
) -> swf::ShapeRecord { ) -> swf::ShapeRecord {
use swf::ShapeRecord; use swf::ShapeRecord;
let b = 1.0 - a; let b = 1.0 - a;
match (start, end) { match (start, end) {
( (
ShapeRecord::StraightEdge { ShapeRecord::StraightEdge {
delta_x: start_dx, delta_x: start_dx,
delta_y: start_dy, delta_y: start_dy,
}, },
ShapeRecord::StraightEdge { ShapeRecord::StraightEdge {
delta_x: end_dx, delta_x: end_dx,
delta_y: end_dy, delta_y: end_dy,
}, },
) => ShapeRecord::StraightEdge { ) => ShapeRecord::StraightEdge {
delta_x: Twips::new((start_dx.get() as f32 * a + end_dx.get() as f32 * b) as i32), delta_x: Twips::new((start_dx.get() as f32 * a + end_dx.get() as f32 * b) as i32),
delta_y: Twips::new((start_dy.get() as f32 * a + end_dy.get() as f32 * b) as i32), delta_y: Twips::new((start_dy.get() as f32 * a + end_dy.get() as f32 * b) as i32),
}, },
( (
ShapeRecord::CurvedEdge { ShapeRecord::CurvedEdge {
control_delta_x: start_cdx, control_delta_x: start_cdx,
control_delta_y: start_cdy, control_delta_y: start_cdy,
anchor_delta_x: start_adx, anchor_delta_x: start_adx,
anchor_delta_y: start_ady, anchor_delta_y: start_ady,
}, },
ShapeRecord::CurvedEdge { ShapeRecord::CurvedEdge {
control_delta_x: end_cdx, control_delta_x: end_cdx,
control_delta_y: end_cdy, control_delta_y: end_cdy,
anchor_delta_x: end_adx, anchor_delta_x: end_adx,
anchor_delta_y: end_ady, anchor_delta_y: end_ady,
}, },
) => ShapeRecord::CurvedEdge { ) => ShapeRecord::CurvedEdge {
control_delta_x: Twips::new( control_delta_x: Twips::new(
(start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32, (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
), ),
control_delta_y: Twips::new( control_delta_y: Twips::new(
(start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32, (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
), ),
anchor_delta_x: Twips::new( anchor_delta_x: Twips::new(
(start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32, (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
), ),
anchor_delta_y: Twips::new( anchor_delta_y: Twips::new(
(start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32, (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
), ),
}, },
( (
ShapeRecord::StraightEdge { ShapeRecord::StraightEdge {
delta_x: start_dx, delta_x: start_dx,
delta_y: start_dy, delta_y: start_dy,
}, },
ShapeRecord::CurvedEdge { ShapeRecord::CurvedEdge {
control_delta_x: end_cdx, control_delta_x: end_cdx,
control_delta_y: end_cdy, control_delta_y: end_cdy,
anchor_delta_x: end_adx, anchor_delta_x: end_adx,
anchor_delta_y: end_ady, anchor_delta_y: end_ady,
}, },
) => { ) => {
let start_cdx = *start_dx / 2; let start_cdx = *start_dx / 2;
let start_cdy = *start_dy / 2; let start_cdy = *start_dy / 2;
let start_adx = start_cdx; let start_adx = start_cdx;
let start_ady = start_cdy; let start_ady = start_cdy;
ShapeRecord::CurvedEdge { ShapeRecord::CurvedEdge {
control_delta_x: Twips::new( control_delta_x: Twips::new(
(start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32, (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
), ),
control_delta_y: Twips::new( control_delta_y: Twips::new(
(start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32, (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
), ),
anchor_delta_x: Twips::new( anchor_delta_x: Twips::new(
(start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32, (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
), ),
anchor_delta_y: Twips::new( anchor_delta_y: Twips::new(
(start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32, (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
), ),
} }
} }
( (
ShapeRecord::CurvedEdge { ShapeRecord::CurvedEdge {
control_delta_x: start_cdx, control_delta_x: start_cdx,
control_delta_y: start_cdy, control_delta_y: start_cdy,
anchor_delta_x: start_adx, anchor_delta_x: start_adx,
anchor_delta_y: start_ady, anchor_delta_y: start_ady,
}, },
ShapeRecord::StraightEdge { ShapeRecord::StraightEdge {
delta_x: end_dx, delta_x: end_dx,
delta_y: end_dy, delta_y: end_dy,
}, },
) => { ) => {
let end_cdx = *end_dx / 2; let end_cdx = *end_dx / 2;
let end_cdy = *end_dy / 2; let end_cdy = *end_dy / 2;
let end_adx = end_cdx; let end_adx = end_cdx;
let end_ady = end_cdy; let end_ady = end_cdy;
ShapeRecord::CurvedEdge { ShapeRecord::CurvedEdge {
control_delta_x: Twips::new( control_delta_x: Twips::new(
(start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32, (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
), ),
control_delta_y: Twips::new( control_delta_y: Twips::new(
(start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32, (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
), ),
anchor_delta_x: Twips::new( anchor_delta_x: Twips::new(
(start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32, (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
), ),
anchor_delta_y: Twips::new( anchor_delta_y: Twips::new(
(start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32, (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
), ),
} }
} }
_ => unreachable!("{:?} {:?}", start, end), _ => unreachable!("{:?} {:?}", start, end),
} }
} }
pub fn ratio(&self) -> u16 { pub fn ratio(&self) -> u16 {
self.ratio self.ratio
} }
pub fn set_ratio(&mut self, ratio: u16) { pub fn set_ratio(&mut self, ratio: u16) {
self.ratio = ratio; self.ratio = ratio;
} }
} }
impl<'gc> DisplayObject<'gc> for MorphShape<'gc> { impl<'gc> DisplayObject<'gc> for MorphShape<'gc> {
impl_display_object!(base); impl_display_object!(base);
fn as_morph_shape(&self) -> Option<&Self> { fn as_morph_shape(&self) -> Option<&Self> {
Some(self) Some(self)
} }
fn as_morph_shape_mut(&mut self) -> Option<&mut Self> { fn as_morph_shape_mut(&mut self) -> Option<&mut Self> {
Some(self) Some(self)
} }
fn run_frame(&mut self, context: &mut UpdateContext) { fn run_frame(&mut self, context: &mut UpdateContext) {
if !self.frames.contains_key(&self.ratio) { if !self.frames.contains_key(&self.ratio) {
self.register_ratio(context.renderer, self.ratio); self.register_ratio(context.renderer, self.ratio);
} }
} }
fn render(&self, context: &mut RenderContext) { fn render(&self, context: &mut RenderContext) {
context.transform_stack.push(self.transform()); context.transform_stack.push(self.transform());
if let Some(shape) = self.frames.get(&self.ratio) { if let Some(shape) = self.frames.get(&self.ratio) {
context context
.renderer .renderer
.render_shape(*shape, context.transform_stack.transform()); .render_shape(*shape, context.transform_stack.transform());
} else { } else {
warn!("Missing ratio for morph shape"); warn!("Missing ratio for morph shape");
} }
context.transform_stack.pop(); context.transform_stack.pop();
} }
} }
unsafe impl<'gc> gc_arena::Collect for MorphShape<'gc> { unsafe impl<'gc> gc_arena::Collect for MorphShape<'gc> {
#[inline] #[inline]
fn trace(&self, cc: gc_arena::CollectionContext) { fn trace(&self, cc: gc_arena::CollectionContext) {
self.base.trace(cc); self.base.trace(cc);
} }
} }
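Both the style interpolation and `interpolate_edges` above come down to the same weighted blend of twip values, with `a = 1 - ratio / 65535` applied to the start records and `b = ratio / 65535` applied to the end records; a straight edge is promoted to a curve by splitting its delta in half so the implied control point sits at the segment midpoint. A minimal sketch of that blend, using a hypothetical helper name that is not part of the crate:

```rust
// Illustration only: the blend used for every interpolated twip value above.
// `a` weights the value from the start records, `b = 1 - a` the value from
// the end records, with `b = ratio / 65535`.
fn lerp_twips(start: i32, end: i32, ratio: u16) -> i32 {
    let b = f32::from(ratio) / 65535.0;
    let a = 1.0 - b;
    (start as f32 * a + end as f32 * b) as i32
}

fn main() {
    assert_eq!(lerp_twips(0, 200, 0), 0);       // ratio 0 => value from the start records
    assert_eq!(lerp_twips(0, 200, 65535), 200); // ratio 65535 => value from the end records
    assert_eq!(lerp_twips(0, 200, 32767), 99);  // roughly halfway, truncated toward zero
}
```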

File diff suppressed because it is too large Load Diff

View File

@ -1,52 +1,52 @@
use swf::TagCode; use swf::TagCode;
pub type DecodeResult = Result<(), Box<dyn std::error::Error>>; pub type DecodeResult = Result<(), Box<dyn std::error::Error>>;
pub type SwfStream<R> = swf::read::Reader<std::io::Cursor<R>>; pub type SwfStream<R> = swf::read::Reader<std::io::Cursor<R>>;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct SwfSlice { pub struct SwfSlice {
pub data: std::sync::Arc<Vec<u8>>, pub data: std::sync::Arc<Vec<u8>>,
pub start: usize, pub start: usize,
pub end: usize, pub end: usize,
} }
impl AsRef<[u8]> for SwfSlice { impl AsRef<[u8]> for SwfSlice {
fn as_ref(&self) -> &[u8] { fn as_ref(&self) -> &[u8] {
&self.data[self.start..self.end] &self.data[self.start..self.end]
} }
} }
pub fn decode_tags<'a, R, F>( pub fn decode_tags<'a, R, F>(
reader: &'a mut SwfStream<R>, reader: &'a mut SwfStream<R>,
mut tag_callback: F, mut tag_callback: F,
stop_tag: TagCode, stop_tag: TagCode,
) -> Result<(), Box<dyn std::error::Error>> ) -> Result<(), Box<dyn std::error::Error>>
where where
R: 'a + AsRef<[u8]>, R: 'a + AsRef<[u8]>,
F: FnMut(&mut SwfStream<R>, TagCode, usize) -> DecodeResult, F: FnMut(&mut SwfStream<R>, TagCode, usize) -> DecodeResult,
{ {
loop { loop {
let (tag_code, tag_len) = reader.read_tag_code_and_length()?; let (tag_code, tag_len) = reader.read_tag_code_and_length()?;
let end_pos = reader.get_ref().position() + tag_len as u64; let end_pos = reader.get_ref().position() + tag_len as u64;
let tag = TagCode::from_u16(tag_code); let tag = TagCode::from_u16(tag_code);
if let Some(tag) = tag { if let Some(tag) = tag {
let result = tag_callback(reader, tag, tag_len); let result = tag_callback(reader, tag, tag_len);
if let Err(_e) = result { if let Err(_e) = result {
log::error!("Error running definition tag: {:?}", tag); log::error!("Error running definition tag: {:?}", tag);
} }
if stop_tag == tag { if stop_tag == tag {
break; break;
} }
} else { } else {
log::warn!("Unknown tag code: {:?}", tag_code); log::warn!("Unknown tag code: {:?}", tag_code);
} }
use std::io::{Seek, SeekFrom}; use std::io::{Seek, SeekFrom};
reader.get_mut().seek(SeekFrom::Start(end_pos))?; reader.get_mut().seek(SeekFrom::Start(end_pos))?;
} }
Ok(()) Ok(())
} }
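A hypothetical usage sketch of `decode_tags` (not from the repository): counting `DefineShape` tags until the `End` tag is reached. It assumes `decode_tags` is in scope, that the input is the decompressed tag stream after the SWF header, and that `swf::read::Reader::new(cursor, version)` is how the stream is constructed.

```rust
use swf::TagCode;

// Illustration only: walk a tag stream and count DefineShape tags.
// `tag_data` is assumed to be the decompressed tag stream, `version` the SWF version.
fn count_shapes(tag_data: Vec<u8>, version: u8) -> Result<usize, Box<dyn std::error::Error>> {
    let mut reader = swf::read::Reader::new(std::io::Cursor::new(tag_data), version);
    let mut num_shapes = 0;
    decode_tags(
        &mut reader,
        |_reader, tag, _tag_len| {
            // The callback sees every tag; the reader is positioned at the tag body.
            if tag == TagCode::DefineShape {
                num_shapes += 1;
            }
            Ok(())
        },
        TagCode::End,
    )?;
    Ok(num_shapes)
}
```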

View File

@ -31,7 +31,10 @@ impl TransformStack {
let cur_transform = self.transform(); let cur_transform = self.transform();
let matrix = cur_transform.matrix * transform.matrix; let matrix = cur_transform.matrix * transform.matrix;
let color_transform = cur_transform.color_transform * transform.color_transform; let color_transform = cur_transform.color_transform * transform.color_transform;
self.0.push(Transform { matrix, color_transform }); self.0.push(Transform {
matrix,
color_transform,
});
} }
pub fn pop(&mut self) { pub fn pop(&mut self) {

View File

@ -1,80 +1,80 @@
//! Tests running SWFs in a headless Ruffle instance. //! Tests running SWFs in a headless Ruffle instance.
//! //!
//! Trace output can be compared with correct output from the official Flash Player. //! Trace output can be compared with correct output from the official Flash Player.
use log::{Metadata, Record}; use log::{Metadata, Record};
use ruffle_core::backend::{audio::NullAudioBackend, render::NullRenderer}; use ruffle_core::backend::{audio::NullAudioBackend, render::NullRenderer};
use ruffle_core::Player; use ruffle_core::Player;
use std::cell::RefCell; use std::cell::RefCell;
type Error = Box<dyn std::error::Error>; type Error = Box<dyn std::error::Error>;
// This macro generates test cases for a given list of SWFs. // This macro generates test cases for a given list of SWFs.
macro_rules! swf_tests { macro_rules! swf_tests {
($(($name:ident, $path:expr, $num_frames:literal),)*) => { ($(($name:ident, $path:expr, $num_frames:literal),)*) => {
$( $(
#[test] #[test]
fn $name() -> Result<(), Error> { fn $name() -> Result<(), Error> {
test_swf( test_swf(
concat!("tests/swfs/", $path, "/test.swf"), concat!("tests/swfs/", $path, "/test.swf"),
$num_frames, $num_frames,
concat!("tests/swfs/", $path, "/output.txt"), concat!("tests/swfs/", $path, "/output.txt"),
) )
} }
)* )*
} }
} }
// List of SWFs to test. // List of SWFs to test.
// Format: (test_name, test_folder, number_of_frames_to_run) // Format: (test_name, test_folder, number_of_frames_to_run)
// The test folder is relative to core/tests/swfs // The test folder is relative to core/tests/swfs
// Inside the folder is expected to be "test.swf" and "output.txt" with the correct output. // Inside the folder is expected to be "test.swf" and "output.txt" with the correct output.
swf_tests! { swf_tests! {
(single_frame, "avm1/single_frame", 2), (single_frame, "avm1/single_frame", 2),
(looping, "avm1/looping", 6), (looping, "avm1/looping", 6),
} }
/// Loads an SWF and runs it through the Ruffle core for a number of frames. /// Loads an SWF and runs it through the Ruffle core for a number of frames.
/// Tests that the trace output matches the given expected output. /// Tests that the trace output matches the given expected output.
fn test_swf(swf_path: &str, num_frames: u32, expected_output_path: &str) -> Result<(), Error> { fn test_swf(swf_path: &str, num_frames: u32, expected_output_path: &str) -> Result<(), Error> {
let _ = log::set_logger(&TRACE_LOGGER).map(|()| log::set_max_level(log::LevelFilter::Info)); let _ = log::set_logger(&TRACE_LOGGER).map(|()| log::set_max_level(log::LevelFilter::Info));
let expected_output = std::fs::read_to_string(expected_output_path)?.replace("\r\n", "\n"); let expected_output = std::fs::read_to_string(expected_output_path)?.replace("\r\n", "\n");
let swf_data = std::fs::read(swf_path)?; let swf_data = std::fs::read(swf_path)?;
let mut player = Player::new(NullRenderer, NullAudioBackend::new(), swf_data)?; let mut player = Player::new(NullRenderer, NullAudioBackend::new(), swf_data)?;
for _ in 0..num_frames { for _ in 0..num_frames {
player.run_frame(); player.run_frame();
} }
assert_eq!(trace_log(), expected_output); assert_eq!(trace_log(), expected_output);
Ok(()) Ok(())
} }
thread_local! { thread_local! {
static TRACE_LOG: RefCell<String> = RefCell::new(String::new()); static TRACE_LOG: RefCell<String> = RefCell::new(String::new());
} }
static TRACE_LOGGER: TraceLogger = TraceLogger; static TRACE_LOGGER: TraceLogger = TraceLogger;
/// `TraceLogger` captures output from AVM trace actions into a String. /// `TraceLogger` captures output from AVM trace actions into a String.
struct TraceLogger; struct TraceLogger;
fn trace_log() -> String { fn trace_log() -> String {
TRACE_LOG.with(|log| log.borrow().clone()) TRACE_LOG.with(|log| log.borrow().clone())
} }
impl log::Log for TraceLogger { impl log::Log for TraceLogger {
fn enabled(&self, metadata: &Metadata) -> bool { fn enabled(&self, metadata: &Metadata) -> bool {
metadata.target() == "avm_trace" metadata.target() == "avm_trace"
} }
fn log(&self, record: &Record) { fn log(&self, record: &Record) {
if self.enabled(record.metadata()) { if self.enabled(record.metadata()) {
TRACE_LOG.with(|log| log.borrow_mut().push_str(&format!("{}\n", record.args()))); TRACE_LOG.with(|log| log.borrow_mut().push_str(&format!("{}\n", record.args())));
} }
} }
fn flush(&self) {} fn flush(&self) {}
} }

View File

@ -1,156 +1,156 @@
use generational_arena::Arena; use generational_arena::Arena;
use ruffle_core::backend::audio::decoders::{stream_tag_reader, AdpcmDecoder, Decoder, Mp3Decoder}; use ruffle_core::backend::audio::decoders::{stream_tag_reader, AdpcmDecoder, Decoder, Mp3Decoder};
use ruffle_core::backend::audio::{swf, AudioBackend, AudioStreamHandle, SoundHandle}; use ruffle_core::backend::audio::{swf, AudioBackend, AudioStreamHandle, SoundHandle};
use std::io::Cursor; use std::io::Cursor;
use std::sync::Arc; use std::sync::Arc;
pub struct RodioAudioBackend { pub struct RodioAudioBackend {
sounds: Arena<Sound>, sounds: Arena<Sound>,
active_sounds: Arena<rodio::Sink>, active_sounds: Arena<rodio::Sink>,
streams: Arena<AudioStream>, streams: Arena<AudioStream>,
device: rodio::Device, device: rodio::Device,
} }
#[allow(dead_code)] #[allow(dead_code)]
struct AudioStream { struct AudioStream {
clip_id: swf::CharacterId, clip_id: swf::CharacterId,
info: swf::SoundStreamHead, info: swf::SoundStreamHead,
sink: rodio::Sink, sink: rodio::Sink,
} }
#[allow(dead_code)] #[allow(dead_code)]
struct Sound { struct Sound {
format: swf::SoundFormat, format: swf::SoundFormat,
data: Arc<Vec<u8>>, data: Arc<Vec<u8>>,
} }
impl RodioAudioBackend { impl RodioAudioBackend {
pub fn new() -> Result<Self, Box<dyn std::error::Error>> { pub fn new() -> Result<Self, Box<dyn std::error::Error>> {
Ok(Self { Ok(Self {
sounds: Arena::new(), sounds: Arena::new(),
streams: Arena::new(), streams: Arena::new(),
active_sounds: Arena::new(), active_sounds: Arena::new(),
device: rodio::default_output_device().ok_or("Unable to create output device")?, device: rodio::default_output_device().ok_or("Unable to create output device")?,
}) })
} }
} }
impl AudioBackend for RodioAudioBackend { impl AudioBackend for RodioAudioBackend {
fn register_sound( fn register_sound(
&mut self, &mut self,
swf_sound: &swf::Sound, swf_sound: &swf::Sound,
) -> Result<SoundHandle, Box<dyn std::error::Error>> { ) -> Result<SoundHandle, Box<dyn std::error::Error>> {
let sound = Sound { let sound = Sound {
format: swf_sound.format.clone(), format: swf_sound.format.clone(),
data: Arc::new(swf_sound.data.clone()), data: Arc::new(swf_sound.data.clone()),
}; };
Ok(self.sounds.insert(sound)) Ok(self.sounds.insert(sound))
} }
fn start_stream( fn start_stream(
&mut self, &mut self,
clip_id: swf::CharacterId, clip_id: swf::CharacterId,
clip_data: ruffle_core::tag_utils::SwfSlice, clip_data: ruffle_core::tag_utils::SwfSlice,
stream_info: &swf::SoundStreamHead, stream_info: &swf::SoundStreamHead,
) -> AudioStreamHandle { ) -> AudioStreamHandle {
let sink = rodio::Sink::new(&self.device); let sink = rodio::Sink::new(&self.device);
let format = &stream_info.stream_format; let format = &stream_info.stream_format;
let decoder = Mp3Decoder::new( let decoder = Mp3Decoder::new(
if format.is_stereo { 2 } else { 1 }, if format.is_stereo { 2 } else { 1 },
format.sample_rate.into(), format.sample_rate.into(),
stream_tag_reader(clip_data), stream_tag_reader(clip_data),
); );
let stream = AudioStream { let stream = AudioStream {
clip_id, clip_id,
info: stream_info.clone(), info: stream_info.clone(),
sink, sink,
}; };
stream.sink.append(DecoderSource(Box::new(decoder))); stream.sink.append(DecoderSource(Box::new(decoder)));
self.streams.insert(stream) self.streams.insert(stream)
} }
fn play_sound(&mut self, sound: SoundHandle) { fn play_sound(&mut self, sound: SoundHandle) {
let sound = &self.sounds[sound]; let sound = &self.sounds[sound];
use swf::AudioCompression; use swf::AudioCompression;
match sound.format.compression { match sound.format.compression {
AudioCompression::Uncompressed => { AudioCompression::Uncompressed => {
let mut data = Vec::with_capacity(sound.data.len() / 2); let mut data = Vec::with_capacity(sound.data.len() / 2);
let mut i = 0; let mut i = 0;
while i < sound.data.len() { while i < sound.data.len() {
let val = i16::from(sound.data[i]) | (i16::from(sound.data[i + 1]) << 8); let val = i16::from(sound.data[i]) | (i16::from(sound.data[i + 1]) << 8);
data.push(val); data.push(val);
i += 2; i += 2;
} }
let buffer = rodio::buffer::SamplesBuffer::new( let buffer = rodio::buffer::SamplesBuffer::new(
if sound.format.is_stereo { 2 } else { 1 }, if sound.format.is_stereo { 2 } else { 1 },
sound.format.sample_rate.into(), sound.format.sample_rate.into(),
data, data,
); );
let sink = rodio::Sink::new(&self.device); let sink = rodio::Sink::new(&self.device);
sink.append(buffer); sink.append(buffer);
self.active_sounds.insert(sink); self.active_sounds.insert(sink);
} }
AudioCompression::Adpcm => { AudioCompression::Adpcm => {
let decoder = AdpcmDecoder::new( let decoder = AdpcmDecoder::new(
Cursor::new(sound.data.to_vec()), Cursor::new(sound.data.to_vec()),
sound.format.is_stereo, sound.format.is_stereo,
sound.format.sample_rate, sound.format.sample_rate,
) )
.unwrap(); .unwrap();
let sink = rodio::Sink::new(&self.device); let sink = rodio::Sink::new(&self.device);
sink.append(DecoderSource(Box::new(decoder))); sink.append(DecoderSource(Box::new(decoder)));
self.active_sounds.insert(sink); self.active_sounds.insert(sink);
} }
AudioCompression::Mp3 => { AudioCompression::Mp3 => {
let decoder = Mp3Decoder::new( let decoder = Mp3Decoder::new(
if sound.format.is_stereo { 2 } else { 1 }, if sound.format.is_stereo { 2 } else { 1 },
sound.format.sample_rate.into(), sound.format.sample_rate.into(),
Cursor::new(sound.data.to_vec()), Cursor::new(sound.data.to_vec()),
); );
let sink = rodio::Sink::new(&self.device); let sink = rodio::Sink::new(&self.device);
sink.append(DecoderSource(Box::new(decoder))); sink.append(DecoderSource(Box::new(decoder)));
self.active_sounds.insert(sink); self.active_sounds.insert(sink);
} }
_ => unimplemented!(), _ => unimplemented!(),
} }
} }
fn tick(&mut self) { fn tick(&mut self) {
self.active_sounds.retain(|_, sink| !sink.empty()); self.active_sounds.retain(|_, sink| !sink.empty());
} }
} }
struct DecoderSource(Box<dyn Decoder + Send>); struct DecoderSource(Box<dyn Decoder + Send>);
impl Iterator for DecoderSource { impl Iterator for DecoderSource {
type Item = i16; type Item = i16;
#[inline] #[inline]
fn next(&mut self) -> Option<i16> { fn next(&mut self) -> Option<i16> {
self.0.next() self.0.next()
} }
} }
impl rodio::Source for DecoderSource { impl rodio::Source for DecoderSource {
#[inline] #[inline]
fn current_frame_len(&self) -> Option<usize> { fn current_frame_len(&self) -> Option<usize> {
None None
} }
#[inline] #[inline]
fn channels(&self) -> u16 { fn channels(&self) -> u16 {
self.0.num_channels().into() self.0.num_channels().into()
} }
#[inline] #[inline]
fn sample_rate(&self) -> u32 { fn sample_rate(&self) -> u32 {
self.0.sample_rate().into() self.0.sample_rate().into()
} }
#[inline] #[inline]
fn total_duration(&self) -> Option<std::time::Duration> { fn total_duration(&self) -> Option<std::time::Duration> {
None None
} }
} }
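The `Uncompressed` branch in `play_sound` above reassembles little-endian 16-bit samples with a manual index-and-shift loop. An equivalent sketch using `chunks_exact` and `i16::from_le_bytes` (illustration only; like the loop it mirrors, it assumes 16-bit PCM and an even number of bytes):

```rust
// Illustration only: little-endian 16-bit PCM bytes -> interleaved i16 samples,
// matching what the shift-and-or loop in the Uncompressed branch produces.
fn pcm_bytes_to_samples(bytes: &[u8]) -> Vec<i16> {
    bytes
        .chunks_exact(2)
        .map(|pair| i16::from_le_bytes([pair[0], pair[1]]))
        .collect()
}

fn main() {
    // Bytes [0x01, 0x00] => 1 and [0xFF, 0x7F] => 32767 (little-endian).
    assert_eq!(pcm_bytes_to_samples(&[0x01, 0x00, 0xFF, 0x7F]), vec![1, 32767]);
}
```

Either form yields the interleaved `Vec<i16>` that is then handed to `rodio::buffer::SamplesBuffer::new`.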

File diff suppressed because it is too large Load Diff

1
rustfmt.toml Normal file
View File

@ -0,0 +1 @@
newline_style = "Unix"

View File

@ -1,439 +1,500 @@
use fnv::FnvHashMap; use fnv::FnvHashMap;
use generational_arena::Arena; use generational_arena::Arena;
use ruffle_core::backend::audio::decoders::{AdpcmDecoder, Mp3Decoder}; use ruffle_core::backend::audio::decoders::{AdpcmDecoder, Mp3Decoder};
use ruffle_core::backend::audio::{AudioBackend, AudioStreamHandle, SoundHandle}; use ruffle_core::backend::audio::swf::{self, AudioCompression};
use ruffle_core::backend::audio::swf::{self, AudioCompression}; use ruffle_core::backend::audio::{AudioBackend, AudioStreamHandle, SoundHandle};
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use std::rc::Rc; use std::rc::Rc;
use wasm_bindgen::{closure::Closure, JsCast}; use wasm_bindgen::{closure::Closure, JsCast};
use web_sys::AudioContext; use web_sys::AudioContext;
pub struct WebAudioBackend { pub struct WebAudioBackend {
context: AudioContext, context: AudioContext,
sounds: Arena<Sound>, sounds: Arena<Sound>,
stream_data: FnvHashMap<swf::CharacterId, StreamData>, stream_data: FnvHashMap<swf::CharacterId, StreamData>,
id_to_sound: FnvHashMap<swf::CharacterId, SoundHandle>, id_to_sound: FnvHashMap<swf::CharacterId, SoundHandle>,
left_samples: Vec<f32>, left_samples: Vec<f32>,
right_samples: Vec<f32>, right_samples: Vec<f32>,
} }
thread_local! { thread_local! {
static STREAMS: RefCell<Arena<AudioStream>> = RefCell::new(Arena::new()); static STREAMS: RefCell<Arena<AudioStream>> = RefCell::new(Arena::new());
static NUM_SOUNDS_LOADING: Cell<u32> = Cell::new(0); static NUM_SOUNDS_LOADING: Cell<u32> = Cell::new(0);
} }
struct StreamData { struct StreamData {
format: swf::SoundFormat, format: swf::SoundFormat,
audio_data: Vec<u8>, audio_data: Vec<u8>,
num_sample_frames: u32, num_sample_frames: u32,
samples_per_block: u32, samples_per_block: u32,
} }
type AudioBufferPtr = Rc<RefCell<web_sys::AudioBuffer>>; type AudioBufferPtr = Rc<RefCell<web_sys::AudioBuffer>>;
// A sound can be either a JS AudioBuffer or an on-the-fly decoded stream using a ScriptProcessorNode. // A sound can be either a JS AudioBuffer or an on-the-fly decoded stream using a ScriptProcessorNode.
#[allow(dead_code)] #[allow(dead_code)]
enum SoundSource { enum SoundSource {
// Pre-decoded audio buffer. // Pre-decoded audio buffer.
AudioBuffer(AudioBufferPtr), AudioBuffer(AudioBufferPtr),
// Decode the audio data on the fly from a byte stream. // Decode the audio data on the fly from a byte stream.
Decoder(Vec<u8>), Decoder(Vec<u8>),
} }
struct Sound { struct Sound {
format: swf::SoundFormat, format: swf::SoundFormat,
source: SoundSource, source: SoundSource,
} }
type Decoder = Box<dyn Iterator<Item=i16>>; type Decoder = Box<dyn Iterator<Item = i16>>;
#[allow(dead_code)] #[allow(dead_code)]
enum AudioStream { enum AudioStream {
Decoder { decoder: Decoder, is_stereo: bool, },// closure: Option<Closure<Box<FnMut(web_sys::AudioProcessingEvent)>>> } , Decoder {
AudioBuffer { node: web_sys::AudioBufferSourceNode }, decoder: Decoder,
} is_stereo: bool,
}, // closure: Option<Closure<Box<FnMut(web_sys::AudioProcessingEvent)>>> } ,
type Error = Box<dyn std::error::Error>; AudioBuffer {
node: web_sys::AudioBufferSourceNode,
impl WebAudioBackend { },
pub fn new() -> Result<Self, Error> { }
let context = AudioContext::new().map_err(|_| "Unable to create AudioContext")?;
Ok(Self { type Error = Box<dyn std::error::Error>;
context,
sounds: Arena::new(), impl WebAudioBackend {
stream_data: FnvHashMap::default(), pub fn new() -> Result<Self, Error> {
id_to_sound: FnvHashMap::default(), let context = AudioContext::new().map_err(|_| "Unable to create AudioContext")?;
left_samples: vec![], Ok(Self {
right_samples: vec![], context,
}) sounds: Arena::new(),
} stream_data: FnvHashMap::default(),
id_to_sound: FnvHashMap::default(),
fn play_sound_internal(&mut self, handle: SoundHandle) -> SoundHandle { left_samples: vec![],
let sound = self.sounds.get(handle).unwrap(); right_samples: vec![],
match &sound.source { })
SoundSource::AudioBuffer(audio_buffer) => { }
let audio_buffer = audio_buffer.borrow();
let node = self.context.create_buffer_source().unwrap(); fn play_sound_internal(&mut self, handle: SoundHandle) -> SoundHandle {
node.set_buffer(Some(&*audio_buffer)); let sound = self.sounds.get(handle).unwrap();
node match &sound.source {
.connect_with_audio_node(&self.context.destination()) SoundSource::AudioBuffer(audio_buffer) => {
.unwrap(); let audio_buffer = audio_buffer.borrow();
node.start().unwrap(); let node = self.context.create_buffer_source().unwrap();
node.set_buffer(Some(&*audio_buffer));
let audio_stream = AudioStream::AudioBuffer { node.connect_with_audio_node(&self.context.destination())
node .unwrap();
}; node.start().unwrap();
STREAMS.with(|streams| {
let mut streams = streams.borrow_mut(); let audio_stream = AudioStream::AudioBuffer { node };
streams.insert(audio_stream) STREAMS.with(|streams| {
}) let mut streams = streams.borrow_mut();
} streams.insert(audio_stream)
SoundSource::Decoder(audio_data) => { })
let decoder: Decoder = match sound.format.compression { }
AudioCompression::Adpcm => Box::new(AdpcmDecoder::new( SoundSource::Decoder(audio_data) => {
std::io::Cursor::new(audio_data.to_vec()), let decoder: Decoder = match sound.format.compression {
sound.format.is_stereo, AudioCompression::Adpcm => Box::new(
sound.format.sample_rate AdpcmDecoder::new(
).unwrap()), std::io::Cursor::new(audio_data.to_vec()),
AudioCompression::Mp3 => Box::new(Mp3Decoder::new( sound.format.is_stereo,
if sound.format.is_stereo { sound.format.sample_rate,
2 )
} else { .unwrap(),
1 ),
}, AudioCompression::Mp3 => Box::new(Mp3Decoder::new(
sound.format.sample_rate.into(), if sound.format.is_stereo { 2 } else { 1 },
std::io::Cursor::new(audio_data.to_vec())//&sound.data[..] sound.format.sample_rate.into(),
)), std::io::Cursor::new(audio_data.to_vec()), //&sound.data[..]
_ => unimplemented!() )),
}; _ => unimplemented!(),
};
let decoder: Decoder = if sound.format.sample_rate != self.context.sample_rate() as u16 {
Box::new(resample(decoder, sound.format.sample_rate, self.context.sample_rate() as u16, sound.format.is_stereo)) let decoder: Decoder =
} else { if sound.format.sample_rate != self.context.sample_rate() as u16 {
decoder Box::new(resample(
}; decoder,
sound.format.sample_rate,
let audio_stream = AudioStream::Decoder { self.context.sample_rate() as u16,
decoder, sound.format.is_stereo,
is_stereo: sound.format.is_stereo, ))
//closure: None, } else {
}; decoder
STREAMS.with(|streams| { };
let mut streams = streams.borrow_mut();
let stream_handle = streams.insert(audio_stream); let audio_stream = AudioStream::Decoder {
let script_processor_node = self.context.create_script_processor_with_buffer_size_and_number_of_input_channels_and_number_of_output_channels(4096, 0, if sound.format.is_stereo { 2 } else { 1 }).unwrap(); decoder,
let script_node = script_processor_node.clone(); is_stereo: sound.format.is_stereo,
//closure: None,
let closure = Closure::wrap(Box::new(move |event| { };
STREAMS.with(|streams| { STREAMS.with(|streams| {
let mut streams = streams.borrow_mut(); let mut streams = streams.borrow_mut();
let audio_stream = streams.get_mut(stream_handle).unwrap(); let stream_handle = streams.insert(audio_stream);
let complete = WebAudioBackend::update_script_processor(audio_stream, event); let script_processor_node = self.context.create_script_processor_with_buffer_size_and_number_of_input_channels_and_number_of_output_channels(4096, 0, if sound.format.is_stereo { 2 } else { 1 }).unwrap();
if complete { let script_node = script_processor_node.clone();
streams.remove(stream_handle); let closure = Closure::wrap(Box::new(move |event| {
script_node.disconnect().unwrap(); STREAMS.with(|streams| {
} let mut streams = streams.borrow_mut();
}) let audio_stream = streams.get_mut(stream_handle).unwrap();
}) as Box<dyn FnMut(web_sys::AudioProcessingEvent)>); let complete = WebAudioBackend::update_script_processor(audio_stream, event);
script_processor_node.set_onaudioprocess(Some(closure.as_ref().unchecked_ref())); if complete {
// TODO: This will leak memory per playing sound. Remember and properly drop the closure. streams.remove(stream_handle);
closure.forget(); script_node.disconnect().unwrap();
}
stream_handle })
}) }) as Box<dyn FnMut(web_sys::AudioProcessingEvent)>);
} script_processor_node.set_onaudioprocess(Some(closure.as_ref().unchecked_ref()));
} // TODO: This will leak memory per playing sound. Remember and properly drop the closure.
} closure.forget();
fn decompress_to_audio_buffer(&mut self, format: &swf::SoundFormat, audio_data: &[u8], num_sample_frames: u32) -> AudioBufferPtr { stream_handle
if format.compression == AudioCompression::Mp3 { })
return self.decompress_mp3_to_audio_buffer(format, audio_data, num_sample_frames); }
} }
}
// This sucks. Firefox doesn't like 5512Hz sample rate, so manually double up the samples.
// 5512Hz should be relatively rare. fn decompress_to_audio_buffer(
let audio_buffer = if format.sample_rate > 5512 { &mut self,
self.context.create_buffer( format: &swf::SoundFormat,
if format.is_stereo { 2 } else { 1 }, audio_data: &[u8],
num_sample_frames, num_sample_frames: u32,
f32::from(format.sample_rate) ) -> AudioBufferPtr {
).unwrap() if format.compression == AudioCompression::Mp3 {
} else { return self.decompress_mp3_to_audio_buffer(format, audio_data, num_sample_frames);
self.context.create_buffer( }
if format.is_stereo { 2 } else { 1 },
num_sample_frames * 2, // This sucks. Firefox doesn't like 5512Hz sample rate, so manually double up the samples.
11025.0 // 5512Hz should be relatively rare.
).unwrap() let audio_buffer = if format.sample_rate > 5512 {
}; self.context
.create_buffer(
match format.compression { if format.is_stereo { 2 } else { 1 },
AudioCompression::Uncompressed => { num_sample_frames,
// TODO: Check for is_16_bit. f32::from(format.sample_rate),
self.left_samples = audio_data.iter().step_by(2).cloned().map(|n| f32::from(n) / 32767.0).collect(); )
if format.is_stereo { .unwrap()
self.right_samples = audio_data.iter().skip(1).step_by(2).cloned().map(|n| f32::from(n) / 32767.0).collect(); } else {
} self.context
} .create_buffer(
AudioCompression::Adpcm => { if format.is_stereo { 2 } else { 1 },
let mut decoder = AdpcmDecoder::new(audio_data, num_sample_frames * 2,
format.is_stereo, 11025.0,
format.sample_rate )
).unwrap(); .unwrap()
if format.is_stereo { };
while let (Some(l), Some(r)) = (decoder.next(), decoder.next()) {
self.left_samples.push(f32::from(l) / 32767.0); match format.compression {
self.right_samples.push(f32::from(r) / 32767.0); AudioCompression::Uncompressed => {
} // TODO: Check for is_16_bit.
} else { self.left_samples = audio_data
self.left_samples = decoder.map(|n| f32::from(n) / 32767.0).collect(); .iter()
} .step_by(2)
} .cloned()
_ => unimplemented!(), .map(|n| f32::from(n) / 32767.0)
} .collect();
if format.is_stereo {
// Double up samples for 5512Hz audio to satisfy Firefox. self.right_samples = audio_data
if format.sample_rate == 5512 { .iter()
let mut samples = Vec::with_capacity(self.left_samples.len() * 2); .skip(1)
for sample in &self.left_samples { .step_by(2)
samples.push(*sample); .cloned()
samples.push(*sample); .map(|n| f32::from(n) / 32767.0)
} .collect();
self.left_samples = samples; }
}
if format.is_stereo { AudioCompression::Adpcm => {
let mut samples = Vec::with_capacity(self.right_samples.len() * 2); let mut decoder =
for sample in &self.right_samples { AdpcmDecoder::new(audio_data, format.is_stereo, format.sample_rate).unwrap();
samples.push(*sample); if format.is_stereo {
samples.push(*sample); while let (Some(l), Some(r)) = (decoder.next(), decoder.next()) {
} self.left_samples.push(f32::from(l) / 32767.0);
self.right_samples = samples; self.right_samples.push(f32::from(r) / 32767.0);
} }
} } else {
self.left_samples = decoder.map(|n| f32::from(n) / 32767.0).collect();
audio_buffer.copy_to_channel(&mut self.left_samples, 0).unwrap(); }
if format.is_stereo { }
audio_buffer.copy_to_channel(&mut self.right_samples, 1).unwrap(); _ => unimplemented!(),
} }
Rc::new(RefCell::new(audio_buffer)) // Double up samples for 5512Hz audio to satisfy Firefox.
} if format.sample_rate == 5512 {
let mut samples = Vec::with_capacity(self.left_samples.len() * 2);
fn decompress_mp3_to_audio_buffer(&mut self, format: &swf::SoundFormat, audio_data: &[u8], _num_sample_frames: u32) -> AudioBufferPtr { for sample in &self.left_samples {
// We use the Web decodeAudioData API to decode MP3 data. samples.push(*sample);
// TODO: Is it possible we finish loading before the MP3 is decoding? samples.push(*sample);
let audio_buffer = self.context.create_buffer(1, 1, self.context.sample_rate()).unwrap(); }
let audio_buffer = Rc::new(RefCell::new(audio_buffer)); self.left_samples = samples;
let data_array = unsafe { js_sys::Uint8Array::view(&audio_data[..]) }; if format.is_stereo {
let array_buffer = data_array.buffer().slice_with_end( let mut samples = Vec::with_capacity(self.right_samples.len() * 2);
data_array.byte_offset(), for sample in &self.right_samples {
data_array.byte_offset() + data_array.byte_length(), samples.push(*sample);
); samples.push(*sample);
}
NUM_SOUNDS_LOADING.with(|n| n.set(n.get() + 1)); self.right_samples = samples;
}
let _num_channels = if format.is_stereo { 2 } else { 1 }; }
let buffer_ptr = Rc::clone(&audio_buffer);
let success_closure = Closure::wrap(Box::new(move |buffer: web_sys::AudioBuffer| { audio_buffer
*buffer_ptr.borrow_mut() = buffer; .copy_to_channel(&mut self.left_samples, 0)
NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1)); .unwrap();
}) if format.is_stereo {
as Box<dyn FnMut(web_sys::AudioBuffer)>); audio_buffer
let error_closure = Closure::wrap(Box::new(move || { .copy_to_channel(&mut self.right_samples, 1)
log::info!("Error decoding MP3 audio"); .unwrap();
NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1)); }
})
as Box<dyn FnMut()>); Rc::new(RefCell::new(audio_buffer))
self.context.decode_audio_data_with_success_callback_and_error_callback( }
&array_buffer,
success_closure.as_ref().unchecked_ref(), fn decompress_mp3_to_audio_buffer(
error_closure.as_ref().unchecked_ref() &mut self,
).unwrap(); format: &swf::SoundFormat,
audio_data: &[u8],
// TODO: This will leak memory (once per decompressed MP3). _num_sample_frames: u32,
// Not a huge deal as there are probably not many MP3s in an SWF. ) -> AudioBufferPtr {
success_closure.forget(); // We use the Web decodeAudioData API to decode MP3 data.
error_closure.forget(); // TODO: Is it possible we finish loading before the MP3 is decoding?
let audio_buffer = self
audio_buffer .context
} .create_buffer(1, 1, self.context.sample_rate())
.unwrap();
fn update_script_processor( let audio_buffer = Rc::new(RefCell::new(audio_buffer));
audio_stream: &mut AudioStream,
event: web_sys::AudioProcessingEvent, let data_array = unsafe { js_sys::Uint8Array::view(&audio_data[..]) };
) -> bool { let array_buffer = data_array.buffer().slice_with_end(
let mut complete = false; data_array.byte_offset(),
let mut left_samples = vec![]; data_array.byte_offset() + data_array.byte_length(),
let mut right_samples = vec![]; );
if let AudioStream::Decoder { decoder, is_stereo, .. } = audio_stream {
let output_buffer = event.output_buffer().unwrap(); NUM_SOUNDS_LOADING.with(|n| n.set(n.get() + 1));
let num_frames = output_buffer.length() as usize;
let _num_channels = if format.is_stereo { 2 } else { 1 };
for _ in 0..num_frames { let buffer_ptr = Rc::clone(&audio_buffer);
if let (Some(l), Some(r)) = (decoder.next(), decoder.next()) { let success_closure = Closure::wrap(Box::new(move |buffer: web_sys::AudioBuffer| {
left_samples.push(f32::from(l) / 32767.0); *buffer_ptr.borrow_mut() = buffer;
if *is_stereo { NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1));
right_samples.push(f32::from(r) / 32767.0); }) as Box<dyn FnMut(web_sys::AudioBuffer)>);
} let error_closure = Closure::wrap(Box::new(move || {
} else { log::info!("Error decoding MP3 audio");
complete = true; NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1));
break; }) as Box<dyn FnMut()>);
} self.context
} .decode_audio_data_with_success_callback_and_error_callback(
output_buffer.copy_to_channel(&mut left_samples[..], 0).unwrap(); &array_buffer,
if *is_stereo { success_closure.as_ref().unchecked_ref(),
output_buffer.copy_to_channel(&mut right_samples[..], 1).unwrap(); error_closure.as_ref().unchecked_ref(),
} )
} .unwrap();
complete // TODO: This will leak memory (once per decompressed MP3).
} // Not a huge deal as there are probably not many MP3s in an SWF.
} success_closure.forget();
error_closure.forget();
impl AudioBackend for WebAudioBackend {
fn register_sound(&mut self, sound: &swf::Sound) -> Result<SoundHandle, Error> { audio_buffer
// Slice off latency seek for MP3 data. }
let data = if sound.format.compression == AudioCompression::Mp3 {
&sound.data[2..] fn update_script_processor(
} else { audio_stream: &mut AudioStream,
&sound.data[..] event: web_sys::AudioProcessingEvent,
}; ) -> bool {
let mut complete = false;
let sound = Sound { let mut left_samples = vec![];
        let mut right_samples = vec![];
        if let AudioStream::Decoder {
            decoder, is_stereo, ..
        } = audio_stream
        {
            let output_buffer = event.output_buffer().unwrap();
            let num_frames = output_buffer.length() as usize;

            for _ in 0..num_frames {
                if let (Some(l), Some(r)) = (decoder.next(), decoder.next()) {
                    left_samples.push(f32::from(l) / 32767.0);
                    if *is_stereo {
                        right_samples.push(f32::from(r) / 32767.0);
                    }
                } else {
                    complete = true;
                    break;
                }
            }
            output_buffer
                .copy_to_channel(&mut left_samples[..], 0)
                .unwrap();
            if *is_stereo {
                output_buffer
                    .copy_to_channel(&mut right_samples[..], 1)
                    .unwrap();
            }
        }

        complete
    }
}

impl AudioBackend for WebAudioBackend {
    fn register_sound(&mut self, sound: &swf::Sound) -> Result<SoundHandle, Error> {
        // Slice off latency seek for MP3 data.
        let data = if sound.format.compression == AudioCompression::Mp3 {
            &sound.data[2..]
        } else {
            &sound.data[..]
        };

        let sound = Sound {
            format: sound.format.clone(),
            source: SoundSource::AudioBuffer(self.decompress_to_audio_buffer(
                &sound.format,
                data,
                sound.num_samples,
            )),
        };
        Ok(self.sounds.insert(sound))
    }

    fn preload_sound_stream_head(
        &mut self,
        clip_id: swf::CharacterId,
        stream_info: &swf::SoundStreamHead,
    ) {
        self.stream_data
            .entry(clip_id)
            .or_insert_with(|| StreamData {
                format: stream_info.stream_format.clone(),
                audio_data: vec![],
                num_sample_frames: 0,
                samples_per_block: stream_info.num_samples_per_block.into(),
            });
    }

    fn preload_sound_stream_block(&mut self, clip_id: swf::CharacterId, audio_data: &[u8]) {
        if let Some(stream) = self.stream_data.get_mut(&clip_id) {
            match stream.format.compression {
                AudioCompression::Uncompressed | AudioCompression::UncompressedUnknownEndian => {
                    let frame_len = if stream.format.is_stereo { 2 } else { 1 }
                        * if stream.format.is_16_bit { 2 } else { 1 };
                    stream.num_sample_frames += (audio_data.len() as u32) / frame_len;
                    stream.audio_data.extend_from_slice(audio_data);
                }
                AudioCompression::Mp3 => {
                    let num_sample_frames =
                        (u32::from(audio_data[2]) << 8) | u32::from(audio_data[3]);
                    stream.num_sample_frames += num_sample_frames;
                    // MP3 streaming data:
                    // First two bytes = number of samples
                    // Second two bytes = 'latency seek' (amount to skip when seeking to this frame)
                    stream.audio_data.extend_from_slice(&audio_data[4..]);
                }
                _ => {
                    // TODO: This is a guess and will vary slightly from block to block!
                    stream.num_sample_frames += stream.samples_per_block;
                }
            }
        }
    }

    fn preload_sound_stream_end(&mut self, clip_id: swf::CharacterId) {
        if let Some(stream) = self.stream_data.remove(&clip_id) {
            if !stream.audio_data.is_empty() {
                let audio_buffer = self.decompress_to_audio_buffer(
                    &stream.format,
                    &stream.audio_data[..],
                    stream.num_sample_frames,
                );
                let handle = self.sounds.insert(Sound {
                    format: stream.format,
                    source: SoundSource::AudioBuffer(audio_buffer),
                });
                self.id_to_sound.insert(clip_id, handle);
            }
        }
    }

    fn play_sound(&mut self, sound: SoundHandle) {
        self.play_sound_internal(sound);
    }

    fn start_stream(
        &mut self,
        clip_id: swf::CharacterId,
        _clip_data: ruffle_core::tag_utils::SwfSlice,
        _stream_info: &swf::SoundStreamHead,
    ) -> AudioStreamHandle {
        if let Some(&handle) = self.id_to_sound.get(&clip_id) {
            self.play_sound_internal(handle)
        } else {
            log::error!("Missing stream for clip {}", clip_id);
            // TODO: Return dummy sound.
            panic!();
        }
    }

    fn is_loading_complete(&self) -> bool {
        NUM_SOUNDS_LOADING.with(|n| n.get() == 0)
    }

    fn prime_audio(&mut self) {
        // Allow audio to start playing after a user gesture.
        let _ = self.context.resume();
    }
}
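
// Illustrative sketch only, not part of this commit: how a caller might drive the
// streaming-sound preload hooks implemented above. The function name and the
// `stream_blocks` input are hypothetical; only the `AudioBackend` trait methods are real.
fn preload_stream_sketch(
    backend: &mut dyn AudioBackend,
    clip_id: swf::CharacterId,
    stream_info: &swf::SoundStreamHead,
    stream_blocks: &[Vec<u8>],
) {
    // Record the stream format and reset the per-clip buffers.
    backend.preload_sound_stream_head(clip_id, stream_info);
    // Append each SoundStreamBlock tag's raw audio payload.
    for block in stream_blocks {
        backend.preload_sound_stream_block(clip_id, block);
    }
    // Decode the accumulated data into a single playable sound for this clip.
    backend.preload_sound_stream_end(clip_id);
}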
// Janky resampling code.
// TODO: Clean this up.
fn resample(
mut input: impl Iterator<Item = i16>,
input_sample_rate: u16,
output_sample_rate: u16,
is_stereo: bool,
) -> impl Iterator<Item = i16> {
let (mut left0, mut right0) = if is_stereo {
(input.next(), input.next())
} else {
let sample = input.next();
(sample, sample)
};
let (mut left1, mut right1) = if is_stereo {
(input.next(), input.next())
} else {
let sample = input.next();
(sample, sample)
};
let (mut left, mut right) = (left0.unwrap(), right0.unwrap());
let dt_input = 1.0 / f64::from(input_sample_rate);
let dt_output = 1.0 / f64::from(output_sample_rate);
let mut t = 0.0;
let mut cur_channel = 0;
std::iter::from_fn(move || {
if cur_channel == 1 {
cur_channel = 0;
return Some(right);
}
if let (Some(l0), Some(r0), Some(l1), Some(r1)) = (left0, right0, left1, right1) {
let a = t / dt_input;
let l0 = f64::from(l0);
let l1 = f64::from(l1);
let r0 = f64::from(r0);
let r1 = f64::from(r1);
left = (l0 + (l1 - l0) * a) as i16;
right = (r0 + (r1 - r0) * a) as i16;
t += dt_output;
while t >= dt_input {
t -= dt_input;
left0 = left1;
right0 = right1;
left1 = input.next();
if is_stereo {
right1 = input.next();
} else {
right1 = left1;
}
}
cur_channel = 1;
Some(left)
} else {
None
}
})
}
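
A brief, hypothetical usage sketch for the `resample` helper above (not part of the commit, and assumed to live in the same module); the sample values and rates are arbitrary:

#[cfg(test)]
mod resample_sketch {
    use super::resample;

    #[test]
    fn upsampling_yields_more_samples() {
        // Arbitrary 5512 Hz mono input, upsampled to 44100 Hz.
        let mono: Vec<i16> = vec![0, 16384, 32767, 16384, 0, -16384, -32767, -16384];
        let num_input = mono.len();
        let output: Vec<i16> = resample(mono.into_iter(), 5512, 44100, false).collect();
        // The output is interleaved left/right, so even a mono source comes back doubled up,
        // and upsampling produces more frames than were put in.
        assert!(output.len() > num_input);
    }
}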

File diff suppressed because it is too large Load Diff