chore: Add rustfmt.toml and rustfmt pass

This commit is contained in:
Mike Welsh 2019-08-26 16:38:37 -07:00
parent 06d9f39c0e
commit 6a5c5ab1df
21 changed files with 6159 additions and 6094 deletions

File diff suppressed because it is too large. (Load Diff)

View File

@ -1,2 +1,2 @@
pub mod audio;
pub mod render;
pub mod audio;
pub mod render;

View File

@ -1,73 +1,73 @@
use generational_arena::{Arena, Index};
pub mod decoders;
pub mod swf {
pub use swf::{read, AudioCompression, CharacterId, Sound, SoundFormat, SoundStreamHead};
}
pub type AudioStreamHandle = Index;
pub type SoundHandle = Index;
type Error = Box<dyn std::error::Error>;
/// Backend interface for audio playback.
///
/// Implementations decode and mix SWF sounds; `NullAudioBackend` is a
/// no-op implementation for headless use.
pub trait AudioBackend {
    /// Warms up the audio device before playback; default is a no-op.
    fn prime_audio(&mut self) {}
    /// Registers an event sound, returning a handle for `play_sound`.
    fn register_sound(&mut self, swf_sound: &swf::Sound) -> Result<SoundHandle, Error>;
    /// Notifies the backend of a clip's SoundStreamHead during preload.
    fn preload_sound_stream_head(
        &mut self,
        _clip_id: swf::CharacterId,
        _stream_info: &swf::SoundStreamHead,
    ) {
    }
    /// Feeds one SoundStreamBlock's raw bytes to the backend during preload.
    fn preload_sound_stream_block(&mut self, _clip_id: swf::CharacterId, _audio_data: &[u8]) {}
    /// Signals that preloading of a clip's stream is finished.
    fn preload_sound_stream_end(&mut self, _clip_id: swf::CharacterId) {}
    /// Starts playback of a previously registered event sound.
    fn play_sound(&mut self, sound: SoundHandle);
    /// Begins playback of a movie clip's streaming sound.
    fn start_stream(
        &mut self,
        clip_id: crate::prelude::CharacterId,
        clip_data: crate::tag_utils::SwfSlice,
        handle: &swf::SoundStreamHead,
    ) -> AudioStreamHandle;
    // TODO: Eventually remove this/move it to library.
    /// Whether all registered sounds have finished loading/decoding.
    fn is_loading_complete(&self) -> bool {
        true
    }
    /// Called periodically to let the backend pump its queues; default no-op.
    fn tick(&mut self) {}
}
/// An `AudioBackend` that ignores all sound; registrations hand back
/// dummy arena handles. Useful for headless execution and tests.
pub struct NullAudioBackend {
    sounds: Arena<()>,
    streams: Arena<()>,
}

impl NullAudioBackend {
    /// Creates a backend with empty sound and stream tables.
    pub fn new() -> NullAudioBackend {
        NullAudioBackend {
            sounds: Arena::new(),
            streams: Arena::new(),
        }
    }
}

impl AudioBackend for NullAudioBackend {
    fn register_sound(&mut self, _sound: &swf::Sound) -> Result<SoundHandle, Error> {
        // Allocate a fresh handle; nothing is actually decoded.
        Ok(self.sounds.insert(()))
    }

    fn play_sound(&mut self, _sound: SoundHandle) {}

    fn start_stream(
        &mut self,
        _clip_id: crate::prelude::CharacterId,
        _clip_data: crate::tag_utils::SwfSlice,
        _handle: &swf::SoundStreamHead,
    ) -> AudioStreamHandle {
        // Hand back a dummy stream handle.
        self.streams.insert(())
    }
}

impl Default for NullAudioBackend {
    fn default() -> Self {
        NullAudioBackend::new()
    }
}
use generational_arena::{Arena, Index};
pub mod decoders;
pub mod swf {
pub use swf::{read, AudioCompression, CharacterId, Sound, SoundFormat, SoundStreamHead};
}
pub type AudioStreamHandle = Index;
pub type SoundHandle = Index;
type Error = Box<dyn std::error::Error>;
/// Backend interface for audio playback.
///
/// Implementations decode and mix SWF sounds; `NullAudioBackend` is a
/// no-op implementation for headless use.
pub trait AudioBackend {
    /// Warms up the audio device before playback; default is a no-op.
    fn prime_audio(&mut self) {}
    /// Registers an event sound, returning a handle for `play_sound`.
    fn register_sound(&mut self, swf_sound: &swf::Sound) -> Result<SoundHandle, Error>;
    /// Notifies the backend of a clip's SoundStreamHead during preload.
    fn preload_sound_stream_head(
        &mut self,
        _clip_id: swf::CharacterId,
        _stream_info: &swf::SoundStreamHead,
    ) {
    }
    /// Feeds one SoundStreamBlock's raw bytes to the backend during preload.
    fn preload_sound_stream_block(&mut self, _clip_id: swf::CharacterId, _audio_data: &[u8]) {}
    /// Signals that preloading of a clip's stream is finished.
    fn preload_sound_stream_end(&mut self, _clip_id: swf::CharacterId) {}
    /// Starts playback of a previously registered event sound.
    fn play_sound(&mut self, sound: SoundHandle);
    /// Begins playback of a movie clip's streaming sound.
    fn start_stream(
        &mut self,
        clip_id: crate::prelude::CharacterId,
        clip_data: crate::tag_utils::SwfSlice,
        handle: &swf::SoundStreamHead,
    ) -> AudioStreamHandle;
    // TODO: Eventually remove this/move it to library.
    /// Whether all registered sounds have finished loading/decoding.
    fn is_loading_complete(&self) -> bool {
        true
    }
    /// Called periodically to let the backend pump its queues; default no-op.
    fn tick(&mut self) {}
}
/// An `AudioBackend` that ignores all sound; registrations hand back
/// dummy arena handles. Useful for headless execution and tests.
pub struct NullAudioBackend {
    sounds: Arena<()>,
    streams: Arena<()>,
}

impl NullAudioBackend {
    /// Creates a backend with empty sound and stream tables.
    pub fn new() -> NullAudioBackend {
        NullAudioBackend {
            sounds: Arena::new(),
            streams: Arena::new(),
        }
    }
}

impl AudioBackend for NullAudioBackend {
    fn register_sound(&mut self, _sound: &swf::Sound) -> Result<SoundHandle, Error> {
        // Allocate a fresh handle; nothing is actually decoded.
        Ok(self.sounds.insert(()))
    }

    fn play_sound(&mut self, _sound: SoundHandle) {}

    fn start_stream(
        &mut self,
        _clip_id: crate::prelude::CharacterId,
        _clip_data: crate::tag_utils::SwfSlice,
        _handle: &swf::SoundStreamHead,
    ) -> AudioStreamHandle {
        // Hand back a dummy stream handle.
        self.streams.insert(())
    }
}

impl Default for NullAudioBackend {
    fn default() -> Self {
        NullAudioBackend::new()
    }
}

View File

@ -1,76 +1,76 @@
mod adpcm;
mod mp3;
pub use adpcm::AdpcmDecoder;
pub use mp3::Mp3Decoder;
/// An audio decoder: an iterator over decoded 16-bit PCM samples that also
/// reports its output format.
pub trait Decoder: Iterator<Item = i16> {
    /// Number of audio channels (1 = mono, 2 = stereo).
    fn num_channels(&self) -> u8;
    /// Output sample rate in Hz.
    fn sample_rate(&self) -> u16;
}
/// Returns a `Read`er yielding the audio bytes of a movie clip's
/// `SoundStreamBlock` tags, scanning the SWF tag stream lazily.
///
/// The first 4 bytes of each block (MP3 stream block header) are skipped.
pub fn stream_tag_reader(
    swf_data: crate::tag_utils::SwfSlice,
) -> IterRead<impl Iterator<Item = u8>> {
    use std::io::{Cursor, Read};
    use swf::TagCode;
    // NOTE(review): the SWF version is hard-coded to 8 — confirm no caller
    // depends on version-specific tag parsing here.
    let mut reader = swf::read::Reader::new(Cursor::new(swf_data), 8);
    let mut audio_data = vec![];
    let mut cur_byte = 0;
    let mut frame = 1;
    let iter = std::iter::from_fn(move || {
        // Refill the buffer from the next SoundStreamBlock once exhausted.
        if cur_byte >= audio_data.len() {
            cur_byte = 0;
            let tag_callback =
                |reader: &mut swf::read::Reader<Cursor<crate::tag_utils::SwfSlice>>,
                 tag_code,
                 tag_len| match tag_code {
                    TagCode::ShowFrame => {
                        frame += 1;
                        Ok(())
                    }
                    TagCode::SoundStreamBlock => {
                        audio_data.clear();
                        let mut data = vec![];
                        reader
                            .get_mut()
                            .take(tag_len as u64)
                            .read_to_end(&mut data)?;
                        // Skip the 4-byte stream block header.
                        audio_data.extend(data[4..].iter());
                        Ok(())
                    }
                    _ => Ok(()),
                };
            // Decoding stops after one SoundStreamBlock; errors are
            // swallowed, which simply ends the byte stream.
            let _ =
                crate::tag_utils::decode_tags(&mut reader, tag_callback, TagCode::SoundStreamBlock);
        }
        if cur_byte < audio_data.len() {
            let byte = audio_data[cur_byte];
            cur_byte += 1;
            Some(byte)
        } else {
            None
        }
    });
    IterRead(iter)
}
/// Adapts a byte iterator into a `std::io::Read` implementation.
pub struct IterRead<I: Iterator<Item = u8>>(I);

impl<I: Iterator<Item = u8>> std::io::Read for IterRead<I> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        // Pull bytes until the buffer is full or the iterator is exhausted;
        // the count of bytes written is the read length (0 signals EOF).
        let mut written = 0;
        for (slot, byte) in buf.iter_mut().zip(&mut self.0) {
            *slot = byte;
            written += 1;
        }
        Ok(written)
    }
}
mod adpcm;
mod mp3;
pub use adpcm::AdpcmDecoder;
pub use mp3::Mp3Decoder;
/// An audio decoder: an iterator over decoded 16-bit PCM samples that also
/// reports its output format.
pub trait Decoder: Iterator<Item = i16> {
    /// Number of audio channels (1 = mono, 2 = stereo).
    fn num_channels(&self) -> u8;
    /// Output sample rate in Hz.
    fn sample_rate(&self) -> u16;
}
/// Returns a `Read`er yielding the audio bytes of a movie clip's
/// `SoundStreamBlock` tags, scanning the SWF tag stream lazily.
///
/// The first 4 bytes of each block (MP3 stream block header) are skipped.
pub fn stream_tag_reader(
    swf_data: crate::tag_utils::SwfSlice,
) -> IterRead<impl Iterator<Item = u8>> {
    use std::io::{Cursor, Read};
    use swf::TagCode;
    // NOTE(review): the SWF version is hard-coded to 8 — confirm no caller
    // depends on version-specific tag parsing here.
    let mut reader = swf::read::Reader::new(Cursor::new(swf_data), 8);
    let mut audio_data = vec![];
    let mut cur_byte = 0;
    let mut frame = 1;
    let iter = std::iter::from_fn(move || {
        // Refill the buffer from the next SoundStreamBlock once exhausted.
        if cur_byte >= audio_data.len() {
            cur_byte = 0;
            let tag_callback =
                |reader: &mut swf::read::Reader<Cursor<crate::tag_utils::SwfSlice>>,
                 tag_code,
                 tag_len| match tag_code {
                    TagCode::ShowFrame => {
                        frame += 1;
                        Ok(())
                    }
                    TagCode::SoundStreamBlock => {
                        audio_data.clear();
                        let mut data = vec![];
                        reader
                            .get_mut()
                            .take(tag_len as u64)
                            .read_to_end(&mut data)?;
                        // Skip the 4-byte stream block header.
                        audio_data.extend(data[4..].iter());
                        Ok(())
                    }
                    _ => Ok(()),
                };
            // Decoding stops after one SoundStreamBlock; errors are
            // swallowed, which simply ends the byte stream.
            let _ =
                crate::tag_utils::decode_tags(&mut reader, tag_callback, TagCode::SoundStreamBlock);
        }
        if cur_byte < audio_data.len() {
            let byte = audio_data[cur_byte];
            cur_byte += 1;
            Some(byte)
        } else {
            None
        }
    });
    IterRead(iter)
}
/// Adapts a byte iterator into a `std::io::Read` implementation.
pub struct IterRead<I: Iterator<Item = u8>>(I);

impl<I: Iterator<Item = u8>> std::io::Read for IterRead<I> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        // Pull bytes until the buffer is full or the iterator is exhausted;
        // the count of bytes written is the read length (0 signals EOF).
        let mut written = 0;
        for (slot, byte) in buf.iter_mut().zip(&mut self.0) {
            *slot = byte;
            written += 1;
        }
        Ok(written)
    }
}

View File

@ -1,172 +1,172 @@
use super::Decoder;
use bitstream_io::{BigEndian, BitReader};
use std::io::Read;
/// Decoder for SWF ADPCM-compressed audio (sign-magnitude, IMA-style
/// ADPCM with 2–5 bits per sample, mono or stereo).
pub struct AdpcmDecoder<R: Read> {
    inner: BitReader<R, BigEndian>,
    sample_rate: u16,
    is_stereo: bool,
    // Bits per encoded delta (2..=5), read from the 2-bit stream header.
    bits_per_sample: usize,
    // Position within the current 4095-sample block; 0 => re-read seeds.
    sample_num: u16,
    left_sample: i32,
    left_step_index: i16,
    left_step: i32,
    right_sample: i32,
    right_step_index: i16,
    right_step: i32,
    // Next channel `next()` will emit; starts at 2 to force a first decode.
    cur_channel: u8,
}
impl<R: Read> AdpcmDecoder<R> {
    /// Step-index adjustment tables, one per bits-per-sample (2..=5).
    const INDEX_TABLE: [&'static [i16]; 4] = [
        &[-1, 2],
        &[-1, -1, 2, 4],
        &[-1, -1, -1, -1, 2, 4, 6, 8],
        &[-1, -1, -1, -1, -1, -1, -1, -1, 1, 2, 4, 6, 8, 10, 13, 16],
    ];

    /// Standard IMA ADPCM step-size table.
    const STEP_TABLE: [i32; 89] = [
        7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 19, 21, 23, 25, 28, 31, 34, 37, 41, 45, 50, 55, 60,
        66, 73, 80, 88, 97, 107, 118, 130, 143, 157, 173, 190, 209, 230, 253, 279, 307, 337, 371,
        408, 449, 494, 544, 598, 658, 724, 796, 876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878,
        2066, 2272, 2499, 2749, 3024, 3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484, 7132, 7845,
        8630, 9493, 10442, 11487, 12635, 13899, 15289, 16818, 18500, 20350, 22385, 24623, 27086,
        29794, 32767,
    ];

    /// Reads the 2-bit header giving bits-per-sample (2..=5) and prepares
    /// the decoder; per-block seed samples are read lazily in `next_sample`.
    pub fn new(inner: R, is_stereo: bool, sample_rate: u16) -> Result<Self, std::io::Error> {
        let mut reader = BitReader::new(inner);
        let bits_per_sample = reader.read::<u8>(2)? as usize + 2;
        Ok(Self {
            inner: reader,
            sample_rate,
            is_stereo,
            bits_per_sample,
            sample_num: 0,
            left_sample: 0,
            left_step: 0,
            left_step_index: 0,
            right_sample: 0,
            right_step: 0,
            right_step_index: 0,
            // Start past the last channel so the first `next()` decodes.
            cur_channel: 2,
        })
    }

    /// Decodes one sample per channel into `left_sample`/`right_sample`.
    pub fn next_sample(&mut self) -> Result<(), std::io::Error> {
        self.cur_channel = 0;
        if self.sample_num == 0 {
            // The initial sample values are NOT byte-aligned.
            self.left_sample = self.inner.read_signed(16)?;
            self.left_step_index = self.inner.read::<u16>(6)? as i16;
            self.left_step = Self::STEP_TABLE[self.left_step_index as usize];
            if self.is_stereo {
                self.right_sample = self.inner.read_signed(16)?;
                self.right_step_index = self.inner.read::<u16>(6)? as i16;
                self.right_step = Self::STEP_TABLE[self.right_step_index as usize];
            }
        }
        self.sample_num = (self.sample_num + 1) % 4095;

        let data: i32 = self.inner.read::<u32>(self.bits_per_sample as u32)? as i32;
        self.left_step = Self::STEP_TABLE[self.left_step_index as usize];
        // (data + 0.5) * step / 2^(bits_per_sample - 2)
        // Data is sign-magnitude, NOT two's complement.
        // TODO(Herschel): Other implementations use some bit-tricks for this.
        let sign_mask = 1 << (self.bits_per_sample - 1);
        let magnitude = data & !sign_mask;
        let delta = (2 * magnitude + 1) * self.left_step / sign_mask;
        if (data & sign_mask) != 0 {
            self.left_sample -= delta;
        } else {
            self.left_sample += delta;
        }
        // Clamp to valid i16 range.
        // Fix: the lower clamp previously assigned +32768, which wraps to
        // -32768..garbage when later cast to i16; it must be -32768.
        if self.left_sample < -32768 {
            self.left_sample = -32768;
        } else if self.left_sample > 32767 {
            self.left_sample = 32767;
        }

        let i = magnitude as usize;
        self.left_step_index += Self::INDEX_TABLE[self.bits_per_sample - 2][i];
        if self.left_step_index < 0 {
            self.left_step_index = 0;
        } else if self.left_step_index >= Self::STEP_TABLE.len() as i16 {
            self.left_step_index = Self::STEP_TABLE.len() as i16 - 1;
        }

        if self.is_stereo {
            let data = self.inner.read::<u32>(self.bits_per_sample as u32)? as i32;
            self.right_step = Self::STEP_TABLE[self.right_step_index as usize];
            let sign_mask = 1 << (self.bits_per_sample - 1);
            let magnitude = data & !sign_mask;
            let delta = (2 * magnitude + 1) * self.right_step / sign_mask;
            if (data & sign_mask) != 0 {
                self.right_sample -= delta;
            } else {
                self.right_sample += delta;
            }
            // Same clamp fix as the left channel.
            if self.right_sample < -32768 {
                self.right_sample = -32768;
            } else if self.right_sample > 32767 {
                self.right_sample = 32767;
            }
            let i = magnitude as usize;
            self.right_step_index += Self::INDEX_TABLE[self.bits_per_sample - 2][i];
            if self.right_step_index < 0 {
                self.right_step_index = 0;
            } else if self.right_step_index >= Self::STEP_TABLE.len() as i16 {
                self.right_step_index = Self::STEP_TABLE.len() as i16 - 1;
            }
        }

        Ok(())
    }
}
impl<R: Read> Iterator for AdpcmDecoder<R> {
    type Item = i16;
    /// Emits interleaved samples (left, then right for stereo), decoding a
    /// new sample pair once all channels of the current one are consumed.
    /// A read error ends the iterator (`ok()?` maps it to `None`).
    fn next(&mut self) -> Option<i16> {
        if self.cur_channel >= if self.is_stereo { 2 } else { 1 } {
            self.next_sample().ok()?;
        }
        let sample = if self.cur_channel == 0 {
            self.left_sample
        } else {
            self.right_sample
        };
        self.cur_channel += 1;
        Some(sample as i16)
    }
}
/// Reports the format configured at construction time.
impl<R: std::io::Read> Decoder for AdpcmDecoder<R> {
    #[inline]
    fn num_channels(&self) -> u8 {
        // Stereo adds a second channel on top of the mono base.
        1 + self.is_stereo as u8
    }

    #[inline]
    fn sample_rate(&self) -> u16 {
        self.sample_rate
    }
}
use super::Decoder;
use bitstream_io::{BigEndian, BitReader};
use std::io::Read;
/// Decoder for SWF ADPCM-compressed audio (sign-magnitude, IMA-style
/// ADPCM with 2–5 bits per sample, mono or stereo).
pub struct AdpcmDecoder<R: Read> {
    inner: BitReader<R, BigEndian>,
    sample_rate: u16,
    is_stereo: bool,
    // Bits per encoded delta (2..=5), read from the 2-bit stream header.
    bits_per_sample: usize,
    // Position within the current 4095-sample block; 0 => re-read seeds.
    sample_num: u16,
    left_sample: i32,
    left_step_index: i16,
    left_step: i32,
    right_sample: i32,
    right_step_index: i16,
    right_step: i32,
    // Next channel `next()` will emit; starts at 2 to force a first decode.
    cur_channel: u8,
}
impl<R: Read> AdpcmDecoder<R> {
    /// Step-index adjustment tables, one per bits-per-sample (2..=5).
    const INDEX_TABLE: [&'static [i16]; 4] = [
        &[-1, 2],
        &[-1, -1, 2, 4],
        &[-1, -1, -1, -1, 2, 4, 6, 8],
        &[-1, -1, -1, -1, -1, -1, -1, -1, 1, 2, 4, 6, 8, 10, 13, 16],
    ];

    /// Standard IMA ADPCM step-size table.
    const STEP_TABLE: [i32; 89] = [
        7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 19, 21, 23, 25, 28, 31, 34, 37, 41, 45, 50, 55, 60,
        66, 73, 80, 88, 97, 107, 118, 130, 143, 157, 173, 190, 209, 230, 253, 279, 307, 337, 371,
        408, 449, 494, 544, 598, 658, 724, 796, 876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878,
        2066, 2272, 2499, 2749, 3024, 3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484, 7132, 7845,
        8630, 9493, 10442, 11487, 12635, 13899, 15289, 16818, 18500, 20350, 22385, 24623, 27086,
        29794, 32767,
    ];

    /// Reads the 2-bit header giving bits-per-sample (2..=5) and prepares
    /// the decoder; per-block seed samples are read lazily in `next_sample`.
    pub fn new(inner: R, is_stereo: bool, sample_rate: u16) -> Result<Self, std::io::Error> {
        let mut reader = BitReader::new(inner);
        let bits_per_sample = reader.read::<u8>(2)? as usize + 2;
        Ok(Self {
            inner: reader,
            sample_rate,
            is_stereo,
            bits_per_sample,
            sample_num: 0,
            left_sample: 0,
            left_step: 0,
            left_step_index: 0,
            right_sample: 0,
            right_step: 0,
            right_step_index: 0,
            // Start past the last channel so the first `next()` decodes.
            cur_channel: 2,
        })
    }

    /// Decodes one sample per channel into `left_sample`/`right_sample`.
    pub fn next_sample(&mut self) -> Result<(), std::io::Error> {
        self.cur_channel = 0;
        if self.sample_num == 0 {
            // The initial sample values are NOT byte-aligned.
            self.left_sample = self.inner.read_signed(16)?;
            self.left_step_index = self.inner.read::<u16>(6)? as i16;
            self.left_step = Self::STEP_TABLE[self.left_step_index as usize];
            if self.is_stereo {
                self.right_sample = self.inner.read_signed(16)?;
                self.right_step_index = self.inner.read::<u16>(6)? as i16;
                self.right_step = Self::STEP_TABLE[self.right_step_index as usize];
            }
        }
        self.sample_num = (self.sample_num + 1) % 4095;

        let data: i32 = self.inner.read::<u32>(self.bits_per_sample as u32)? as i32;
        self.left_step = Self::STEP_TABLE[self.left_step_index as usize];
        // (data + 0.5) * step / 2^(bits_per_sample - 2)
        // Data is sign-magnitude, NOT two's complement.
        // TODO(Herschel): Other implementations use some bit-tricks for this.
        let sign_mask = 1 << (self.bits_per_sample - 1);
        let magnitude = data & !sign_mask;
        let delta = (2 * magnitude + 1) * self.left_step / sign_mask;
        if (data & sign_mask) != 0 {
            self.left_sample -= delta;
        } else {
            self.left_sample += delta;
        }
        // Clamp to valid i16 range.
        // Fix: the lower clamp previously assigned +32768, which wraps to
        // -32768..garbage when later cast to i16; it must be -32768.
        if self.left_sample < -32768 {
            self.left_sample = -32768;
        } else if self.left_sample > 32767 {
            self.left_sample = 32767;
        }

        let i = magnitude as usize;
        self.left_step_index += Self::INDEX_TABLE[self.bits_per_sample - 2][i];
        if self.left_step_index < 0 {
            self.left_step_index = 0;
        } else if self.left_step_index >= Self::STEP_TABLE.len() as i16 {
            self.left_step_index = Self::STEP_TABLE.len() as i16 - 1;
        }

        if self.is_stereo {
            let data = self.inner.read::<u32>(self.bits_per_sample as u32)? as i32;
            self.right_step = Self::STEP_TABLE[self.right_step_index as usize];
            let sign_mask = 1 << (self.bits_per_sample - 1);
            let magnitude = data & !sign_mask;
            let delta = (2 * magnitude + 1) * self.right_step / sign_mask;
            if (data & sign_mask) != 0 {
                self.right_sample -= delta;
            } else {
                self.right_sample += delta;
            }
            // Same clamp fix as the left channel.
            if self.right_sample < -32768 {
                self.right_sample = -32768;
            } else if self.right_sample > 32767 {
                self.right_sample = 32767;
            }
            let i = magnitude as usize;
            self.right_step_index += Self::INDEX_TABLE[self.bits_per_sample - 2][i];
            if self.right_step_index < 0 {
                self.right_step_index = 0;
            } else if self.right_step_index >= Self::STEP_TABLE.len() as i16 {
                self.right_step_index = Self::STEP_TABLE.len() as i16 - 1;
            }
        }

        Ok(())
    }
}
impl<R: Read> Iterator for AdpcmDecoder<R> {
    type Item = i16;
    /// Emits interleaved samples (left, then right for stereo), decoding a
    /// new sample pair once all channels of the current one are consumed.
    /// A read error ends the iterator (`ok()?` maps it to `None`).
    fn next(&mut self) -> Option<i16> {
        if self.cur_channel >= if self.is_stereo { 2 } else { 1 } {
            self.next_sample().ok()?;
        }
        let sample = if self.cur_channel == 0 {
            self.left_sample
        } else {
            self.right_sample
        };
        self.cur_channel += 1;
        Some(sample as i16)
    }
}
/// Reports the format configured at construction time.
impl<R: std::io::Read> Decoder for AdpcmDecoder<R> {
    #[inline]
    fn num_channels(&self) -> u8 {
        // Stereo adds a second channel on top of the mono base.
        1 + self.is_stereo as u8
    }

    #[inline]
    fn sample_rate(&self) -> u16 {
        self.sample_rate
    }
}

View File

@ -1,124 +1,124 @@
/// MP3 decoder backed by the `minimp3` decoder.
/// Iterates over decoded 16-bit PCM samples.
#[cfg(feature = "minimp3")]
#[allow(dead_code)]
pub struct Mp3Decoder<R: std::io::Read> {
    decoder: minimp3::Decoder<R>,
    sample_rate: u32,
    num_channels: u16,
    // Most recently decoded frame; samples are read from its `data`.
    cur_frame: minimp3::Frame,
    // Next index into `cur_frame.data`.
    cur_sample: usize,
    // Sample count of the current frame; 0 means no frame / end of stream.
    num_samples: usize,
}
#[cfg(feature = "minimp3")]
impl<R: std::io::Read> Mp3Decoder<R> {
    /// Creates a decoder; `num_channels`/`sample_rate` are supplied by the
    /// caller rather than sniffed from the MP3 bitstream.
    pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self {
        Mp3Decoder {
            decoder: minimp3::Decoder::new(reader),
            num_channels,
            sample_rate,
            // NOTE(review): `mem::zeroed` is undefined behavior if `Frame`
            // holds heap-owned fields (e.g. a Vec). `num_samples == 0`
            // keeps this placeholder from being read, but it should be
            // replaced with a properly constructed empty frame — confirm
            // `minimp3::Frame`'s field types.
            cur_frame: unsafe { std::mem::zeroed::<minimp3::Frame>() },
            cur_sample: 0,
            num_samples: 0,
        }
    }
    // Decodes the next MP3 frame, or marks EOF by zeroing `num_samples`.
    fn next_frame(&mut self) {
        if let Ok(frame) = self.decoder.next_frame() {
            self.num_samples = frame.data.len();
            self.cur_frame = frame;
        } else {
            self.num_samples = 0;
        }
        self.cur_sample = 0;
    }
}
#[cfg(feature = "minimp3")]
impl<R: std::io::Read> Iterator for Mp3Decoder<R> {
    type Item = i16;
    /// Yields the next PCM sample, pulling a new frame when the current
    /// one is exhausted; `None` once decoding fails or input ends.
    #[inline]
    fn next(&mut self) -> Option<i16> {
        if self.cur_sample >= self.num_samples {
            self.next_frame();
        }
        if self.num_samples > 0 {
            let sample = self.cur_frame.data[self.cur_sample];
            self.cur_sample += 1;
            Some(sample)
        } else {
            None
        }
    }
}
/// MP3 decoder backed by the pure-Rust `puremp3` decoder; compiled in when
/// the `minimp3` feature is disabled.
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
pub struct Mp3Decoder<R: std::io::Read> {
    decoder: puremp3::Mp3Decoder<R>,
    sample_rate: u32,
    num_channels: u16,
    // Most recently decoded frame.
    cur_frame: puremp3::Frame,
    // Next sample index within the current frame.
    cur_sample: usize,
    // Channel to read next; `cur_frame.samples` is indexed per channel.
    cur_channel: usize,
}
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
impl<R: std::io::Read> Mp3Decoder<R> {
    /// Creates a decoder; `num_channels`/`sample_rate` are supplied by the
    /// caller rather than sniffed from the MP3 bitstream.
    pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self {
        Mp3Decoder {
            decoder: puremp3::Mp3Decoder::new(reader),
            num_channels,
            sample_rate,
            // NOTE(review): `mem::zeroed` is undefined behavior if `Frame`
            // contains heap-owned or non-zeroable fields; `num_samples` is
            // zeroed so the placeholder is never read, but it should be a
            // properly constructed empty frame — confirm `puremp3::Frame`.
            cur_frame: unsafe { std::mem::zeroed::<puremp3::Frame>() },
            cur_sample: 0,
            cur_channel: 0,
        }
    }
    // Decodes the next frame, or marks EOF by zeroing `num_samples`.
    fn next_frame(&mut self) {
        if let Ok(frame) = self.decoder.next_frame() {
            self.cur_frame = frame;
        } else {
            self.cur_frame.num_samples = 0;
        }
        self.cur_sample = 0;
        self.cur_channel = 0;
    }
}
/// Reports the output format passed in at construction time.
/// NOTE(review): this impl is not feature-gated, so it applies to whichever
/// `Mp3Decoder` type is compiled in.
impl<R: std::io::Read> super::Decoder for Mp3Decoder<R> {
    #[inline]
    fn num_channels(&self) -> u8 {
        self.num_channels as u8
    }
    #[inline]
    fn sample_rate(&self) -> u16 {
        self.sample_rate as u16
    }
}
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
impl<R: std::io::Read> Iterator for Mp3Decoder<R> {
    type Item = i16;
    /// Yields interleaved samples: advances the channel first, then the
    /// sample index once every channel has been emitted.
    #[inline]
    fn next(&mut self) -> Option<i16> {
        if self.cur_sample >= self.cur_frame.num_samples {
            self.next_frame();
        }
        if self.cur_frame.num_samples > 0 {
            let sample = self.cur_frame.samples[self.cur_channel][self.cur_sample];
            self.cur_channel += 1;
            if self.cur_channel >= usize::from(self.num_channels) {
                self.cur_channel = 0;
                self.cur_sample += 1;
            }
            // Samples are floats here; scale to 16-bit PCM.
            Some((sample * 32767.0) as i16)
        } else {
            None
        }
    }
}
/// MP3 decoder backed by the `minimp3` decoder.
/// Iterates over decoded 16-bit PCM samples.
#[cfg(feature = "minimp3")]
#[allow(dead_code)]
pub struct Mp3Decoder<R: std::io::Read> {
    decoder: minimp3::Decoder<R>,
    sample_rate: u32,
    num_channels: u16,
    // Most recently decoded frame; samples are read from its `data`.
    cur_frame: minimp3::Frame,
    // Next index into `cur_frame.data`.
    cur_sample: usize,
    // Sample count of the current frame; 0 means no frame / end of stream.
    num_samples: usize,
}
#[cfg(feature = "minimp3")]
impl<R: std::io::Read> Mp3Decoder<R> {
    /// Creates a decoder; `num_channels`/`sample_rate` are supplied by the
    /// caller rather than sniffed from the MP3 bitstream.
    pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self {
        Mp3Decoder {
            decoder: minimp3::Decoder::new(reader),
            num_channels,
            sample_rate,
            // NOTE(review): `mem::zeroed` is undefined behavior if `Frame`
            // holds heap-owned fields (e.g. a Vec). `num_samples == 0`
            // keeps this placeholder from being read, but it should be
            // replaced with a properly constructed empty frame — confirm
            // `minimp3::Frame`'s field types.
            cur_frame: unsafe { std::mem::zeroed::<minimp3::Frame>() },
            cur_sample: 0,
            num_samples: 0,
        }
    }
    // Decodes the next MP3 frame, or marks EOF by zeroing `num_samples`.
    fn next_frame(&mut self) {
        if let Ok(frame) = self.decoder.next_frame() {
            self.num_samples = frame.data.len();
            self.cur_frame = frame;
        } else {
            self.num_samples = 0;
        }
        self.cur_sample = 0;
    }
}
#[cfg(feature = "minimp3")]
impl<R: std::io::Read> Iterator for Mp3Decoder<R> {
    type Item = i16;
    /// Yields the next PCM sample, pulling a new frame when the current
    /// one is exhausted; `None` once decoding fails or input ends.
    #[inline]
    fn next(&mut self) -> Option<i16> {
        if self.cur_sample >= self.num_samples {
            self.next_frame();
        }
        if self.num_samples > 0 {
            let sample = self.cur_frame.data[self.cur_sample];
            self.cur_sample += 1;
            Some(sample)
        } else {
            None
        }
    }
}
/// MP3 decoder backed by the pure-Rust `puremp3` decoder; compiled in when
/// the `minimp3` feature is disabled.
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
pub struct Mp3Decoder<R: std::io::Read> {
    decoder: puremp3::Mp3Decoder<R>,
    sample_rate: u32,
    num_channels: u16,
    // Most recently decoded frame.
    cur_frame: puremp3::Frame,
    // Next sample index within the current frame.
    cur_sample: usize,
    // Channel to read next; `cur_frame.samples` is indexed per channel.
    cur_channel: usize,
}
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
impl<R: std::io::Read> Mp3Decoder<R> {
    /// Creates a decoder; `num_channels`/`sample_rate` are supplied by the
    /// caller rather than sniffed from the MP3 bitstream.
    pub fn new(num_channels: u16, sample_rate: u32, reader: R) -> Self {
        Mp3Decoder {
            decoder: puremp3::Mp3Decoder::new(reader),
            num_channels,
            sample_rate,
            // NOTE(review): `mem::zeroed` is undefined behavior if `Frame`
            // contains heap-owned or non-zeroable fields; `num_samples` is
            // zeroed so the placeholder is never read, but it should be a
            // properly constructed empty frame — confirm `puremp3::Frame`.
            cur_frame: unsafe { std::mem::zeroed::<puremp3::Frame>() },
            cur_sample: 0,
            cur_channel: 0,
        }
    }
    // Decodes the next frame, or marks EOF by zeroing `num_samples`.
    fn next_frame(&mut self) {
        if let Ok(frame) = self.decoder.next_frame() {
            self.cur_frame = frame;
        } else {
            self.cur_frame.num_samples = 0;
        }
        self.cur_sample = 0;
        self.cur_channel = 0;
    }
}
/// Reports the output format passed in at construction time.
/// NOTE(review): this impl is not feature-gated, so it applies to whichever
/// `Mp3Decoder` type is compiled in.
impl<R: std::io::Read> super::Decoder for Mp3Decoder<R> {
    #[inline]
    fn num_channels(&self) -> u8 {
        self.num_channels as u8
    }
    #[inline]
    fn sample_rate(&self) -> u16 {
        self.sample_rate as u16
    }
}
#[cfg(all(feature = "puremp3", not(feature = "minimp3")))]
impl<R: std::io::Read> Iterator for Mp3Decoder<R> {
    type Item = i16;
    /// Yields interleaved samples: advances the channel first, then the
    /// sample index once every channel has been emitted.
    #[inline]
    fn next(&mut self) -> Option<i16> {
        if self.cur_sample >= self.cur_frame.num_samples {
            self.next_frame();
        }
        if self.cur_frame.num_samples > 0 {
            let sample = self.cur_frame.samples[self.cur_channel][self.cur_sample];
            self.cur_channel += 1;
            if self.cur_channel >= usize::from(self.num_channels) {
                self.cur_channel = 0;
                self.cur_sample += 1;
            }
            // Samples are floats here; scale to 16-bit PCM.
            Some((sample * 32767.0) as i16)
        } else {
            None
        }
    }
}

View File

@ -1,278 +1,278 @@
pub use crate::{transform::Transform, Color};
use std::io::Read;
pub use swf;
/// Backend interface for rendering shapes and bitmaps.
pub trait RenderBackend {
    /// Updates the size of the rendering surface.
    fn set_viewport_dimensions(&mut self, width: u32, height: u32);
    /// Registers a shape's geometry, returning a handle for `render_shape`.
    fn register_shape(&mut self, shape: &swf::Shape) -> ShapeHandle;
    /// Registers a font glyph outline as a renderable shape.
    fn register_glyph_shape(&mut self, shape: &swf::Glyph) -> ShapeHandle;
    /// Registers a DefineBits JPEG that shares the movie's JPEGTables data.
    fn register_bitmap_jpeg(
        &mut self,
        id: swf::CharacterId,
        data: &[u8],
        jpeg_tables: &[u8],
    ) -> BitmapHandle;
    /// Registers a self-contained DefineBitsJPEG2 image.
    fn register_bitmap_jpeg_2(&mut self, id: swf::CharacterId, data: &[u8]) -> BitmapHandle;
    /// Registers a DefineBitsJPEG3 image with a separate alpha channel.
    fn register_bitmap_jpeg_3(
        &mut self,
        id: swf::CharacterId,
        jpeg_data: &[u8],
        alpha_data: &[u8],
    ) -> BitmapHandle;
    /// Registers a DefineBitsLossless (zlib-compressed) bitmap.
    fn register_bitmap_png(&mut self, swf_tag: &swf::DefineBitsLossless) -> BitmapHandle;
    /// Begins a new frame of rendering.
    fn begin_frame(&mut self);
    /// Clears the stage to the given color.
    fn clear(&mut self, color: Color);
    /// Draws a previously registered shape with the given transform.
    fn render_shape(&mut self, shape: ShapeHandle, transform: &Transform);
    /// Finishes and presents the current frame.
    fn end_frame(&mut self);
    /// Draws the pause overlay on top of the stage.
    fn draw_pause_overlay(&mut self);
    /// Draws bars outside the stage area per the letterbox mode.
    fn draw_letterbox(&mut self, letterbox: Letterbox);
}
/// Backend-specific handle to a registered shape.
#[derive(Copy, Clone, Debug)]
pub struct ShapeHandle(pub usize);
/// Backend-specific handle to a registered bitmap.
#[derive(Copy, Clone, Debug)]
pub struct BitmapHandle(pub usize);
/// How the stage is boxed inside the viewport when aspect ratios differ.
/// The `f32` payload is presumably the bar thickness — confirm with the
/// backend implementations that consume it.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum Letterbox {
    None,
    // Horizontal bars above/below the stage.
    Letterbox(f32),
    // Vertical bars left/right of the stage.
    Pillarbox(f32),
}
/// A `RenderBackend` that draws nothing and returns dummy handles.
/// Useful for headless execution and tests.
pub struct NullRenderer;
impl RenderBackend for NullRenderer {
    fn set_viewport_dimensions(&mut self, _width: u32, _height: u32) {}
    fn register_shape(&mut self, _shape: &swf::Shape) -> ShapeHandle {
        ShapeHandle(0)
    }
    fn register_glyph_shape(&mut self, _shape: &swf::Glyph) -> ShapeHandle {
        ShapeHandle(0)
    }
    fn register_bitmap_jpeg(
        &mut self,
        _id: swf::CharacterId,
        _data: &[u8],
        _jpeg_tables: &[u8],
    ) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_jpeg_2(&mut self, _id: swf::CharacterId, _data: &[u8]) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_jpeg_3(
        &mut self,
        _id: swf::CharacterId,
        _data: &[u8],
        _alpha_data: &[u8],
    ) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_png(&mut self, _swf_tag: &swf::DefineBitsLossless) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn begin_frame(&mut self) {}
    fn end_frame(&mut self) {}
    fn clear(&mut self, _color: Color) {}
    fn render_shape(&mut self, _shape: ShapeHandle, _transform: &Transform) {}
    fn draw_pause_overlay(&mut self) {}
    fn draw_letterbox(&mut self, _letterbox: Letterbox) {}
}
/// Joins a movie's JPEGTables blob with a DefineBits tag's image data into
/// one decodable JPEG: the last 2 bytes of the tables (EOI marker) and the
/// first 2 bytes of the image data (SOI marker) are dropped at the seam.
pub fn glue_swf_jpeg_to_tables(jpeg_tables: &[u8], jpeg_data: &[u8]) -> Vec<u8> {
    let tables = &jpeg_tables[..jpeg_tables.len() - 2];
    let image = &jpeg_data[2..];
    let mut joined = Vec::with_capacity(tables.len() + image.len());
    joined.extend_from_slice(tables);
    joined.extend_from_slice(image);
    joined
}
/// Removes potential invalid JPEG data from SWF DefineBitsJPEG tags.
///
/// SWF19 p.138:
/// "Before version 8 of the SWF file format, SWF files could contain an erroneous header of 0xFF, 0xD9, 0xFF, 0xD8 before the JPEG SOI marker."
/// These bytes need to be removed for the JPEG to decode properly.
///
/// Returns the input unchanged (borrowed) when no erroneous header is
/// embedded; otherwise returns an owned copy with the 4 bytes spliced out.
pub fn remove_invalid_jpeg_data(mut data: &[u8]) -> std::borrow::Cow<[u8]> {
    const INVALID_HEADER: [u8; 4] = [0xFF, 0xD9, 0xFF, 0xD8];
    // TODO: Might be better to return an Box<Iterator<Item=u8>> instead of a Cow here,
    // where the spliced iter is a data[..n].chain(data[n+4..])?
    // Fix: `starts_with` and `windows` handle inputs shorter than 4 bytes,
    // where the old `data[..4]` / `data.len() - 4` panicked or underflowed.
    // `windows` also checks the final 4-byte position the old range missed.
    if data.starts_with(&INVALID_HEADER) {
        data = &data[4..];
    }
    if let Some(pos) = data.windows(4).position(|w| w == INVALID_HEADER) {
        let mut out_data = Vec::with_capacity(data.len() - 4);
        out_data.extend_from_slice(&data[..pos]);
        out_data.extend_from_slice(&data[pos + 4..]);
        std::borrow::Cow::from(out_data)
    } else {
        std::borrow::Cow::Borrowed(data)
    }
}
/// Decodes a JPEG with optional alpha data.
///
/// Returns `(width, height, rgba)` where the zlib-compressed alpha plane
/// from a DefineBitsJPEG3 tag is interleaved with the decoded RGB pixels.
pub fn define_bits_jpeg_to_rgba(
    jpeg_data: &[u8],
    alpha_data: &[u8],
) -> Result<(u32, u32, Vec<u8>), Box<dyn std::error::Error>> {
    let jpeg_data = remove_invalid_jpeg_data(jpeg_data);
    let mut decoder = jpeg_decoder::Decoder::new(&jpeg_data[..]);
    // Fix: propagate decode errors instead of panicking on malformed data;
    // the function already returns a boxed error.
    decoder.read_info()?;
    let metadata = decoder.info().ok_or("missing JPEG metadata")?;
    let decoded_data = decoder.decode()?;
    // Decompress the alpha data (DEFLATE compression).
    let alpha_data = {
        let mut data = vec![];
        let mut decoder = libflate::zlib::Decoder::new(alpha_data)?;
        decoder.read_to_end(&mut data)?;
        data
    };
    // Interleave each 3-byte RGB pixel with one alpha byte.
    // NOTE(review): assumes 3 bytes/pixel output and an alpha plane of one
    // byte per pixel; the indexing panics otherwise — confirm inputs.
    let mut rgba = Vec::with_capacity((decoded_data.len() / 3) * 4);
    let mut i = 0;
    let mut a = 0;
    while i < decoded_data.len() {
        rgba.push(decoded_data[i]);
        rgba.push(decoded_data[i + 1]);
        rgba.push(decoded_data[i + 2]);
        rgba.push(alpha_data[a]);
        i += 3;
        a += 1;
    }
    Ok((metadata.width.into(), metadata.height.into(), rgba))
}
/// Decodes the bitmap data in DefineBitsLossless tag into RGBA.
/// DefineBitsLossless is Zlib encoded pixel data (similar to PNG), possibly
/// palletized.
pub fn define_bits_lossless_to_rgba(
    swf_tag: &swf::DefineBitsLossless,
) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    // Decompress the image data (DEFLATE compression).
    let mut decoded_data = {
        let mut data = vec![];
        let mut decoder = libflate::zlib::Decoder::new(&swf_tag.data[..])?;
        decoder.read_to_end(&mut data)?;
        data
    };
    // Swizzle/de-palettize the bitmap.
    let out_data = match (swf_tag.version, swf_tag.format) {
        (1, swf::BitmapFormat::Rgb15) => unimplemented!("15-bit PNG"),
        (1, swf::BitmapFormat::Rgb32) => {
            // Shift each 4-byte pixel left by one byte, dropping the lead
            // byte, and force the alpha byte to fully opaque (0xff).
            let mut i = 0;
            while i < decoded_data.len() {
                decoded_data[i] = decoded_data[i + 1];
                decoded_data[i + 1] = decoded_data[i + 2];
                decoded_data[i + 2] = decoded_data[i + 3];
                decoded_data[i + 3] = 0xff;
                i += 4;
            }
            decoded_data
        }
        (2, swf::BitmapFormat::Rgb32) => {
            // Same shift, but the leading byte is kept as the real alpha.
            let mut i = 0;
            while i < decoded_data.len() {
                let alpha = decoded_data[i];
                decoded_data[i] = decoded_data[i + 1];
                decoded_data[i + 1] = decoded_data[i + 2];
                decoded_data[i + 2] = decoded_data[i + 3];
                decoded_data[i + 3] = alpha;
                i += 4;
            }
            decoded_data
        }
        (1, swf::BitmapFormat::ColorMap8) => {
            let mut i = 0;
            // Rows are padded up to a multiple of 4 bytes.
            let padded_width = (swf_tag.width + 0b11) & !0b11;
            // Read the 3-byte-per-entry palette; alpha is forced opaque.
            let mut palette = Vec::with_capacity(swf_tag.num_colors as usize + 1);
            for _ in 0..=swf_tag.num_colors {
                palette.push(Color {
                    r: decoded_data[i],
                    g: decoded_data[i + 1],
                    b: decoded_data[i + 2],
                    a: 255,
                });
                i += 3;
            }
            let mut out_data = vec![];
            for _ in 0..swf_tag.height {
                for _ in 0..swf_tag.width {
                    let entry = decoded_data[i] as usize;
                    if entry < palette.len() {
                        let color = &palette[entry];
                        out_data.push(color.r);
                        out_data.push(color.g);
                        out_data.push(color.b);
                        out_data.push(color.a);
                    } else {
                        // Out-of-range palette index: emit opaque black.
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(255);
                    }
                    i += 1;
                }
                // Skip the row's padding bytes.
                i += (padded_width - swf_tag.width) as usize;
            }
            out_data
        }
        (2, swf::BitmapFormat::ColorMap8) => {
            let mut i = 0;
            let padded_width = (swf_tag.width + 0b11) & !0b11;
            // Read the 4-byte-per-entry palette with per-entry alpha.
            let mut palette = Vec::with_capacity(swf_tag.num_colors as usize + 1);
            for _ in 0..=swf_tag.num_colors {
                palette.push(Color {
                    r: decoded_data[i],
                    g: decoded_data[i + 1],
                    b: decoded_data[i + 2],
                    a: decoded_data[i + 3],
                });
                i += 4;
            }
            let mut out_data = vec![];
            for _ in 0..swf_tag.height {
                for _ in 0..swf_tag.width {
                    let entry = decoded_data[i] as usize;
                    if entry < palette.len() {
                        let color = &palette[entry];
                        out_data.push(color.r);
                        out_data.push(color.g);
                        out_data.push(color.b);
                        out_data.push(color.a);
                    } else {
                        // Out-of-range palette index: emit transparent black.
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                    }
                    i += 1;
                }
                // Skip the row's padding bytes.
                i += (padded_width - swf_tag.width) as usize;
            }
            out_data
        }
        _ => unimplemented!("{:?} {:?}", swf_tag.version, swf_tag.format),
    };
    Ok(out_data)
}
/// Images in SWFs are stored with premultiplied alpha.
/// Converts RGBA premultiplied alpha to standard RBGA, in place.
pub fn unmultiply_alpha_rgba(rgba: &mut [u8]) {
    for pixel in rgba.chunks_exact_mut(4) {
        let alpha = pixel[3];
        // Fully transparent pixels carry no recoverable color; skip them.
        if alpha > 0 {
            let a = f32::from(alpha) / 255.0;
            for channel in &mut pixel[..3] {
                *channel = (f32::from(*channel) / a) as u8;
            }
        }
    }
}
pub use crate::{transform::Transform, Color};
use std::io::Read;
pub use swf;
/// Backend interface for rendering shapes and bitmaps.
pub trait RenderBackend {
    /// Updates the size of the rendering surface.
    fn set_viewport_dimensions(&mut self, width: u32, height: u32);
    /// Registers a shape's geometry, returning a handle for `render_shape`.
    fn register_shape(&mut self, shape: &swf::Shape) -> ShapeHandle;
    /// Registers a font glyph outline as a renderable shape.
    fn register_glyph_shape(&mut self, shape: &swf::Glyph) -> ShapeHandle;
    /// Registers a DefineBits JPEG that shares the movie's JPEGTables data.
    fn register_bitmap_jpeg(
        &mut self,
        id: swf::CharacterId,
        data: &[u8],
        jpeg_tables: &[u8],
    ) -> BitmapHandle;
    /// Registers a self-contained DefineBitsJPEG2 image.
    fn register_bitmap_jpeg_2(&mut self, id: swf::CharacterId, data: &[u8]) -> BitmapHandle;
    /// Registers a DefineBitsJPEG3 image with a separate alpha channel.
    fn register_bitmap_jpeg_3(
        &mut self,
        id: swf::CharacterId,
        jpeg_data: &[u8],
        alpha_data: &[u8],
    ) -> BitmapHandle;
    /// Registers a DefineBitsLossless (zlib-compressed) bitmap.
    fn register_bitmap_png(&mut self, swf_tag: &swf::DefineBitsLossless) -> BitmapHandle;
    /// Begins a new frame of rendering.
    fn begin_frame(&mut self);
    /// Clears the stage to the given color.
    fn clear(&mut self, color: Color);
    /// Draws a previously registered shape with the given transform.
    fn render_shape(&mut self, shape: ShapeHandle, transform: &Transform);
    /// Finishes and presents the current frame.
    fn end_frame(&mut self);
    /// Draws the pause overlay on top of the stage.
    fn draw_pause_overlay(&mut self);
    /// Draws bars outside the stage area per the letterbox mode.
    fn draw_letterbox(&mut self, letterbox: Letterbox);
}
/// Backend-specific handle to a registered shape.
#[derive(Copy, Clone, Debug)]
pub struct ShapeHandle(pub usize);
/// Backend-specific handle to a registered bitmap.
#[derive(Copy, Clone, Debug)]
pub struct BitmapHandle(pub usize);
/// How the stage is boxed inside the viewport when aspect ratios differ.
/// The `f32` payload is presumably the bar thickness — confirm with the
/// backend implementations that consume it.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum Letterbox {
    None,
    // Horizontal bars above/below the stage.
    Letterbox(f32),
    // Vertical bars left/right of the stage.
    Pillarbox(f32),
}
/// A `RenderBackend` that draws nothing and returns dummy handles.
/// Useful for headless execution and tests.
pub struct NullRenderer;
impl RenderBackend for NullRenderer {
    fn set_viewport_dimensions(&mut self, _width: u32, _height: u32) {}
    fn register_shape(&mut self, _shape: &swf::Shape) -> ShapeHandle {
        ShapeHandle(0)
    }
    fn register_glyph_shape(&mut self, _shape: &swf::Glyph) -> ShapeHandle {
        ShapeHandle(0)
    }
    fn register_bitmap_jpeg(
        &mut self,
        _id: swf::CharacterId,
        _data: &[u8],
        _jpeg_tables: &[u8],
    ) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_jpeg_2(&mut self, _id: swf::CharacterId, _data: &[u8]) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_jpeg_3(
        &mut self,
        _id: swf::CharacterId,
        _data: &[u8],
        _alpha_data: &[u8],
    ) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn register_bitmap_png(&mut self, _swf_tag: &swf::DefineBitsLossless) -> BitmapHandle {
        BitmapHandle(0)
    }
    fn begin_frame(&mut self) {}
    fn end_frame(&mut self) {}
    fn clear(&mut self, _color: Color) {}
    fn render_shape(&mut self, _shape: ShapeHandle, _transform: &Transform) {}
    fn draw_pause_overlay(&mut self) {}
    fn draw_letterbox(&mut self, _letterbox: Letterbox) {}
}
/// Joins a movie's JPEGTables blob with a DefineBits tag's image data into
/// one decodable JPEG: the last 2 bytes of the tables (EOI marker) and the
/// first 2 bytes of the image data (SOI marker) are dropped at the seam.
pub fn glue_swf_jpeg_to_tables(jpeg_tables: &[u8], jpeg_data: &[u8]) -> Vec<u8> {
    let tables = &jpeg_tables[..jpeg_tables.len() - 2];
    let image = &jpeg_data[2..];
    let mut joined = Vec::with_capacity(tables.len() + image.len());
    joined.extend_from_slice(tables);
    joined.extend_from_slice(image);
    joined
}
/// Removes potential invalid JPEG data from SWF DefineBitsJPEG tags.
///
/// SWF19 p.138:
/// "Before version 8 of the SWF file format, SWF files could contain an erroneous header of 0xFF, 0xD9, 0xFF, 0xD8 before the JPEG SOI marker."
/// These bytes need to be removed for the JPEG to decode properly.
///
/// Returns the input unchanged (borrowed) when no erroneous header is
/// embedded; otherwise returns an owned copy with the 4 bytes spliced out.
pub fn remove_invalid_jpeg_data(mut data: &[u8]) -> std::borrow::Cow<[u8]> {
    const INVALID_HEADER: [u8; 4] = [0xFF, 0xD9, 0xFF, 0xD8];
    // TODO: Might be better to return an Box<Iterator<Item=u8>> instead of a Cow here,
    // where the spliced iter is a data[..n].chain(data[n+4..])?
    // Fix: `starts_with` and `windows` handle inputs shorter than 4 bytes,
    // where the old `data[..4]` / `data.len() - 4` panicked or underflowed.
    // `windows` also checks the final 4-byte position the old range missed.
    if data.starts_with(&INVALID_HEADER) {
        data = &data[4..];
    }
    if let Some(pos) = data.windows(4).position(|w| w == INVALID_HEADER) {
        let mut out_data = Vec::with_capacity(data.len() - 4);
        out_data.extend_from_slice(&data[..pos]);
        out_data.extend_from_slice(&data[pos + 4..]);
        std::borrow::Cow::from(out_data)
    } else {
        std::borrow::Cow::Borrowed(data)
    }
}
/// Decodes a JPEG with optional alpha data.
///
/// Returns `(width, height, rgba)` where the zlib-compressed alpha plane
/// from a DefineBitsJPEG3 tag is interleaved with the decoded RGB pixels.
pub fn define_bits_jpeg_to_rgba(
    jpeg_data: &[u8],
    alpha_data: &[u8],
) -> Result<(u32, u32, Vec<u8>), Box<dyn std::error::Error>> {
    let jpeg_data = remove_invalid_jpeg_data(jpeg_data);
    let mut decoder = jpeg_decoder::Decoder::new(&jpeg_data[..]);
    // Fix: propagate decode errors instead of panicking on malformed data;
    // the function already returns a boxed error.
    decoder.read_info()?;
    let metadata = decoder.info().ok_or("missing JPEG metadata")?;
    let decoded_data = decoder.decode()?;
    // Decompress the alpha data (DEFLATE compression).
    let alpha_data = {
        let mut data = vec![];
        let mut decoder = libflate::zlib::Decoder::new(alpha_data)?;
        decoder.read_to_end(&mut data)?;
        data
    };
    // Interleave each 3-byte RGB pixel with one alpha byte.
    // NOTE(review): assumes 3 bytes/pixel output and an alpha plane of one
    // byte per pixel; the indexing panics otherwise — confirm inputs.
    let mut rgba = Vec::with_capacity((decoded_data.len() / 3) * 4);
    let mut i = 0;
    let mut a = 0;
    while i < decoded_data.len() {
        rgba.push(decoded_data[i]);
        rgba.push(decoded_data[i + 1]);
        rgba.push(decoded_data[i + 2]);
        rgba.push(alpha_data[a]);
        i += 3;
        a += 1;
    }
    Ok((metadata.width.into(), metadata.height.into(), rgba))
}
/// Decodes the bitmap data in DefineBitsLossless tag into RGBA.
/// DefineBitsLossless is Zlib encoded pixel data (similar to PNG), possibly
/// palletized.
///
/// Tag version 1 (`DefineBitsLossless`) carries no alpha (it is forced to
/// opaque here); version 2 (`DefineBitsLossless2`) carries per-pixel alpha.
pub fn define_bits_lossless_to_rgba(
    swf_tag: &swf::DefineBitsLossless,
) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    // Decompress the image data (DEFLATE compression).
    let mut decoded_data = {
        let mut data = vec![];
        let mut decoder = libflate::zlib::Decoder::new(&swf_tag.data[..])?;
        decoder.read_to_end(&mut data)?;
        data
    };
    // Swizzle/de-palettize the bitmap.
    let out_data = match (swf_tag.version, swf_tag.format) {
        (1, swf::BitmapFormat::Rgb15) => unimplemented!("15-bit PNG"),
        // v1 32-bit: the first byte of each 4-byte pixel is unused; shift the
        // three color bytes left in place and force the alpha byte opaque.
        (1, swf::BitmapFormat::Rgb32) => {
            let mut i = 0;
            while i < decoded_data.len() {
                decoded_data[i] = decoded_data[i + 1];
                decoded_data[i + 1] = decoded_data[i + 2];
                decoded_data[i + 2] = decoded_data[i + 3];
                decoded_data[i + 3] = 0xff;
                i += 4;
            }
            decoded_data
        }
        // v2 32-bit: pixels are stored alpha-first; rotate each 4-byte pixel
        // in place so the alpha byte moves to the end (RGBA).
        (2, swf::BitmapFormat::Rgb32) => {
            let mut i = 0;
            while i < decoded_data.len() {
                let alpha = decoded_data[i];
                decoded_data[i] = decoded_data[i + 1];
                decoded_data[i + 1] = decoded_data[i + 2];
                decoded_data[i + 2] = decoded_data[i + 3];
                decoded_data[i + 3] = alpha;
                i += 4;
            }
            decoded_data
        }
        // v1 palettized: an RGB palette of `num_colors + 1` entries, followed
        // by one palette-index byte per pixel; rows are padded to a multiple
        // of 4 bytes.
        (1, swf::BitmapFormat::ColorMap8) => {
            let mut i = 0;
            // Row stride rounded up to a multiple of 4.
            let padded_width = (swf_tag.width + 0b11) & !0b11;
            let mut palette = Vec::with_capacity(swf_tag.num_colors as usize + 1);
            for _ in 0..=swf_tag.num_colors {
                palette.push(Color {
                    r: decoded_data[i],
                    g: decoded_data[i + 1],
                    b: decoded_data[i + 2],
                    a: 255,
                });
                i += 3;
            }
            let mut out_data = vec![];
            for _ in 0..swf_tag.height {
                for _ in 0..swf_tag.width {
                    let entry = decoded_data[i] as usize;
                    if entry < palette.len() {
                        let color = &palette[entry];
                        out_data.push(color.r);
                        out_data.push(color.g);
                        out_data.push(color.b);
                        out_data.push(color.a);
                    } else {
                        // Out-of-range palette index: emit opaque black.
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(255);
                    }
                    i += 1;
                }
                // Skip the row padding bytes.
                i += (padded_width - swf_tag.width) as usize;
            }
            out_data
        }
        // v2 palettized: same layout, but palette entries are RGBA and an
        // out-of-range index is emitted as fully transparent.
        (2, swf::BitmapFormat::ColorMap8) => {
            let mut i = 0;
            let padded_width = (swf_tag.width + 0b11) & !0b11;
            let mut palette = Vec::with_capacity(swf_tag.num_colors as usize + 1);
            for _ in 0..=swf_tag.num_colors {
                palette.push(Color {
                    r: decoded_data[i],
                    g: decoded_data[i + 1],
                    b: decoded_data[i + 2],
                    a: decoded_data[i + 3],
                });
                i += 4;
            }
            let mut out_data = vec![];
            for _ in 0..swf_tag.height {
                for _ in 0..swf_tag.width {
                    let entry = decoded_data[i] as usize;
                    if entry < palette.len() {
                        let color = &palette[entry];
                        out_data.push(color.r);
                        out_data.push(color.g);
                        out_data.push(color.b);
                        out_data.push(color.a);
                    } else {
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                        out_data.push(0);
                    }
                    i += 1;
                }
                // Skip the row padding bytes.
                i += (padded_width - swf_tag.width) as usize;
            }
            out_data
        }
        _ => unimplemented!("{:?} {:?}", swf_tag.version, swf_tag.format),
    };
    Ok(out_data)
}
/// Images in SWFs are stored with premultiplied alpha.
/// Converts RGBA premultiplied alpha to standard RGBA in place.
pub fn unmultiply_alpha_rgba(rgba: &mut [u8]) {
    for px in rgba.chunks_exact_mut(4) {
        let alpha = px[3];
        if alpha == 0 {
            // Fully transparent pixels carry no recoverable color; leave as-is.
            continue;
        }
        let a = f32::from(alpha) / 255.0;
        px[0] = (f32::from(px[0]) / a) as u8;
        px[1] = (f32::from(px[1]) / a) as u8;
        px[2] = (f32::from(px[2]) / a) as u8;
    }
}

View File

@ -1,74 +1,74 @@
use crate::matrix::Matrix;
use swf::Twips;
/// An axis-aligned bounding box in twips.
#[derive(Clone, Debug)]
pub struct BoundingBox {
    pub x_min: Twips,
    pub y_min: Twips,
    pub x_max: Twips,
    pub y_max: Twips,
    /// `false` marks an empty/degenerate box that contains and intersects
    /// nothing (see `contains`/`intersects`).
    pub valid: bool,
}
impl BoundingBox {
    /// Returns the smallest axis-aligned box containing this box after its
    /// four corners are transformed by `matrix`.
    pub fn transform(&self, matrix: &Matrix) -> Self {
        if !self.valid {
            return Self::default();
        }
        use std::cmp::{max, min};
        let corners = [
            *matrix * (self.x_min, self.y_min),
            *matrix * (self.x_min, self.y_max),
            *matrix * (self.x_max, self.y_min),
            *matrix * (self.x_max, self.y_max),
        ];
        let (mut x_min, mut y_min) = corners[0];
        let (mut x_max, mut y_max) = corners[0];
        for &(x, y) in &corners[1..] {
            x_min = min(x_min, x);
            y_min = min(y_min, y);
            x_max = max(x_max, x);
            y_max = max(y_max, y);
        }
        BoundingBox {
            x_min,
            y_min,
            x_max,
            y_max,
            valid: true,
        }
    }

    /// `true` if the two boxes overlap; touching edges count as overlap.
    /// Invalid boxes never intersect anything.
    pub fn intersects(&self, other: &BoundingBox) -> bool {
        if !self.valid || !other.valid {
            return false;
        }
        use std::cmp::{max, min};
        let left = max(self.x_min, other.x_min);
        let top = max(self.y_min, other.y_min);
        let right = min(self.x_max, other.x_max);
        let bottom = min(self.y_max, other.y_max);
        left <= right && top <= bottom
    }

    /// `true` if the point lies inside or on the edge of a valid box.
    pub fn contains(&self, (x, y): (Twips, Twips)) -> bool {
        self.valid && x >= self.x_min && x <= self.x_max && y >= self.y_min && y <= self.y_max
    }
}
impl Default for BoundingBox {
fn default() -> Self {
Self {
x_min: Default::default(),
y_min: Default::default(),
x_max: Default::default(),
y_max: Default::default(),
valid: false,
}
}
}
/// Converts an SWF rectangle record into a valid bounding box.
impl From<swf::Rectangle> for BoundingBox {
    fn from(rect: swf::Rectangle) -> Self {
        Self {
            x_min: rect.x_min,
            y_min: rect.y_min,
            x_max: rect.x_max,
            y_max: rect.y_max,
            valid: true,
        }
    }
}
use crate::matrix::Matrix;
use swf::Twips;
/// An axis-aligned bounding box in twips.
#[derive(Clone, Debug)]
pub struct BoundingBox {
    pub x_min: Twips,
    pub y_min: Twips,
    pub x_max: Twips,
    pub y_max: Twips,
    /// `false` marks an empty/degenerate box that contains and intersects
    /// nothing (see `contains`/`intersects`).
    pub valid: bool,
}
impl BoundingBox {
    /// Returns the smallest axis-aligned box containing this box after its
    /// four corners are transformed by `matrix`.
    pub fn transform(&self, matrix: &Matrix) -> Self {
        if !self.valid {
            return Self::default();
        }
        use std::cmp::{max, min};
        let corners = [
            *matrix * (self.x_min, self.y_min),
            *matrix * (self.x_min, self.y_max),
            *matrix * (self.x_max, self.y_min),
            *matrix * (self.x_max, self.y_max),
        ];
        let (mut x_min, mut y_min) = corners[0];
        let (mut x_max, mut y_max) = corners[0];
        for &(x, y) in &corners[1..] {
            x_min = min(x_min, x);
            y_min = min(y_min, y);
            x_max = max(x_max, x);
            y_max = max(y_max, y);
        }
        BoundingBox {
            x_min,
            y_min,
            x_max,
            y_max,
            valid: true,
        }
    }

    /// `true` if the two boxes overlap; touching edges count as overlap.
    /// Invalid boxes never intersect anything.
    pub fn intersects(&self, other: &BoundingBox) -> bool {
        if !self.valid || !other.valid {
            return false;
        }
        use std::cmp::{max, min};
        let left = max(self.x_min, other.x_min);
        let top = max(self.y_min, other.y_min);
        let right = min(self.x_max, other.x_max);
        let bottom = min(self.y_max, other.y_max);
        left <= right && top <= bottom
    }

    /// `true` if the point lies inside or on the edge of a valid box.
    pub fn contains(&self, (x, y): (Twips, Twips)) -> bool {
        self.valid && x >= self.x_min && x <= self.x_max && y >= self.y_min && y <= self.y_max
    }
}
impl Default for BoundingBox {
fn default() -> Self {
Self {
x_min: Default::default(),
y_min: Default::default(),
x_max: Default::default(),
y_max: Default::default(),
valid: false,
}
}
}
/// Converts an SWF rectangle record into a valid bounding box.
impl From<swf::Rectangle> for BoundingBox {
    fn from(rect: swf::Rectangle) -> Self {
        Self {
            x_min: rect.x_min,
            y_min: rect.y_min,
            x_max: rect.x_max,
            y_max: rect.y_max,
            valid: true,
        }
    }
}

View File

@ -1,301 +1,301 @@
use crate::display_object::{DisplayObject, DisplayObjectBase};
use crate::events::ButtonEvent;
use crate::player::{RenderContext, UpdateContext};
use crate::prelude::*;
use std::collections::BTreeMap;
/// An AVM1 button display object (`DefineButton`/`DefineButton2`).
#[derive(Clone)]
pub struct Button<'gc> {
    base: DisplayObjectBase<'gc>,
    /// Immutable data shared by all instances of this button character.
    static_data: gc_arena::Gc<'gc, ButtonStatic>,
    /// The current interaction state (up/over/down).
    state: ButtonState,
    /// One depth-keyed child list per button state (indexed by the
    /// `*_STATE` constants below).
    children: [BTreeMap<Depth, DisplayNode<'gc>>; 4],
    /// Push-button vs. menu-button behavior (SWF `is_track_as_menu`).
    tracking: ButtonTracking,
}
// Indices into `Button::children` for each button state's display list.
const UP_STATE: usize = 0;
const OVER_STATE: usize = 1;
const DOWN_STATE: usize = 2;
const HIT_STATE: usize = 3;
impl<'gc> Button<'gc> {
    /// Instantiates a button from its SWF tag: builds the four per-state
    /// child lists from the button records and flattens the button's actions.
    pub fn from_swf_tag(
        button: &swf::Button,
        library: &crate::library::Library<'gc>,
        gc_context: gc_arena::MutationContext<'gc, '_>,
    ) -> Self {
        use swf::ButtonState;
        // One child map per button state (up/over/down/hit).
        let mut children = [
            BTreeMap::new(),
            BTreeMap::new(),
            BTreeMap::new(),
            BTreeMap::new(),
        ];
        for record in &button.records {
            match library.instantiate_display_object(record.id, gc_context) {
                Ok(child) => {
                    // Apply the record's placement transform to the child.
                    child
                        .write(gc_context)
                        .set_matrix(&record.matrix.clone().into());
                    child
                        .write(gc_context)
                        .set_color_transform(&record.color_transform.clone().into());
                    // A single record can place the child in several states.
                    for state in &record.states {
                        let i = match state {
                            ButtonState::Up => UP_STATE,
                            ButtonState::Over => OVER_STATE,
                            ButtonState::Down => DOWN_STATE,
                            ButtonState::HitTest => HIT_STATE,
                        };
                        children[i].insert(record.depth, child);
                    }
                }
                Err(error) => {
                    // A missing/invalid child is logged and skipped rather
                    // than failing the whole button.
                    log::error!(
                        "Button ID {}: could not instantiate child ID {}: {}",
                        button.id,
                        record.id,
                        error
                    );
                }
            }
        }
        // Flatten (action, conditions) pairs: one ButtonAction per condition,
        // all sharing the same action bytecode slice.
        let mut actions = vec![];
        for action in &button.actions {
            let action_data = crate::tag_utils::SwfSlice {
                data: std::sync::Arc::new(action.action_data.clone()),
                start: 0,
                end: action.action_data.len(),
            };
            for condition in &action.conditions {
                let button_action = ButtonAction {
                    action_data: action_data.clone(),
                    condition: *condition,
                    key_code: action.key_code,
                };
                actions.push(button_action);
            }
        }
        let static_data = ButtonStatic {
            id: button.id,
            actions,
        };
        Button {
            base: Default::default(),
            static_data: gc_arena::Gc::allocate(gc_context, static_data),
            children,
            state: self::ButtonState::Up,
            tracking: if button.is_track_as_menu {
                ButtonTracking::Menu
            } else {
                ButtonTracking::Push
            },
        }
    }
    /// Iterates the children of the given state's display list in depth order.
    fn children_in_state(
        &self,
        state: ButtonState,
    ) -> impl std::iter::DoubleEndedIterator<Item = &DisplayNode<'gc>> {
        let i = match state {
            ButtonState::Up => UP_STATE,
            ButtonState::Over => OVER_STATE,
            ButtonState::Down => DOWN_STATE,
            ButtonState::Hit => HIT_STATE,
        };
        self.children[i].values()
    }
    /// Mutable variant of `children_in_state`.
    fn children_in_state_mut(
        &mut self,
        state: ButtonState,
    ) -> impl std::iter::DoubleEndedIterator<Item = &mut DisplayNode<'gc>> {
        let i = match state {
            ButtonState::Up => UP_STATE,
            ButtonState::Over => OVER_STATE,
            ButtonState::Down => DOWN_STATE,
            ButtonState::Hit => HIT_STATE,
        };
        self.children[i].values_mut()
    }
    /// Handles an incoming button event: queues actions for the matching
    /// state transition, then moves the button into the new state.
    pub fn handle_button_event(
        &mut self,
        context: &mut crate::player::UpdateContext<'_, 'gc, '_>,
        event: ButtonEvent,
    ) {
        let new_state = match event {
            ButtonEvent::RollOut => ButtonState::Up,
            ButtonEvent::RollOver => ButtonState::Over,
            ButtonEvent::Press => ButtonState::Down,
            ButtonEvent::Release => ButtonState::Over,
            ButtonEvent::KeyPress(key) => {
                // Key presses fire actions but do not change the visual state.
                self.run_actions(context, swf::ButtonActionCondition::KeyPress, Some(key));
                self.state
            }
        };
        // Fire the actions matching this (old state, new state) transition.
        match (self.state, new_state) {
            (ButtonState::Up, ButtonState::Over) => {
                self.run_actions(context, swf::ButtonActionCondition::IdleToOverUp, None);
            }
            (ButtonState::Over, ButtonState::Up) => {
                self.run_actions(context, swf::ButtonActionCondition::OverUpToIdle, None);
            }
            (ButtonState::Over, ButtonState::Down) => {
                self.run_actions(context, swf::ButtonActionCondition::OverUpToOverDown, None);
            }
            (ButtonState::Down, ButtonState::Over) => {
                self.run_actions(context, swf::ButtonActionCondition::OverDownToOverUp, None);
            }
            _ => (),
        }
        self.state = new_state;
    }
    /// Queues every stored action whose condition (and key code, if any)
    /// matches for later execution by the player.
    fn run_actions(
        &mut self,
        context: &mut UpdateContext<'_, 'gc, '_>,
        condition: swf::ButtonActionCondition,
        key_code: Option<u8>,
    ) {
        if let Some(parent) = self.parent() {
            for action in &self.static_data.actions {
                if action.condition == condition && action.key_code == key_code {
                    // Note that AVM1 buttons run actions relative to their parent, not themselves.
                    context.actions.push((parent, action.action_data.clone()));
                }
            }
        }
    }
}
impl<'gc> DisplayObject<'gc> for Button<'gc> {
    impl_display_object!(base);
    fn run_frame(&mut self, context: &mut UpdateContext<'_, 'gc, '_>) {
        // TODO: Set parent for all children. Yuck... Do this on creation instead.
        for state in &mut self.children {
            for child in state.values_mut() {
                child
                    .write(context.gc_context)
                    .set_parent(Some(context.active_clip));
            }
        }
        // Only the children of the current state are run each frame.
        for child in self.children_in_state_mut(self.state) {
            child
                .write(context.gc_context)
                .set_parent(Some(context.active_clip));
            context.active_clip = *child;
            child.write(context.gc_context).run_frame(context);
        }
    }
    fn run_post_frame(&mut self, context: &mut UpdateContext<'_, 'gc, '_>) {
        for child in self.children_in_state_mut(self.state) {
            context.active_clip = *child;
            child.write(context.gc_context).run_post_frame(context);
        }
    }
    fn render(&self, context: &mut RenderContext<'_, 'gc>) {
        // Render only the current state's children, under this button's
        // transform.
        context.transform_stack.push(self.transform());
        for child in self.children_in_state(self.state) {
            child.read().render(context);
        }
        context.transform_stack.pop();
    }
    fn hit_test(&self, point: (Twips, Twips)) -> bool {
        // Use hit state to determine hit area; otherwise use current state.
        let hit_state = if !self.children[HIT_STATE].is_empty() {
            ButtonState::Hit
        } else {
            self.state
        };
        // Check front-most children first (reverse depth order).
        for child in self.children_in_state(hit_state).rev() {
            if child.read().world_bounds().contains(point) {
                return true;
            }
        }
        false
    }
    fn mouse_pick(
        &self,
        self_node: DisplayNode<'gc>,
        point: (Twips, Twips),
    ) -> Option<DisplayNode<'gc>> {
        // The button is hovered if the mouse is over any child nodes.
        if self.hit_test(point) {
            Some(self_node)
        } else {
            None
        }
    }
    fn as_button(&self) -> Option<&Self> {
        Some(self)
    }
    fn as_button_mut(&mut self) -> Option<&mut Self> {
        Some(self)
    }
}
// Manual `Collect` impl: every GC pointer reachable from the button must be
// traced — all children in every state, plus `base` and `static_data`.
unsafe impl<'gc> gc_arena::Collect for Button<'gc> {
    #[inline]
    fn trace(&self, cc: gc_arena::CollectionContext) {
        for state in &self.children {
            for child in state.values() {
                child.trace(cc);
            }
        }
        self.base.trace(cc);
        self.static_data.trace(cc);
    }
}
/// The interaction state of a button.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
enum ButtonState {
    Up,
    Over,
    Down,
    /// The invisible hit-test state; only used to determine the hit area.
    Hit,
}
/// A block of AVM1 bytecode together with the condition that triggers it.
#[derive(Clone)]
struct ButtonAction {
    action_data: crate::tag_utils::SwfSlice,
    condition: swf::ButtonActionCondition,
    /// Key code for `KeyPress` conditions; `None` otherwise.
    key_code: Option<u8>,
}
/// Push-button vs. menu-button behavior (SWF `is_track_as_menu` flag).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ButtonTracking {
    Push,
    Menu,
}
/// Static data shared between all instances of a button.
#[allow(dead_code)]
#[derive(Clone)]
struct ButtonStatic {
    /// Character ID of this button in the SWF.
    id: CharacterId,
    /// All actions for this button, one entry per (action, condition) pair.
    actions: Vec<ButtonAction>,
}
// `ButtonStatic` holds no GC pointers, so it never needs tracing.
unsafe impl<'gc> gc_arena::Collect for ButtonStatic {
    #[inline]
    fn needs_trace() -> bool {
        false
    }
}
use crate::display_object::{DisplayObject, DisplayObjectBase};
use crate::events::ButtonEvent;
use crate::player::{RenderContext, UpdateContext};
use crate::prelude::*;
use std::collections::BTreeMap;
/// An AVM1 button display object (`DefineButton`/`DefineButton2`).
#[derive(Clone)]
pub struct Button<'gc> {
    base: DisplayObjectBase<'gc>,
    /// Immutable data shared by all instances of this button character.
    static_data: gc_arena::Gc<'gc, ButtonStatic>,
    /// The current interaction state (up/over/down).
    state: ButtonState,
    /// One depth-keyed child list per button state (indexed by the
    /// `*_STATE` constants below).
    children: [BTreeMap<Depth, DisplayNode<'gc>>; 4],
    /// Push-button vs. menu-button behavior (SWF `is_track_as_menu`).
    tracking: ButtonTracking,
}
// Indices into `Button::children` for each button state's display list.
const UP_STATE: usize = 0;
const OVER_STATE: usize = 1;
const DOWN_STATE: usize = 2;
const HIT_STATE: usize = 3;
impl<'gc> Button<'gc> {
    /// Instantiates a button from its SWF tag: builds the four per-state
    /// child lists from the button records and flattens the button's actions.
    pub fn from_swf_tag(
        button: &swf::Button,
        library: &crate::library::Library<'gc>,
        gc_context: gc_arena::MutationContext<'gc, '_>,
    ) -> Self {
        use swf::ButtonState;
        // One child map per button state (up/over/down/hit).
        let mut children = [
            BTreeMap::new(),
            BTreeMap::new(),
            BTreeMap::new(),
            BTreeMap::new(),
        ];
        for record in &button.records {
            match library.instantiate_display_object(record.id, gc_context) {
                Ok(child) => {
                    // Apply the record's placement transform to the child.
                    child
                        .write(gc_context)
                        .set_matrix(&record.matrix.clone().into());
                    child
                        .write(gc_context)
                        .set_color_transform(&record.color_transform.clone().into());
                    // A single record can place the child in several states.
                    for state in &record.states {
                        let i = match state {
                            ButtonState::Up => UP_STATE,
                            ButtonState::Over => OVER_STATE,
                            ButtonState::Down => DOWN_STATE,
                            ButtonState::HitTest => HIT_STATE,
                        };
                        children[i].insert(record.depth, child);
                    }
                }
                Err(error) => {
                    // A missing/invalid child is logged and skipped rather
                    // than failing the whole button.
                    log::error!(
                        "Button ID {}: could not instantiate child ID {}: {}",
                        button.id,
                        record.id,
                        error
                    );
                }
            }
        }
        // Flatten (action, conditions) pairs: one ButtonAction per condition,
        // all sharing the same action bytecode slice.
        let mut actions = vec![];
        for action in &button.actions {
            let action_data = crate::tag_utils::SwfSlice {
                data: std::sync::Arc::new(action.action_data.clone()),
                start: 0,
                end: action.action_data.len(),
            };
            for condition in &action.conditions {
                let button_action = ButtonAction {
                    action_data: action_data.clone(),
                    condition: *condition,
                    key_code: action.key_code,
                };
                actions.push(button_action);
            }
        }
        let static_data = ButtonStatic {
            id: button.id,
            actions,
        };
        Button {
            base: Default::default(),
            static_data: gc_arena::Gc::allocate(gc_context, static_data),
            children,
            state: self::ButtonState::Up,
            tracking: if button.is_track_as_menu {
                ButtonTracking::Menu
            } else {
                ButtonTracking::Push
            },
        }
    }
    /// Iterates the children of the given state's display list in depth order.
    fn children_in_state(
        &self,
        state: ButtonState,
    ) -> impl std::iter::DoubleEndedIterator<Item = &DisplayNode<'gc>> {
        let i = match state {
            ButtonState::Up => UP_STATE,
            ButtonState::Over => OVER_STATE,
            ButtonState::Down => DOWN_STATE,
            ButtonState::Hit => HIT_STATE,
        };
        self.children[i].values()
    }
    /// Mutable variant of `children_in_state`.
    fn children_in_state_mut(
        &mut self,
        state: ButtonState,
    ) -> impl std::iter::DoubleEndedIterator<Item = &mut DisplayNode<'gc>> {
        let i = match state {
            ButtonState::Up => UP_STATE,
            ButtonState::Over => OVER_STATE,
            ButtonState::Down => DOWN_STATE,
            ButtonState::Hit => HIT_STATE,
        };
        self.children[i].values_mut()
    }
    /// Handles an incoming button event: queues actions for the matching
    /// state transition, then moves the button into the new state.
    pub fn handle_button_event(
        &mut self,
        context: &mut crate::player::UpdateContext<'_, 'gc, '_>,
        event: ButtonEvent,
    ) {
        let new_state = match event {
            ButtonEvent::RollOut => ButtonState::Up,
            ButtonEvent::RollOver => ButtonState::Over,
            ButtonEvent::Press => ButtonState::Down,
            ButtonEvent::Release => ButtonState::Over,
            ButtonEvent::KeyPress(key) => {
                // Key presses fire actions but do not change the visual state.
                self.run_actions(context, swf::ButtonActionCondition::KeyPress, Some(key));
                self.state
            }
        };
        // Fire the actions matching this (old state, new state) transition.
        match (self.state, new_state) {
            (ButtonState::Up, ButtonState::Over) => {
                self.run_actions(context, swf::ButtonActionCondition::IdleToOverUp, None);
            }
            (ButtonState::Over, ButtonState::Up) => {
                self.run_actions(context, swf::ButtonActionCondition::OverUpToIdle, None);
            }
            (ButtonState::Over, ButtonState::Down) => {
                self.run_actions(context, swf::ButtonActionCondition::OverUpToOverDown, None);
            }
            (ButtonState::Down, ButtonState::Over) => {
                self.run_actions(context, swf::ButtonActionCondition::OverDownToOverUp, None);
            }
            _ => (),
        }
        self.state = new_state;
    }
    /// Queues every stored action whose condition (and key code, if any)
    /// matches for later execution by the player.
    fn run_actions(
        &mut self,
        context: &mut UpdateContext<'_, 'gc, '_>,
        condition: swf::ButtonActionCondition,
        key_code: Option<u8>,
    ) {
        if let Some(parent) = self.parent() {
            for action in &self.static_data.actions {
                if action.condition == condition && action.key_code == key_code {
                    // Note that AVM1 buttons run actions relative to their parent, not themselves.
                    context.actions.push((parent, action.action_data.clone()));
                }
            }
        }
    }
}
impl<'gc> DisplayObject<'gc> for Button<'gc> {
    impl_display_object!(base);
    fn run_frame(&mut self, context: &mut UpdateContext<'_, 'gc, '_>) {
        // TODO: Set parent for all children. Yuck... Do this on creation instead.
        for state in &mut self.children {
            for child in state.values_mut() {
                child
                    .write(context.gc_context)
                    .set_parent(Some(context.active_clip));
            }
        }
        // Only the children of the current state are run each frame.
        for child in self.children_in_state_mut(self.state) {
            child
                .write(context.gc_context)
                .set_parent(Some(context.active_clip));
            context.active_clip = *child;
            child.write(context.gc_context).run_frame(context);
        }
    }
    fn run_post_frame(&mut self, context: &mut UpdateContext<'_, 'gc, '_>) {
        for child in self.children_in_state_mut(self.state) {
            context.active_clip = *child;
            child.write(context.gc_context).run_post_frame(context);
        }
    }
    fn render(&self, context: &mut RenderContext<'_, 'gc>) {
        // Render only the current state's children, under this button's
        // transform.
        context.transform_stack.push(self.transform());
        for child in self.children_in_state(self.state) {
            child.read().render(context);
        }
        context.transform_stack.pop();
    }
    fn hit_test(&self, point: (Twips, Twips)) -> bool {
        // Use hit state to determine hit area; otherwise use current state.
        let hit_state = if !self.children[HIT_STATE].is_empty() {
            ButtonState::Hit
        } else {
            self.state
        };
        // Check front-most children first (reverse depth order).
        for child in self.children_in_state(hit_state).rev() {
            if child.read().world_bounds().contains(point) {
                return true;
            }
        }
        false
    }
    fn mouse_pick(
        &self,
        self_node: DisplayNode<'gc>,
        point: (Twips, Twips),
    ) -> Option<DisplayNode<'gc>> {
        // The button is hovered if the mouse is over any child nodes.
        if self.hit_test(point) {
            Some(self_node)
        } else {
            None
        }
    }
    fn as_button(&self) -> Option<&Self> {
        Some(self)
    }
    fn as_button_mut(&mut self) -> Option<&mut Self> {
        Some(self)
    }
}
// Manual `Collect` impl: every GC pointer reachable from the button must be
// traced — all children in every state, plus `base` and `static_data`.
unsafe impl<'gc> gc_arena::Collect for Button<'gc> {
    #[inline]
    fn trace(&self, cc: gc_arena::CollectionContext) {
        for state in &self.children {
            for child in state.values() {
                child.trace(cc);
            }
        }
        self.base.trace(cc);
        self.static_data.trace(cc);
    }
}
/// The interaction state of a button.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
enum ButtonState {
    Up,
    Over,
    Down,
    /// The invisible hit-test state; only used to determine the hit area.
    Hit,
}
/// A block of AVM1 bytecode together with the condition that triggers it.
#[derive(Clone)]
struct ButtonAction {
    action_data: crate::tag_utils::SwfSlice,
    condition: swf::ButtonActionCondition,
    /// Key code for `KeyPress` conditions; `None` otherwise.
    key_code: Option<u8>,
}
/// Push-button vs. menu-button behavior (SWF `is_track_as_menu` flag).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ButtonTracking {
    Push,
    Menu,
}
/// Static data shared between all instances of a button.
#[allow(dead_code)]
#[derive(Clone)]
struct ButtonStatic {
    /// Character ID of this button in the SWF.
    id: CharacterId,
    /// All actions for this button, one entry per (action, condition) pair.
    actions: Vec<ButtonAction>,
}
// `ButtonStatic` holds no GC pointers, so it never needs tracing.
unsafe impl<'gc> gc_arena::Collect for ButtonStatic {
    #[inline]
    fn needs_trace() -> bool {
        false
    }
}

View File

@ -1,88 +1,88 @@
/// A color transform: each channel is computed as `out = in * mult + add`,
/// with the additive terms normalized to the 0.0-1.0 range.
#[derive(Copy, Clone, Debug)]
pub struct ColorTransform {
    pub r_mult: f32,
    pub g_mult: f32,
    pub b_mult: f32,
    pub a_mult: f32,
    pub r_add: f32,
    pub g_add: f32,
    pub b_add: f32,
    pub a_add: f32,
}
/// Converts the SWF tag representation; the SWF additive terms are 0-255
/// and are normalized here to 0.0-1.0.
impl From<swf::ColorTransform> for ColorTransform {
    fn from(color_transform: swf::ColorTransform) -> ColorTransform {
        ColorTransform {
            r_mult: color_transform.r_multiply,
            g_mult: color_transform.g_multiply,
            b_mult: color_transform.b_multiply,
            a_mult: color_transform.a_multiply,
            r_add: f32::from(color_transform.r_add) / 255.0,
            g_add: f32::from(color_transform.g_add) / 255.0,
            b_add: f32::from(color_transform.b_add) / 255.0,
            a_add: f32::from(color_transform.a_add) / 255.0,
        }
    }
}
impl ColorTransform {
    /// Returns `true` if this transform leaves colors unchanged:
    /// all multipliers are 1.0 and all additive terms are 0.0.
    #[allow(clippy::float_cmp)]
    pub fn is_identity(&self) -> bool {
        let mults_are_one = self.r_mult == 1.0
            && self.g_mult == 1.0
            && self.b_mult == 1.0
            && self.a_mult == 1.0;
        let adds_are_zero = self.r_add == 0.0
            && self.g_add == 0.0
            && self.b_add == 0.0
            && self.a_add == 0.0;
        mults_are_one && adds_are_zero
    }
}
impl std::default::Default for ColorTransform {
    /// The identity transform: multipliers 1.0, additive terms 0.0.
    fn default() -> ColorTransform {
        ColorTransform {
            r_mult: 1.0,
            g_mult: 1.0,
            b_mult: 1.0,
            a_mult: 1.0,
            r_add: 0.0,
            g_add: 0.0,
            b_add: 0.0,
            a_add: 0.0,
        }
    }
}
/// Composition of color transforms: applying `self * rhs` is equivalent to
/// applying `rhs` first, then `self` (each additive term of `rhs` is scaled
/// by the corresponding multiplier of `self`).
impl std::ops::Mul for ColorTransform {
    type Output = Self;
    fn mul(self, rhs: Self) -> Self {
        ColorTransform {
            r_mult: self.r_mult * rhs.r_mult,
            g_mult: self.g_mult * rhs.g_mult,
            b_mult: self.b_mult * rhs.b_mult,
            a_mult: self.a_mult * rhs.a_mult,
            r_add: self.r_mult * rhs.r_add + self.r_add,
            g_add: self.g_mult * rhs.g_add + self.g_add,
            b_add: self.b_mult * rhs.b_add + self.b_add,
            a_add: self.a_mult * rhs.a_add + self.a_add,
        }
    }
}
impl std::ops::MulAssign for ColorTransform {
fn mul_assign(&mut self, rhs: Self) {
*self = ColorTransform {
r_mult: self.r_mult * rhs.r_mult,
g_mult: self.g_mult * rhs.g_mult,
b_mult: self.b_mult * rhs.b_mult,
a_mult: self.a_mult * rhs.a_mult,
r_add: self.r_mult * rhs.r_add + self.r_add,
g_add: self.g_mult * rhs.b_add + self.g_add,
b_add: self.b_mult * rhs.g_add + self.b_add,
a_add: self.a_mult * rhs.a_add + self.a_add,
}
}
}
/// A color transform: each channel is computed as `out = in * mult + add`,
/// with the additive terms normalized to the 0.0-1.0 range.
#[derive(Copy, Clone, Debug)]
pub struct ColorTransform {
    pub r_mult: f32,
    pub g_mult: f32,
    pub b_mult: f32,
    pub a_mult: f32,
    pub r_add: f32,
    pub g_add: f32,
    pub b_add: f32,
    pub a_add: f32,
}
/// Converts the SWF tag representation; the SWF additive terms are 0-255
/// and are normalized here to 0.0-1.0.
impl From<swf::ColorTransform> for ColorTransform {
    fn from(color_transform: swf::ColorTransform) -> ColorTransform {
        ColorTransform {
            r_mult: color_transform.r_multiply,
            g_mult: color_transform.g_multiply,
            b_mult: color_transform.b_multiply,
            a_mult: color_transform.a_multiply,
            r_add: f32::from(color_transform.r_add) / 255.0,
            g_add: f32::from(color_transform.g_add) / 255.0,
            b_add: f32::from(color_transform.b_add) / 255.0,
            a_add: f32::from(color_transform.a_add) / 255.0,
        }
    }
}
impl ColorTransform {
    /// Returns `true` if this transform leaves colors unchanged:
    /// all multipliers are 1.0 and all additive terms are 0.0.
    #[allow(clippy::float_cmp)]
    pub fn is_identity(&self) -> bool {
        let mults_are_one = self.r_mult == 1.0
            && self.g_mult == 1.0
            && self.b_mult == 1.0
            && self.a_mult == 1.0;
        let adds_are_zero = self.r_add == 0.0
            && self.g_add == 0.0
            && self.b_add == 0.0
            && self.a_add == 0.0;
        mults_are_one && adds_are_zero
    }
}
impl std::default::Default for ColorTransform {
    /// The identity transform: multipliers 1.0, additive terms 0.0.
    fn default() -> ColorTransform {
        ColorTransform {
            r_mult: 1.0,
            g_mult: 1.0,
            b_mult: 1.0,
            a_mult: 1.0,
            r_add: 0.0,
            g_add: 0.0,
            b_add: 0.0,
            a_add: 0.0,
        }
    }
}
/// Composition of color transforms: applying `self * rhs` is equivalent to
/// applying `rhs` first, then `self` (each additive term of `rhs` is scaled
/// by the corresponding multiplier of `self`).
impl std::ops::Mul for ColorTransform {
    type Output = Self;
    fn mul(self, rhs: Self) -> Self {
        ColorTransform {
            r_mult: self.r_mult * rhs.r_mult,
            g_mult: self.g_mult * rhs.g_mult,
            b_mult: self.b_mult * rhs.b_mult,
            a_mult: self.a_mult * rhs.a_mult,
            r_add: self.r_mult * rhs.r_add + self.r_add,
            g_add: self.g_mult * rhs.g_add + self.g_add,
            b_add: self.b_mult * rhs.b_add + self.b_add,
            a_add: self.a_mult * rhs.a_add + self.a_add,
        }
    }
}
impl std::ops::MulAssign for ColorTransform {
fn mul_assign(&mut self, rhs: Self) {
*self = ColorTransform {
r_mult: self.r_mult * rhs.r_mult,
g_mult: self.g_mult * rhs.g_mult,
b_mult: self.b_mult * rhs.b_mult,
a_mult: self.a_mult * rhs.a_mult,
r_add: self.r_mult * rhs.r_add + self.r_add,
g_add: self.g_mult * rhs.b_add + self.g_add,
b_add: self.b_mult * rhs.g_add + self.b_add,
a_add: self.a_mult * rhs.a_add + self.a_add,
}
}
}

View File

@ -1,29 +1,29 @@
/// Mouse input events fed into the player.
#[allow(clippy::enum_variant_names)]
#[derive(Debug)]
pub enum PlayerEvent {
    MouseMove { x: f64, y: f64 },
    MouseUp { x: f64, y: f64 },
    MouseDown { x: f64, y: f64 },
    /// The mouse left the player area (no coordinates).
    MouseLeft,
}
/// The events that an AVM1 button can fire.
///
/// In Flash, these are created using `on` code on the button instance:
/// ```ignore
/// on(release) {
///     trace("Button clicked");
/// }
/// ```
#[derive(Debug)]
pub enum ButtonEvent {
    Press,
    Release,
    RollOut,
    RollOver,
    /// A key press, carrying the Flash virtual key code.
    KeyPress(KeyCode),
}
/// Flash virtual keycode, as carried by `ButtonEvent::KeyPress`.
/// TODO: This will eventually move to a separate module.
pub type KeyCode = u8;
/// Mouse input events fed into the player.
#[allow(clippy::enum_variant_names)]
#[derive(Debug)]
pub enum PlayerEvent {
    MouseMove { x: f64, y: f64 },
    MouseUp { x: f64, y: f64 },
    MouseDown { x: f64, y: f64 },
    /// The mouse left the player area (no coordinates).
    MouseLeft,
}
/// The events that an AVM1 button can fire.
///
/// In Flash, these are created using `on` code on the button instance:
/// ```ignore
/// on(release) {
///     trace("Button clicked");
/// }
/// ```
#[derive(Debug)]
pub enum ButtonEvent {
    Press,
    Release,
    RollOut,
    RollOver,
    /// A key press, carrying the Flash virtual key code.
    KeyPress(KeyCode),
}
/// Flash virtual keycode, as carried by `ButtonEvent::KeyPress`.
/// TODO: This will eventually move to a separate module.
pub type KeyCode = u8;

View File

@ -1,411 +1,411 @@
use crate::backend::render::{RenderBackend, ShapeHandle};
use crate::display_object::{DisplayObject, DisplayObjectBase};
use crate::player::{RenderContext, UpdateContext};
use crate::prelude::*;
use std::collections::HashMap;
use swf::Twips;
/// A morph shape (`DefineMorphShape`) that interpolates between a start and
/// an end shape according to a ratio in `0..=65535`.
#[derive(Clone)]
pub struct MorphShape<'gc> {
    base: DisplayObjectBase<'gc>,
    start: swf::MorphShape,
    end: swf::MorphShape,
    /// Cache of shapes already registered with the renderer, keyed by ratio.
    frames: HashMap<u16, ShapeHandle>,
    /// The currently displayed ratio.
    ratio: u16,
}
impl<'gc> MorphShape<'gc> {
    /// Builds a morph shape from its SWF tag, pre-registering the two
    /// endpoint ratios (0 and 65535) with the renderer.
    pub fn from_swf_tag(swf_tag: &swf::DefineMorphShape, renderer: &mut dyn RenderBackend) -> Self {
        // Convert the MorphShape into a normal Shape.
        // TODO(Herschel): impl From in swf crate?
        let mut morph_shape = Self {
            start: swf_tag.start.clone(),
            end: swf_tag.end.clone(),
            base: Default::default(),
            frames: HashMap::new(),
            ratio: 0,
        };
        morph_shape.register_ratio(renderer, 0);
        morph_shape.register_ratio(renderer, 65535);
        morph_shape
    }
pub fn register_ratio(&mut self, renderer: &mut dyn RenderBackend, ratio: u16) {
if self.frames.contains_key(&ratio) {
// Already registered.
return;
}
// Interpolate MorphShapes into a Shape.
use swf::{FillStyle, Gradient, LineStyle, ShapeRecord, ShapeStyles};
// Start shape is ratio 65535, end shape is ratio 0.
let b = f32::from(ratio) / 65535.0;
let a = 1.0 - b;
let fill_styles: Vec<FillStyle> = self
.start
.fill_styles
.iter()
.zip(self.end.fill_styles.iter())
.map(|(start, end)| match (start, end) {
(FillStyle::Color(start), FillStyle::Color(end)) => FillStyle::Color(Color {
r: (a * f32::from(start.r) + b * f32::from(end.r)) as u8,
g: (a * f32::from(start.g) + b * f32::from(end.g)) as u8,
b: (a * f32::from(start.b) + b * f32::from(end.b)) as u8,
a: (a * f32::from(start.a) + b * f32::from(end.a)) as u8,
}),
(FillStyle::LinearGradient(start), FillStyle::LinearGradient(end)) => {
let records: Vec<swf::GradientRecord> = start
.records
.iter()
.zip(end.records.iter())
.map(|(start, end)| swf::GradientRecord {
ratio: (f32::from(start.ratio) * a + f32::from(end.ratio) * b) as u8,
color: Color {
r: (a * f32::from(start.color.r) + b * f32::from(end.color.r))
as u8,
g: (a * f32::from(start.color.g) + b * f32::from(end.color.g))
as u8,
b: (a * f32::from(start.color.b) + b * f32::from(end.color.b))
as u8,
a: (a * f32::from(start.color.a) + b * f32::from(end.color.a))
as u8,
},
})
.collect();
FillStyle::LinearGradient(Gradient {
matrix: start.matrix.clone(),
spread: start.spread,
interpolation: start.interpolation,
records,
})
}
_ => {
log::info!("Unhandled morph shape combination: {:?} {:?}", start, end);
start.clone()
}
})
.collect();
let line_styles: Vec<LineStyle> = self
.start
.line_styles
.iter()
.zip(self.end.line_styles.iter())
.map(|(start, end)| LineStyle {
width: Twips::new(
((start.width.get() as f32) * a + (end.width.get() as f32) * b) as i32,
),
color: Color {
r: (a * f32::from(start.color.r) + b * f32::from(end.color.r)) as u8,
g: (a * f32::from(start.color.g) + b * f32::from(end.color.g)) as u8,
b: (a * f32::from(start.color.b) + b * f32::from(end.color.b)) as u8,
a: (a * f32::from(start.color.a) + b * f32::from(end.color.a)) as u8,
},
start_cap: start.start_cap,
end_cap: start.end_cap,
join_style: start.join_style,
fill_style: None,
allow_scale_x: start.allow_scale_x,
allow_scale_y: start.allow_scale_y,
is_pixel_hinted: start.is_pixel_hinted,
allow_close: start.allow_close,
})
.collect();
let mut shape = Vec::with_capacity(self.start.shape.len());
let mut start_iter = self.start.shape.iter();
let mut end_iter = self.end.shape.iter();
let mut start = start_iter.next();
let mut end = end_iter.next();
let mut start_x = Twips::new(0);
let mut start_y = Twips::new(0);
let mut end_x = Twips::new(0);
let mut end_y = Twips::new(0);
// TODO: Feels like this could be cleaned up a bit.
// We step through both the start records and end records, interpolating edges pairwise.
// Fill style/line style changes should only appear in the start records.
// However, StyleChangeRecord move_to can appear it both start and end records,
// and not necessarily in matching pairs; therefore, we have to keep track of the pen position
// in case one side is missing a move_to; it will implicitly use the last pen position.
while let (Some(s), Some(e)) = (start, end) {
match (s, e) {
(ShapeRecord::StyleChange(start_change), ShapeRecord::StyleChange(end_change)) => {
let mut style_change = start_change.clone();
if let Some((s_x, s_y)) = start_change.move_to {
if let Some((e_x, e_y)) = end_change.move_to {
start_x = s_x;
start_y = s_y;
end_x = e_x;
end_y = e_y;
style_change.move_to = Some((
Twips::new(
(start_x.get() as f32 * a + end_x.get() as f32 * b) as i32,
),
Twips::new(
(start_y.get() as f32 * a + end_y.get() as f32 * b) as i32,
),
));
} else {
panic!("Expected move_to for morph shape")
}
}
shape.push(ShapeRecord::StyleChange(style_change));
start = start_iter.next();
end = end_iter.next();
}
(ShapeRecord::StyleChange(start_change), _) => {
let mut style_change = start_change.clone();
if let Some((s_x, s_y)) = start_change.move_to {
start_x = s_x;
start_y = s_y;
style_change.move_to = Some((
Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32),
Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32),
));
}
shape.push(ShapeRecord::StyleChange(style_change));
Self::update_pos(&mut start_x, &mut start_y, s);
start = start_iter.next();
}
(_, ShapeRecord::StyleChange(end_change)) => {
let mut style_change = end_change.clone();
if let Some((e_x, e_y)) = end_change.move_to {
end_x = e_x;
end_y = e_y;
style_change.move_to = Some((
Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32),
Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32),
));
}
shape.push(ShapeRecord::StyleChange(style_change));
Self::update_pos(&mut end_x, &mut end_y, s);
end = end_iter.next();
continue;
}
_ => {
shape.push(Self::interpolate_edges(s, e, a));
Self::update_pos(&mut start_x, &mut start_y, s);
Self::update_pos(&mut end_x, &mut end_y, e);
start = start_iter.next();
end = end_iter.next();
}
}
}
let styles = ShapeStyles {
fill_styles,
line_styles,
};
let bounds = crate::shape_utils::calculate_shape_bounds(&shape[..]);
let shape = swf::Shape {
version: 4,
id: 0,
shape_bounds: bounds.clone(),
edge_bounds: bounds,
has_fill_winding_rule: false,
has_non_scaling_strokes: false,
has_scaling_strokes: true,
styles,
shape,
};
let shape_handle = renderer.register_shape(&shape);
self.frames.insert(ratio, shape_handle);
}
/// Advances the pen position (`x`, `y`) by a single shape record.
///
/// Edges move the pen by their deltas; a style change only moves the pen
/// when it carries an explicit `move_to`.
fn update_pos(x: &mut Twips, y: &mut Twips, record: &swf::ShapeRecord) {
    use swf::ShapeRecord;
    match record {
        ShapeRecord::StyleChange(ref style_change) => {
            if let Some((new_x, new_y)) = style_change.move_to {
                *x = new_x;
                *y = new_y;
            }
        }
        ShapeRecord::StraightEdge { delta_x, delta_y } => {
            *x += *delta_x;
            *y += *delta_y;
        }
        ShapeRecord::CurvedEdge {
            control_delta_x,
            control_delta_y,
            anchor_delta_x,
            anchor_delta_y,
        } => {
            // The pen ends at the anchor; both deltas are relative, so the
            // total displacement is their sum.
            *x += *control_delta_x + *anchor_delta_x;
            *y += *control_delta_y + *anchor_delta_y;
        }
    }
}
/// Interpolates between two edge records, weighting `start` by `a` and
/// `end` by `1 - a`, and returns the blended edge.
///
/// When a straight edge is paired with a curved edge, the straight edge is
/// first promoted to an equivalent curve (control point at the midpoint of
/// the segment) so the two can be blended component-wise.
///
/// Reaches `unreachable!` if either record is a style change; callers must
/// filter those out before calling.
fn interpolate_edges(
    start: &swf::ShapeRecord,
    end: &swf::ShapeRecord,
    a: f32,
) -> swf::ShapeRecord {
    use swf::ShapeRecord;
    let b = 1.0 - a;
    match (start, end) {
        (
            ShapeRecord::StraightEdge {
                delta_x: start_dx,
                delta_y: start_dy,
            },
            ShapeRecord::StraightEdge {
                delta_x: end_dx,
                delta_y: end_dy,
            },
        ) => ShapeRecord::StraightEdge {
            delta_x: Twips::new((start_dx.get() as f32 * a + end_dx.get() as f32 * b) as i32),
            delta_y: Twips::new((start_dy.get() as f32 * a + end_dy.get() as f32 * b) as i32),
        },

        (
            ShapeRecord::CurvedEdge {
                control_delta_x: start_cdx,
                control_delta_y: start_cdy,
                anchor_delta_x: start_adx,
                anchor_delta_y: start_ady,
            },
            ShapeRecord::CurvedEdge {
                control_delta_x: end_cdx,
                control_delta_y: end_cdy,
                anchor_delta_x: end_adx,
                anchor_delta_y: end_ady,
            },
        ) => ShapeRecord::CurvedEdge {
            control_delta_x: Twips::new(
                (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
            ),
            control_delta_y: Twips::new(
                (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
            ),
            anchor_delta_x: Twips::new(
                (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
            ),
            anchor_delta_y: Twips::new(
                (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
            ),
        },

        (
            ShapeRecord::StraightEdge {
                delta_x: start_dx,
                delta_y: start_dy,
            },
            ShapeRecord::CurvedEdge {
                control_delta_x: end_cdx,
                control_delta_y: end_cdy,
                anchor_delta_x: end_adx,
                anchor_delta_y: end_ady,
            },
        ) => {
            // Promote the straight edge to a curve: control point halfway
            // along the segment, anchor covering the remaining half.
            let start_cdx = *start_dx / 2;
            let start_cdy = *start_dy / 2;
            let start_adx = start_cdx;
            let start_ady = start_cdy;
            ShapeRecord::CurvedEdge {
                control_delta_x: Twips::new(
                    (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
                ),
                control_delta_y: Twips::new(
                    (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
                ),
                anchor_delta_x: Twips::new(
                    (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
                ),
                anchor_delta_y: Twips::new(
                    (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
                ),
            }
        }

        (
            ShapeRecord::CurvedEdge {
                control_delta_x: start_cdx,
                control_delta_y: start_cdy,
                anchor_delta_x: start_adx,
                anchor_delta_y: start_ady,
            },
            ShapeRecord::StraightEdge {
                delta_x: end_dx,
                delta_y: end_dy,
            },
        ) => {
            // Mirror case: promote the end straight edge to a curve.
            let end_cdx = *end_dx / 2;
            let end_cdy = *end_dy / 2;
            let end_adx = end_cdx;
            let end_ady = end_cdy;
            ShapeRecord::CurvedEdge {
                control_delta_x: Twips::new(
                    (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
                ),
                control_delta_y: Twips::new(
                    (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
                ),
                anchor_delta_x: Twips::new(
                    (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
                ),
                anchor_delta_y: Twips::new(
                    (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
                ),
            }
        }

        _ => unreachable!("{:?} {:?}", start, end),
    }
}
/// Returns the current morph ratio (0 = start shape, 65535 = end shape).
pub fn ratio(&self) -> u16 {
    self.ratio
}

/// Sets the morph ratio; the interpolated shape for it is registered
/// lazily on the next `run_frame` if not already cached.
pub fn set_ratio(&mut self, ratio: u16) {
    self.ratio = ratio;
}
}
impl<'gc> DisplayObject<'gc> for MorphShape<'gc> {
    impl_display_object!(base);

    fn as_morph_shape(&self) -> Option<&Self> {
        Some(self)
    }

    fn as_morph_shape_mut(&mut self) -> Option<&mut Self> {
        Some(self)
    }

    /// Ensures the shape for the current ratio is registered before render.
    fn run_frame(&mut self, context: &mut UpdateContext) {
        if !self.frames.contains_key(&self.ratio) {
            self.register_ratio(context.renderer, self.ratio);
        }
    }

    /// Renders the cached shape for the current ratio, if one exists.
    fn render(&self, context: &mut RenderContext) {
        context.transform_stack.push(self.transform());

        if let Some(shape) = self.frames.get(&self.ratio) {
            context
                .renderer
                .render_shape(*shape, context.transform_stack.transform());
        } else {
            // Should not normally happen: run_frame registers the ratio first.
            warn!("Missing ratio for morph shape");
        }

        context.transform_stack.pop();
    }
}
// Manual GC trace: only `base` is traced, on the assumption that the
// remaining fields (swf data, frames map, ratio) hold no GC pointers.
// NOTE(review): if a Gc-bearing field is ever added, it must be traced here.
unsafe impl<'gc> gc_arena::Collect for MorphShape<'gc> {
    #[inline]
    fn trace(&self, cc: gc_arena::CollectionContext) {
        self.base.trace(cc);
    }
}
use crate::backend::render::{RenderBackend, ShapeHandle};
use crate::display_object::{DisplayObject, DisplayObjectBase};
use crate::player::{RenderContext, UpdateContext};
use crate::prelude::*;
use std::collections::HashMap;
use swf::Twips;
/// Display object for an SWF `DefineMorphShape` tag.
///
/// Holds the start and end shapes and interpolates between them according
/// to `ratio` (0 = fully the start shape, 65535 = fully the end shape).
/// Each distinct ratio used is baked into a static shape and cached.
#[derive(Clone)]
pub struct MorphShape<'gc> {
    base: DisplayObjectBase<'gc>,
    // Shape displayed at ratio 0.
    start: swf::MorphShape,
    // Shape displayed at ratio 65535.
    end: swf::MorphShape,
    // Interpolated shapes already registered with the renderer, by ratio.
    frames: HashMap<u16, ShapeHandle>,
    // Current morph ratio.
    ratio: u16,
}
impl<'gc> MorphShape<'gc> {
    /// Creates a `MorphShape` from a `DefineMorphShape` tag, eagerly
    /// registering the two endpoint ratios (0 and 65535) with the renderer.
    /// Intermediate ratios are registered lazily in `run_frame`.
    pub fn from_swf_tag(swf_tag: &swf::DefineMorphShape, renderer: &mut dyn RenderBackend) -> Self {
        // Convert the MorphShape into a normal Shape.
        // TODO(Herschel): impl From in swf crate?
        let mut morph_shape = Self {
            start: swf_tag.start.clone(),
            end: swf_tag.end.clone(),
            base: Default::default(),
            frames: HashMap::new(),
            ratio: 0,
        };
        morph_shape.register_ratio(renderer, 0);
        morph_shape.register_ratio(renderer, 65535);
        morph_shape
    }

    /// Interpolates the start and end shapes at `ratio`, registers the
    /// resulting static shape with the renderer, and caches the handle in
    /// `self.frames`. No-op if this ratio is already registered.
    pub fn register_ratio(&mut self, renderer: &mut dyn RenderBackend, ratio: u16) {
        if self.frames.contains_key(&ratio) {
            // Already registered.
            return;
        }

        // Interpolate MorphShapes into a Shape.
        use swf::{FillStyle, Gradient, LineStyle, ShapeRecord, ShapeStyles};

        // `a` weights the start shape and `b` the end shape:
        // ratio 0 is entirely the start shape; ratio 65535 entirely the end shape.
        let b = f32::from(ratio) / 65535.0;
        let a = 1.0 - b;

        // Interpolate fill styles pairwise. Only solid-color and
        // linear-gradient pairs are handled; other combinations fall back
        // to the start style unchanged.
        let fill_styles: Vec<FillStyle> = self
            .start
            .fill_styles
            .iter()
            .zip(self.end.fill_styles.iter())
            .map(|(start, end)| match (start, end) {
                (FillStyle::Color(start), FillStyle::Color(end)) => FillStyle::Color(Color {
                    r: (a * f32::from(start.r) + b * f32::from(end.r)) as u8,
                    g: (a * f32::from(start.g) + b * f32::from(end.g)) as u8,
                    b: (a * f32::from(start.b) + b * f32::from(end.b)) as u8,
                    a: (a * f32::from(start.a) + b * f32::from(end.a)) as u8,
                }),
                (FillStyle::LinearGradient(start), FillStyle::LinearGradient(end)) => {
                    let records: Vec<swf::GradientRecord> = start
                        .records
                        .iter()
                        .zip(end.records.iter())
                        .map(|(start, end)| swf::GradientRecord {
                            ratio: (f32::from(start.ratio) * a + f32::from(end.ratio) * b) as u8,
                            color: Color {
                                r: (a * f32::from(start.color.r) + b * f32::from(end.color.r))
                                    as u8,
                                g: (a * f32::from(start.color.g) + b * f32::from(end.color.g))
                                    as u8,
                                b: (a * f32::from(start.color.b) + b * f32::from(end.color.b))
                                    as u8,
                                a: (a * f32::from(start.color.a) + b * f32::from(end.color.a))
                                    as u8,
                            },
                        })
                        .collect();
                    // NOTE(review): matrix/spread/interpolation come from the
                    // start style rather than being interpolated.
                    FillStyle::LinearGradient(Gradient {
                        matrix: start.matrix.clone(),
                        spread: start.spread,
                        interpolation: start.interpolation,
                        records,
                    })
                }
                _ => {
                    log::info!("Unhandled morph shape combination: {:?} {:?}", start, end);
                    start.clone()
                }
            })
            .collect();

        // Interpolate line styles pairwise; width and color blend, while
        // caps/joins/flags are taken from the start style.
        let line_styles: Vec<LineStyle> = self
            .start
            .line_styles
            .iter()
            .zip(self.end.line_styles.iter())
            .map(|(start, end)| LineStyle {
                width: Twips::new(
                    ((start.width.get() as f32) * a + (end.width.get() as f32) * b) as i32,
                ),
                color: Color {
                    r: (a * f32::from(start.color.r) + b * f32::from(end.color.r)) as u8,
                    g: (a * f32::from(start.color.g) + b * f32::from(end.color.g)) as u8,
                    b: (a * f32::from(start.color.b) + b * f32::from(end.color.b)) as u8,
                    a: (a * f32::from(start.color.a) + b * f32::from(end.color.a)) as u8,
                },
                start_cap: start.start_cap,
                end_cap: start.end_cap,
                join_style: start.join_style,
                fill_style: None,
                allow_scale_x: start.allow_scale_x,
                allow_scale_y: start.allow_scale_y,
                is_pixel_hinted: start.is_pixel_hinted,
                allow_close: start.allow_close,
            })
            .collect();

        let mut shape = Vec::with_capacity(self.start.shape.len());
        let mut start_iter = self.start.shape.iter();
        let mut end_iter = self.end.shape.iter();
        let mut start = start_iter.next();
        let mut end = end_iter.next();
        let mut start_x = Twips::new(0);
        let mut start_y = Twips::new(0);
        let mut end_x = Twips::new(0);
        let mut end_y = Twips::new(0);
        // TODO: Feels like this could be cleaned up a bit.
        // We step through both the start records and end records, interpolating edges pairwise.
        // Fill style/line style changes should only appear in the start records.
        // However, StyleChangeRecord move_to can appear it both start and end records,
        // and not necessarily in matching pairs; therefore, we have to keep track of the pen position
        // in case one side is missing a move_to; it will implicitly use the last pen position.
        while let (Some(s), Some(e)) = (start, end) {
            match (s, e) {
                (ShapeRecord::StyleChange(start_change), ShapeRecord::StyleChange(end_change)) => {
                    // Paired style changes: interpolate the move_to positions.
                    let mut style_change = start_change.clone();
                    if let Some((s_x, s_y)) = start_change.move_to {
                        if let Some((e_x, e_y)) = end_change.move_to {
                            start_x = s_x;
                            start_y = s_y;
                            end_x = e_x;
                            end_y = e_y;
                            style_change.move_to = Some((
                                Twips::new(
                                    (start_x.get() as f32 * a + end_x.get() as f32 * b) as i32,
                                ),
                                Twips::new(
                                    (start_y.get() as f32 * a + end_y.get() as f32 * b) as i32,
                                ),
                            ));
                        } else {
                            panic!("Expected move_to for morph shape")
                        }
                    }
                    shape.push(ShapeRecord::StyleChange(style_change));
                    start = start_iter.next();
                    end = end_iter.next();
                }
                (ShapeRecord::StyleChange(start_change), _) => {
                    // Unpaired style change on the start side: blend its
                    // move_to against the current end pen position.
                    let mut style_change = start_change.clone();
                    if let Some((s_x, s_y)) = start_change.move_to {
                        start_x = s_x;
                        start_y = s_y;
                        style_change.move_to = Some((
                            Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32),
                            Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32),
                        ));
                    }
                    shape.push(ShapeRecord::StyleChange(style_change));
                    Self::update_pos(&mut start_x, &mut start_y, s);
                    start = start_iter.next();
                }
                (_, ShapeRecord::StyleChange(end_change)) => {
                    // Unpaired style change on the end side: blend its
                    // move_to against the current start pen position.
                    let mut style_change = end_change.clone();
                    if let Some((e_x, e_y)) = end_change.move_to {
                        end_x = e_x;
                        end_y = e_y;
                        style_change.move_to = Some((
                            Twips::new((start_x.get() as f32 * a + end_x.get() as f32 * b) as i32),
                            Twips::new((start_y.get() as f32 * a + end_y.get() as f32 * b) as i32),
                        ));
                    }
                    shape.push(ShapeRecord::StyleChange(style_change));
                    // Fix: advance the *end* pen with the end record `e`;
                    // previously this passed `s`, applying the start shape's
                    // edge deltas to the end pen position (cf. the symmetric
                    // start-side arm above, which passes `s`).
                    Self::update_pos(&mut end_x, &mut end_y, e);
                    end = end_iter.next();
                    continue;
                }
                _ => {
                    // Both records are edges: blend them directly.
                    shape.push(Self::interpolate_edges(s, e, a));
                    Self::update_pos(&mut start_x, &mut start_y, s);
                    Self::update_pos(&mut end_x, &mut end_y, e);
                    start = start_iter.next();
                    end = end_iter.next();
                }
            }
        }

        let styles = ShapeStyles {
            fill_styles,
            line_styles,
        };

        let bounds = crate::shape_utils::calculate_shape_bounds(&shape[..]);
        let shape = swf::Shape {
            version: 4,
            id: 0,
            shape_bounds: bounds.clone(),
            edge_bounds: bounds,
            has_fill_winding_rule: false,
            has_non_scaling_strokes: false,
            has_scaling_strokes: true,
            styles,
            shape,
        };

        let shape_handle = renderer.register_shape(&shape);
        self.frames.insert(ratio, shape_handle);
    }

    /// Advances the pen position (`x`, `y`) by a single shape record.
    fn update_pos(x: &mut Twips, y: &mut Twips, record: &swf::ShapeRecord) {
        use swf::ShapeRecord;
        match record {
            ShapeRecord::StraightEdge { delta_x, delta_y } => {
                *x += *delta_x;
                *y += *delta_y;
            }
            ShapeRecord::CurvedEdge {
                control_delta_x,
                control_delta_y,
                anchor_delta_x,
                anchor_delta_y,
            } => {
                // The pen ends at the anchor; both deltas are relative.
                *x += *control_delta_x + *anchor_delta_x;
                *y += *control_delta_y + *anchor_delta_y;
            }
            ShapeRecord::StyleChange(ref style_change) => {
                if let Some((move_x, move_y)) = style_change.move_to {
                    *x = move_x;
                    *y = move_y;
                }
            }
        }
    }

    /// Interpolates two edge records, weighting `start` by `a` and `end` by
    /// `1 - a`. Straight edges paired with curves are promoted to curves
    /// (control point at the segment midpoint) before blending.
    /// Unreachable for style-change records; callers filter those first.
    fn interpolate_edges(
        start: &swf::ShapeRecord,
        end: &swf::ShapeRecord,
        a: f32,
    ) -> swf::ShapeRecord {
        use swf::ShapeRecord;
        let b = 1.0 - a;
        match (start, end) {
            (
                ShapeRecord::StraightEdge {
                    delta_x: start_dx,
                    delta_y: start_dy,
                },
                ShapeRecord::StraightEdge {
                    delta_x: end_dx,
                    delta_y: end_dy,
                },
            ) => ShapeRecord::StraightEdge {
                delta_x: Twips::new((start_dx.get() as f32 * a + end_dx.get() as f32 * b) as i32),
                delta_y: Twips::new((start_dy.get() as f32 * a + end_dy.get() as f32 * b) as i32),
            },

            (
                ShapeRecord::CurvedEdge {
                    control_delta_x: start_cdx,
                    control_delta_y: start_cdy,
                    anchor_delta_x: start_adx,
                    anchor_delta_y: start_ady,
                },
                ShapeRecord::CurvedEdge {
                    control_delta_x: end_cdx,
                    control_delta_y: end_cdy,
                    anchor_delta_x: end_adx,
                    anchor_delta_y: end_ady,
                },
            ) => ShapeRecord::CurvedEdge {
                control_delta_x: Twips::new(
                    (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
                ),
                control_delta_y: Twips::new(
                    (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
                ),
                anchor_delta_x: Twips::new(
                    (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
                ),
                anchor_delta_y: Twips::new(
                    (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
                ),
            },

            (
                ShapeRecord::StraightEdge {
                    delta_x: start_dx,
                    delta_y: start_dy,
                },
                ShapeRecord::CurvedEdge {
                    control_delta_x: end_cdx,
                    control_delta_y: end_cdy,
                    anchor_delta_x: end_adx,
                    anchor_delta_y: end_ady,
                },
            ) => {
                // Promote the straight start edge to an equivalent curve.
                let start_cdx = *start_dx / 2;
                let start_cdy = *start_dy / 2;
                let start_adx = start_cdx;
                let start_ady = start_cdy;
                ShapeRecord::CurvedEdge {
                    control_delta_x: Twips::new(
                        (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
                    ),
                    control_delta_y: Twips::new(
                        (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
                    ),
                    anchor_delta_x: Twips::new(
                        (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
                    ),
                    anchor_delta_y: Twips::new(
                        (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
                    ),
                }
            }

            (
                ShapeRecord::CurvedEdge {
                    control_delta_x: start_cdx,
                    control_delta_y: start_cdy,
                    anchor_delta_x: start_adx,
                    anchor_delta_y: start_ady,
                },
                ShapeRecord::StraightEdge {
                    delta_x: end_dx,
                    delta_y: end_dy,
                },
            ) => {
                // Promote the straight end edge to an equivalent curve.
                let end_cdx = *end_dx / 2;
                let end_cdy = *end_dy / 2;
                let end_adx = end_cdx;
                let end_ady = end_cdy;
                ShapeRecord::CurvedEdge {
                    control_delta_x: Twips::new(
                        (start_cdx.get() as f32 * a + end_cdx.get() as f32 * b) as i32,
                    ),
                    control_delta_y: Twips::new(
                        (start_cdy.get() as f32 * a + end_cdy.get() as f32 * b) as i32,
                    ),
                    anchor_delta_x: Twips::new(
                        (start_adx.get() as f32 * a + end_adx.get() as f32 * b) as i32,
                    ),
                    anchor_delta_y: Twips::new(
                        (start_ady.get() as f32 * a + end_ady.get() as f32 * b) as i32,
                    ),
                }
            }

            _ => unreachable!("{:?} {:?}", start, end),
        }
    }

    /// Returns the current morph ratio (0 = start shape, 65535 = end shape).
    pub fn ratio(&self) -> u16 {
        self.ratio
    }

    /// Sets the morph ratio; the shape for it is registered lazily.
    pub fn set_ratio(&mut self, ratio: u16) {
        self.ratio = ratio;
    }
}
impl<'gc> DisplayObject<'gc> for MorphShape<'gc> {
    impl_display_object!(base);

    fn as_morph_shape(&self) -> Option<&Self> {
        Some(self)
    }

    fn as_morph_shape_mut(&mut self) -> Option<&mut Self> {
        Some(self)
    }

    /// Ensures the shape for the current ratio is registered before render.
    fn run_frame(&mut self, context: &mut UpdateContext) {
        if !self.frames.contains_key(&self.ratio) {
            self.register_ratio(context.renderer, self.ratio);
        }
    }

    /// Renders the cached shape for the current ratio, if one exists.
    fn render(&self, context: &mut RenderContext) {
        context.transform_stack.push(self.transform());

        if let Some(shape) = self.frames.get(&self.ratio) {
            context
                .renderer
                .render_shape(*shape, context.transform_stack.transform());
        } else {
            // Should not normally happen: run_frame registers the ratio first.
            warn!("Missing ratio for morph shape");
        }

        context.transform_stack.pop();
    }
}
// Manual GC trace: only `base` is traced, on the assumption that the
// remaining fields (swf data, frames map, ratio) hold no GC pointers.
// NOTE(review): if a Gc-bearing field is ever added, it must be traced here.
unsafe impl<'gc> gc_arena::Collect for MorphShape<'gc> {
    #[inline]
    fn trace(&self, cc: gc_arena::CollectionContext) {
        self.base.trace(cc);
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,52 +1,52 @@
use swf::TagCode;
/// Result of a tag decode callback; errors are logged and skipped by `decode_tags`.
pub type DecodeResult = Result<(), Box<dyn std::error::Error>>;
/// An SWF tag-stream reader over an in-memory buffer.
pub type SwfStream<R> = swf::read::Reader<std::io::Cursor<R>>;

/// A cheaply cloneable view into shared SWF data (`data[start..end]`).
#[derive(Debug, Clone)]
pub struct SwfSlice {
    pub data: std::sync::Arc<Vec<u8>>,
    pub start: usize,
    pub end: usize,
}

impl AsRef<[u8]> for SwfSlice {
    fn as_ref(&self) -> &[u8] {
        // Panics if start/end are out of bounds of `data`.
        &self.data[self.start..self.end]
    }
}
/// Reads tags from `reader`, invoking `tag_callback` for each recognized tag
/// until `stop_tag` is reached.
///
/// Unknown tag codes are logged and skipped, and errors returned by the
/// callback are logged without aborting decoding. After each tag the reader
/// is repositioned past the tag's payload, so callbacks need not consume it
/// fully. I/O or parse errors while reading tag headers are propagated.
pub fn decode_tags<'a, R, F>(
    reader: &'a mut SwfStream<R>,
    mut tag_callback: F,
    stop_tag: TagCode,
) -> Result<(), Box<dyn std::error::Error>>
where
    R: 'a + AsRef<[u8]>,
    F: FnMut(&mut SwfStream<R>, TagCode, usize) -> DecodeResult,
{
    loop {
        let (raw_code, tag_len) = reader.read_tag_code_and_length()?;
        let next_tag_pos = reader.get_ref().position() + tag_len as u64;

        match TagCode::from_u16(raw_code) {
            Some(tag) => {
                // Callback failures are logged, never propagated.
                if tag_callback(reader, tag, tag_len).is_err() {
                    log::error!("Error running definition tag: {:?}", tag);
                }
                if tag == stop_tag {
                    break;
                }
            }
            None => log::warn!("Unknown tag code: {:?}", raw_code),
        }

        // Jump past the payload regardless of what the callback consumed.
        use std::io::{Seek, SeekFrom};
        reader.get_mut().seek(SeekFrom::Start(next_tag_pos))?;
    }

    Ok(())
}
use swf::TagCode;
/// Result of a tag decode callback; errors are logged and skipped by `decode_tags`.
pub type DecodeResult = Result<(), Box<dyn std::error::Error>>;
/// An SWF tag-stream reader over an in-memory buffer.
pub type SwfStream<R> = swf::read::Reader<std::io::Cursor<R>>;

/// A cheaply cloneable view into shared SWF data (`data[start..end]`).
#[derive(Debug, Clone)]
pub struct SwfSlice {
    pub data: std::sync::Arc<Vec<u8>>,
    pub start: usize,
    pub end: usize,
}

impl AsRef<[u8]> for SwfSlice {
    fn as_ref(&self) -> &[u8] {
        // Panics if start/end are out of bounds of `data`.
        &self.data[self.start..self.end]
    }
}
/// Reads tags from `reader`, invoking `tag_callback` for each recognized tag
/// until `stop_tag` is reached.
///
/// Unknown tag codes are logged and skipped, and errors returned by the
/// callback are logged without aborting decoding. After each tag the reader
/// is repositioned past the tag's payload, so callbacks need not consume it
/// fully. I/O or parse errors while reading tag headers are propagated.
pub fn decode_tags<'a, R, F>(
    reader: &'a mut SwfStream<R>,
    mut tag_callback: F,
    stop_tag: TagCode,
) -> Result<(), Box<dyn std::error::Error>>
where
    R: 'a + AsRef<[u8]>,
    F: FnMut(&mut SwfStream<R>, TagCode, usize) -> DecodeResult,
{
    loop {
        let (raw_code, tag_len) = reader.read_tag_code_and_length()?;
        let next_tag_pos = reader.get_ref().position() + tag_len as u64;

        match TagCode::from_u16(raw_code) {
            Some(tag) => {
                // Callback failures are logged, never propagated.
                if tag_callback(reader, tag, tag_len).is_err() {
                    log::error!("Error running definition tag: {:?}", tag);
                }
                if tag == stop_tag {
                    break;
                }
            }
            None => log::warn!("Unknown tag code: {:?}", raw_code),
        }

        // Jump past the payload regardless of what the callback consumed.
        use std::io::{Seek, SeekFrom};
        reader.get_mut().seek(SeekFrom::Start(next_tag_pos))?;
    }

    Ok(())
}

View File

@ -31,7 +31,10 @@ impl TransformStack {
let cur_transform = self.transform();
let matrix = cur_transform.matrix * transform.matrix;
let color_transform = cur_transform.color_transform * transform.color_transform;
self.0.push(Transform { matrix, color_transform });
self.0.push(Transform {
matrix,
color_transform,
});
}
pub fn pop(&mut self) {

View File

@ -1,80 +1,80 @@
//! Tests running SWFs in a headless Ruffle instance.
//!
//! Trace output can be compared with correct output from the official Flash Payer.
use log::{Metadata, Record};
use ruffle_core::backend::{audio::NullAudioBackend, render::NullRenderer};
use ruffle_core::Player;
use std::cell::RefCell;
type Error = Box<dyn std::error::Error>;
// This macro generates test cases for a given list of SWFs.
// Each entry expands to a `#[test]` fn that runs
// "tests/swfs/<path>/test.swf" for the given number of frames and compares
// trace output against "tests/swfs/<path>/output.txt" via `test_swf`.
macro_rules! swf_tests {
    ($(($name:ident, $path:expr, $num_frames:literal),)*) => {
        $(
        #[test]
        fn $name() -> Result<(), Error> {
            test_swf(
                concat!("tests/swfs/", $path, "/test.swf"),
                $num_frames,
                concat!("tests/swfs/", $path, "/output.txt"),
            )
        }
        )*
    }
}
// List of SWFs to test.
// Format: (test_name, test_folder, number_of_frames_to_run)
// The test folder is relative to core/tests/swfs.
// Inside the folder is expected to be "test.swf" and "output.txt" with the correct output.
swf_tests! {
    (single_frame, "avm1/single_frame", 2),
    (looping, "avm1/looping", 6),
}
/// Loads an SWF and runs it through the Ruffle core for a number of frames.
/// Tests that the trace output matches the given expected output.
///
/// Windows line endings in the expected file are normalized so the
/// comparison is platform-independent.
fn test_swf(swf_path: &str, num_frames: u32, expected_output_path: &str) -> Result<(), Error> {
    // Install the trace-capturing logger; subsequent calls fail harmlessly
    // (the Result is deliberately ignored).
    let _ = log::set_logger(&TRACE_LOGGER).map(|()| log::set_max_level(log::LevelFilter::Info));

    let expected_output = std::fs::read_to_string(expected_output_path)?.replace("\r\n", "\n");

    let swf_data = std::fs::read(swf_path)?;
    let mut player = Player::new(NullRenderer, NullAudioBackend::new(), swf_data)?;

    for _ in 0..num_frames {
        player.run_frame();
    }

    assert_eq!(trace_log(), expected_output);

    Ok(())
}
thread_local! {
    // Accumulated trace output for the current test thread.
    static TRACE_LOG: RefCell<String> = RefCell::new(String::new());
}

static TRACE_LOGGER: TraceLogger = TraceLogger;

/// `TraceLogger` captures output from AVM trace actions into a String.
struct TraceLogger;

/// Returns a copy of the trace output captured so far on this thread.
fn trace_log() -> String {
    TRACE_LOG.with(|log| log.borrow().clone())
}

impl log::Log for TraceLogger {
    // Only records emitted with target "avm_trace" are captured.
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.target() == "avm_trace"
    }

    fn log(&self, record: &Record) {
        if self.enabled(record.metadata()) {
            TRACE_LOG.with(|log| log.borrow_mut().push_str(&format!("{}\n", record.args())));
        }
    }

    fn flush(&self) {}
}
//! Tests running SWFs in a headless Ruffle instance.
//!
//! Trace output can be compared with correct output from the official Flash Payer.
use log::{Metadata, Record};
use ruffle_core::backend::{audio::NullAudioBackend, render::NullRenderer};
use ruffle_core::Player;
use std::cell::RefCell;
type Error = Box<dyn std::error::Error>;
// This macro generates test cases for a given list of SWFs.
// Each entry expands to a `#[test]` fn that runs
// "tests/swfs/<path>/test.swf" for the given number of frames and compares
// trace output against "tests/swfs/<path>/output.txt" via `test_swf`.
macro_rules! swf_tests {
    ($(($name:ident, $path:expr, $num_frames:literal),)*) => {
        $(
        #[test]
        fn $name() -> Result<(), Error> {
            test_swf(
                concat!("tests/swfs/", $path, "/test.swf"),
                $num_frames,
                concat!("tests/swfs/", $path, "/output.txt"),
            )
        }
        )*
    }
}
// List of SWFs to test.
// Format: (test_name, test_folder, number_of_frames_to_run)
// The test folder is relative to core/tests/swfs.
// Inside the folder is expected to be "test.swf" and "output.txt" with the correct output.
swf_tests! {
    (single_frame, "avm1/single_frame", 2),
    (looping, "avm1/looping", 6),
}
/// Loads an SWF and runs it through the Ruffle core for a number of frames.
/// Tests that the trace output matches the given expected output.
///
/// Windows line endings in the expected file are normalized so the
/// comparison is platform-independent.
fn test_swf(swf_path: &str, num_frames: u32, expected_output_path: &str) -> Result<(), Error> {
    // Install the trace-capturing logger; subsequent calls fail harmlessly
    // (the Result is deliberately ignored).
    let _ = log::set_logger(&TRACE_LOGGER).map(|()| log::set_max_level(log::LevelFilter::Info));

    let expected_output = std::fs::read_to_string(expected_output_path)?.replace("\r\n", "\n");

    let swf_data = std::fs::read(swf_path)?;
    let mut player = Player::new(NullRenderer, NullAudioBackend::new(), swf_data)?;

    for _ in 0..num_frames {
        player.run_frame();
    }

    assert_eq!(trace_log(), expected_output);

    Ok(())
}
thread_local! {
    // Accumulated trace output for the current test thread.
    static TRACE_LOG: RefCell<String> = RefCell::new(String::new());
}

static TRACE_LOGGER: TraceLogger = TraceLogger;

/// `TraceLogger` captures output from AVM trace actions into a String.
struct TraceLogger;

/// Returns a copy of the trace output captured so far on this thread.
fn trace_log() -> String {
    TRACE_LOG.with(|log| log.borrow().clone())
}

impl log::Log for TraceLogger {
    // Only records emitted with target "avm_trace" are captured.
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.target() == "avm_trace"
    }

    fn log(&self, record: &Record) {
        if self.enabled(record.metadata()) {
            TRACE_LOG.with(|log| log.borrow_mut().push_str(&format!("{}\n", record.args())));
        }
    }

    fn flush(&self) {}
}

View File

@ -1,156 +1,156 @@
use generational_arena::Arena;
use ruffle_core::backend::audio::decoders::{stream_tag_reader, AdpcmDecoder, Decoder, Mp3Decoder};
use ruffle_core::backend::audio::{swf, AudioBackend, AudioStreamHandle, SoundHandle};
use std::io::Cursor;
use std::sync::Arc;
/// Audio backend that plays sound through the rodio crate.
pub struct RodioAudioBackend {
    // Registered event sounds, indexed by SoundHandle.
    sounds: Arena<Sound>,
    // Sinks of currently playing event sounds; pruned in `tick`.
    active_sounds: Arena<rodio::Sink>,
    // Active timeline audio streams.
    streams: Arena<AudioStream>,
    device: rodio::Device,
}

#[allow(dead_code)]
struct AudioStream {
    clip_id: swf::CharacterId,
    info: swf::SoundStreamHead,
    sink: rodio::Sink,
}

#[allow(dead_code)]
struct Sound {
    format: swf::SoundFormat,
    // Raw sound data as stored in the SWF, shared with decoders.
    data: Arc<Vec<u8>>,
}
impl RodioAudioBackend {
    /// Creates a backend using the system's default audio output device.
    ///
    /// Errors if no output device is available.
    pub fn new() -> Result<Self, Box<dyn std::error::Error>> {
        Ok(Self {
            sounds: Arena::new(),
            streams: Arena::new(),
            active_sounds: Arena::new(),
            device: rodio::default_output_device().ok_or("Unable to create output device")?,
        })
    }
}
impl AudioBackend for RodioAudioBackend {
    /// Registers an event sound by copying its format and raw data.
    fn register_sound(
        &mut self,
        swf_sound: &swf::Sound,
    ) -> Result<SoundHandle, Box<dyn std::error::Error>> {
        let sound = Sound {
            format: swf_sound.format.clone(),
            data: Arc::new(swf_sound.data.clone()),
        };
        Ok(self.sounds.insert(sound))
    }

    /// Starts playback of a timeline audio stream.
    ///
    /// NOTE(review): the decoder here is always MP3 regardless of
    /// `stream_info.stream_format.compression` — confirm other stream
    /// compression types are handled elsewhere or unsupported.
    fn start_stream(
        &mut self,
        clip_id: swf::CharacterId,
        clip_data: ruffle_core::tag_utils::SwfSlice,
        stream_info: &swf::SoundStreamHead,
    ) -> AudioStreamHandle {
        let sink = rodio::Sink::new(&self.device);

        let format = &stream_info.stream_format;
        let decoder = Mp3Decoder::new(
            if format.is_stereo { 2 } else { 1 },
            format.sample_rate.into(),
            stream_tag_reader(clip_data),
        );

        let stream = AudioStream {
            clip_id,
            info: stream_info.clone(),
            sink,
        };
        stream.sink.append(DecoderSource(Box::new(decoder)));
        self.streams.insert(stream)
    }

    /// Plays a previously registered event sound, decoding it according to
    /// its compression type. Unsupported compression types panic.
    fn play_sound(&mut self, sound: SoundHandle) {
        let sound = &self.sounds[sound];
        use swf::AudioCompression;
        match sound.format.compression {
            AudioCompression::Uncompressed => {
                // Reassemble 16-bit little-endian samples from byte pairs.
                // NOTE(review): panics on odd-length data (reads data[i + 1]).
                let mut data = Vec::with_capacity(sound.data.len() / 2);
                let mut i = 0;
                while i < sound.data.len() {
                    let val = i16::from(sound.data[i]) | (i16::from(sound.data[i + 1]) << 8);
                    data.push(val);
                    i += 2;
                }
                let buffer = rodio::buffer::SamplesBuffer::new(
                    if sound.format.is_stereo { 2 } else { 1 },
                    sound.format.sample_rate.into(),
                    data,
                );
                let sink = rodio::Sink::new(&self.device);
                sink.append(buffer);
                self.active_sounds.insert(sink);
            }
            AudioCompression::Adpcm => {
                let decoder = AdpcmDecoder::new(
                    Cursor::new(sound.data.to_vec()),
                    sound.format.is_stereo,
                    sound.format.sample_rate,
                )
                .unwrap();
                let sink = rodio::Sink::new(&self.device);
                sink.append(DecoderSource(Box::new(decoder)));
                self.active_sounds.insert(sink);
            }
            AudioCompression::Mp3 => {
                let decoder = Mp3Decoder::new(
                    if sound.format.is_stereo { 2 } else { 1 },
                    sound.format.sample_rate.into(),
                    Cursor::new(sound.data.to_vec()),
                );
                let sink = rodio::Sink::new(&self.device);
                sink.append(DecoderSource(Box::new(decoder)));
                self.active_sounds.insert(sink);
            }
            _ => unimplemented!(),
        }
    }

    /// Drops sinks whose sounds have finished playing.
    fn tick(&mut self) {
        self.active_sounds.retain(|_, sink| !sink.empty());
    }
}
/// Adapts a Ruffle audio `Decoder` into a rodio `Source` of i16 samples.
struct DecoderSource(Box<dyn Decoder + Send>);

impl Iterator for DecoderSource {
    type Item = i16;

    #[inline]
    fn next(&mut self) -> Option<i16> {
        self.0.next()
    }
}

impl rodio::Source for DecoderSource {
    #[inline]
    fn current_frame_len(&self) -> Option<usize> {
        // Unknown; samples are produced until the decoder is exhausted.
        None
    }

    #[inline]
    fn channels(&self) -> u16 {
        self.0.num_channels().into()
    }

    #[inline]
    fn sample_rate(&self) -> u32 {
        self.0.sample_rate().into()
    }

    #[inline]
    fn total_duration(&self) -> Option<std::time::Duration> {
        // Stream length is not known up front.
        None
    }
}
use generational_arena::Arena;
use ruffle_core::backend::audio::decoders::{stream_tag_reader, AdpcmDecoder, Decoder, Mp3Decoder};
use ruffle_core::backend::audio::{swf, AudioBackend, AudioStreamHandle, SoundHandle};
use std::io::Cursor;
use std::sync::Arc;
/// Audio backend that plays sound through the rodio crate.
pub struct RodioAudioBackend {
    // Registered event sounds, indexed by SoundHandle.
    sounds: Arena<Sound>,
    // Sinks of currently playing event sounds; pruned in `tick`.
    active_sounds: Arena<rodio::Sink>,
    // Active timeline audio streams.
    streams: Arena<AudioStream>,
    device: rodio::Device,
}

#[allow(dead_code)]
struct AudioStream {
    clip_id: swf::CharacterId,
    info: swf::SoundStreamHead,
    sink: rodio::Sink,
}

#[allow(dead_code)]
struct Sound {
    format: swf::SoundFormat,
    // Raw sound data as stored in the SWF, shared with decoders.
    data: Arc<Vec<u8>>,
}
impl RodioAudioBackend {
    /// Creates a backend using the system's default audio output device.
    ///
    /// Errors if no output device is available.
    pub fn new() -> Result<Self, Box<dyn std::error::Error>> {
        Ok(Self {
            sounds: Arena::new(),
            streams: Arena::new(),
            active_sounds: Arena::new(),
            device: rodio::default_output_device().ok_or("Unable to create output device")?,
        })
    }
}
impl AudioBackend for RodioAudioBackend {
    /// Registers an event sound by copying its format and raw data.
    fn register_sound(
        &mut self,
        swf_sound: &swf::Sound,
    ) -> Result<SoundHandle, Box<dyn std::error::Error>> {
        let sound = Sound {
            format: swf_sound.format.clone(),
            data: Arc::new(swf_sound.data.clone()),
        };
        Ok(self.sounds.insert(sound))
    }

    /// Starts playback of a timeline audio stream.
    ///
    /// NOTE(review): the decoder here is always MP3 regardless of
    /// `stream_info.stream_format.compression` — confirm other stream
    /// compression types are handled elsewhere or unsupported.
    fn start_stream(
        &mut self,
        clip_id: swf::CharacterId,
        clip_data: ruffle_core::tag_utils::SwfSlice,
        stream_info: &swf::SoundStreamHead,
    ) -> AudioStreamHandle {
        let sink = rodio::Sink::new(&self.device);

        let format = &stream_info.stream_format;
        let decoder = Mp3Decoder::new(
            if format.is_stereo { 2 } else { 1 },
            format.sample_rate.into(),
            stream_tag_reader(clip_data),
        );

        let stream = AudioStream {
            clip_id,
            info: stream_info.clone(),
            sink,
        };
        stream.sink.append(DecoderSource(Box::new(decoder)));
        self.streams.insert(stream)
    }

    /// Plays a previously registered event sound, decoding it according to
    /// its compression type. Unsupported compression types panic.
    fn play_sound(&mut self, sound: SoundHandle) {
        let sound = &self.sounds[sound];
        use swf::AudioCompression;
        match sound.format.compression {
            AudioCompression::Uncompressed => {
                // Reassemble 16-bit little-endian samples from byte pairs.
                // NOTE(review): panics on odd-length data (reads data[i + 1]).
                let mut data = Vec::with_capacity(sound.data.len() / 2);
                let mut i = 0;
                while i < sound.data.len() {
                    let val = i16::from(sound.data[i]) | (i16::from(sound.data[i + 1]) << 8);
                    data.push(val);
                    i += 2;
                }
                let buffer = rodio::buffer::SamplesBuffer::new(
                    if sound.format.is_stereo { 2 } else { 1 },
                    sound.format.sample_rate.into(),
                    data,
                );
                let sink = rodio::Sink::new(&self.device);
                sink.append(buffer);
                self.active_sounds.insert(sink);
            }
            AudioCompression::Adpcm => {
                let decoder = AdpcmDecoder::new(
                    Cursor::new(sound.data.to_vec()),
                    sound.format.is_stereo,
                    sound.format.sample_rate,
                )
                .unwrap();
                let sink = rodio::Sink::new(&self.device);
                sink.append(DecoderSource(Box::new(decoder)));
                self.active_sounds.insert(sink);
            }
            AudioCompression::Mp3 => {
                let decoder = Mp3Decoder::new(
                    if sound.format.is_stereo { 2 } else { 1 },
                    sound.format.sample_rate.into(),
                    Cursor::new(sound.data.to_vec()),
                );
                let sink = rodio::Sink::new(&self.device);
                sink.append(DecoderSource(Box::new(decoder)));
                self.active_sounds.insert(sink);
            }
            _ => unimplemented!(),
        }
    }

    /// Drops sinks whose sounds have finished playing.
    fn tick(&mut self) {
        self.active_sounds.retain(|_, sink| !sink.empty());
    }
}
/// Adapts a Ruffle audio `Decoder` into a rodio `Source` of i16 samples.
struct DecoderSource(Box<dyn Decoder + Send>);

impl Iterator for DecoderSource {
    type Item = i16;

    #[inline]
    fn next(&mut self) -> Option<i16> {
        self.0.next()
    }
}

impl rodio::Source for DecoderSource {
    #[inline]
    fn current_frame_len(&self) -> Option<usize> {
        // Unknown; samples are produced until the decoder is exhausted.
        None
    }

    #[inline]
    fn channels(&self) -> u16 {
        self.0.num_channels().into()
    }

    #[inline]
    fn sample_rate(&self) -> u32 {
        self.0.sample_rate().into()
    }

    #[inline]
    fn total_duration(&self) -> Option<std::time::Duration> {
        // Stream length is not known up front.
        None
    }
}

File diff suppressed because it is too large Load Diff

1
rustfmt.toml Normal file
View File

@ -0,0 +1 @@
newline_style = "Unix"

View File

@ -1,439 +1,500 @@
use fnv::FnvHashMap;
use generational_arena::Arena;
use ruffle_core::backend::audio::decoders::{AdpcmDecoder, Mp3Decoder};
use ruffle_core::backend::audio::{AudioBackend, AudioStreamHandle, SoundHandle};
use ruffle_core::backend::audio::swf::{self, AudioCompression};
use std::cell::{Cell, RefCell};
use std::rc::Rc;
use wasm_bindgen::{closure::Closure, JsCast};
use web_sys::AudioContext;
/// Audio backend for the web, built on the Web Audio API (`AudioContext`).
pub struct WebAudioBackend {
    context: AudioContext,
    sounds: Arena<Sound>,
    // Stream data per clip, accumulated during preload.
    stream_data: FnvHashMap<swf::CharacterId, StreamData>,
    id_to_sound: FnvHashMap<swf::CharacterId, SoundHandle>,
    // Scratch buffers reused when building audio buffers.
    left_samples: Vec<f32>,
    right_samples: Vec<f32>,
}

thread_local! {
    // Active audio streams; thread-local so JS audio callbacks can reach
    // them without holding a reference to the backend.
    static STREAMS: RefCell<Arena<AudioStream>> = RefCell::new(Arena::new());
    // Count of sounds currently being decoded asynchronously.
    static NUM_SOUNDS_LOADING: Cell<u32> = Cell::new(0);
}

struct StreamData {
    format: swf::SoundFormat,
    audio_data: Vec<u8>,
    num_sample_frames: u32,
    samples_per_block: u32,
}

type AudioBufferPtr = Rc<RefCell<web_sys::AudioBuffer>>;

// A sound is stored either as a pre-decoded JS AudioBuffer or as an
// on-the-fly decoded stream using a ScriptProcessorNode.
#[allow(dead_code)]
enum SoundSource {
    // Pre-decoded audio buffer.
    AudioBuffer(AudioBufferPtr),
    // Decode the audio data on the fly from a byte stream.
    Decoder(Vec<u8>),
}

struct Sound {
    format: swf::SoundFormat,
    source: SoundSource,
}

// Boxed iterator of decoded i16 samples (interleaved when stereo —
// NOTE(review): interleaving assumed from decoder usage; confirm).
type Decoder = Box<dyn Iterator<Item=i16>>;

#[allow(dead_code)]
enum AudioStream {
    // Streaming decode driven by a ScriptProcessorNode callback.
    Decoder { decoder: Decoder, is_stereo: bool, },// closure: Option<Closure<Box<FnMut(web_sys::AudioProcessingEvent)>>> } ,
    // Fully decoded buffer played via an AudioBufferSourceNode.
    AudioBuffer { node: web_sys::AudioBufferSourceNode },
}

type Error = Box<dyn std::error::Error>;
impl WebAudioBackend {
    /// Creates a backend with a fresh Web Audio `AudioContext`.
    ///
    /// Errors if the browser refuses to create an AudioContext.
    pub fn new() -> Result<Self, Error> {
        let context = AudioContext::new().map_err(|_| "Unable to create AudioContext")?;
        Ok(Self {
            context,
            sounds: Arena::new(),
            stream_data: FnvHashMap::default(),
            id_to_sound: FnvHashMap::default(),
            left_samples: vec![],
            right_samples: vec![],
        })
    }
    /// Starts playback of a registered sound and returns the handle of the
    /// stream tracked in the thread-local `STREAMS` arena.
    fn play_sound_internal(&mut self, handle: SoundHandle) -> SoundHandle {
        let sound = self.sounds.get(handle).unwrap();
        match &sound.source {
            SoundSource::AudioBuffer(audio_buffer) => {
                // Pre-decoded sound: play through an AudioBufferSourceNode.
                let audio_buffer = audio_buffer.borrow();
                let node = self.context.create_buffer_source().unwrap();
                node.set_buffer(Some(&*audio_buffer));
                node
                    .connect_with_audio_node(&self.context.destination())
                    .unwrap();
                node.start().unwrap();
                let audio_stream = AudioStream::AudioBuffer {
                    node
                };
                STREAMS.with(|streams| {
                    let mut streams = streams.borrow_mut();
                    streams.insert(audio_stream)
                })
            }
            SoundSource::Decoder(audio_data) => {
                // Compressed sound: decode on the fly via a ScriptProcessorNode.
                let decoder: Decoder = match sound.format.compression {
                    AudioCompression::Adpcm => Box::new(AdpcmDecoder::new(
                        std::io::Cursor::new(audio_data.to_vec()),
                        sound.format.is_stereo,
                        sound.format.sample_rate
                    ).unwrap()),
                    AudioCompression::Mp3 => Box::new(Mp3Decoder::new(
                        if sound.format.is_stereo {
                            2
                        } else {
                            1
                        },
                        sound.format.sample_rate.into(),
                        std::io::Cursor::new(audio_data.to_vec())//&sound.data[..]
                    )),
                    _ => unimplemented!()
                };
                // Resample if the sound's rate differs from the AudioContext's.
                let decoder: Decoder = if sound.format.sample_rate != self.context.sample_rate() as u16 {
                    Box::new(resample(decoder, sound.format.sample_rate, self.context.sample_rate() as u16, sound.format.is_stereo))
                } else {
                    decoder
                };
                let audio_stream = AudioStream::Decoder {
                    decoder,
                    is_stereo: sound.format.is_stereo,
                    //closure: None,
                };
                STREAMS.with(|streams| {
                    let mut streams = streams.borrow_mut();
                    let stream_handle = streams.insert(audio_stream);
                    let script_processor_node = self.context.create_script_processor_with_buffer_size_and_number_of_input_channels_and_number_of_output_channels(4096, 0, if sound.format.is_stereo { 2 } else { 1 }).unwrap();
                    let script_node = script_processor_node.clone();
                    // Per-callback: feed decoded samples to the node; tear the
                    // stream down once the decoder is exhausted.
                    let closure = Closure::wrap(Box::new(move |event| {
                        STREAMS.with(|streams| {
                            let mut streams = streams.borrow_mut();
                            let audio_stream = streams.get_mut(stream_handle).unwrap();
                            let complete = WebAudioBackend::update_script_processor(audio_stream, event);
                            if complete {
                                streams.remove(stream_handle);
                                script_node.disconnect().unwrap();
                            }
                        })
                    }) as Box<dyn FnMut(web_sys::AudioProcessingEvent)>);
                    script_processor_node.set_onaudioprocess(Some(closure.as_ref().unchecked_ref()));
                    // TODO: This will leak memory per playing sound. Remember and properly drop the closure.
                    closure.forget();
                    stream_handle
                })
            }
        }
    }
fn decompress_to_audio_buffer(&mut self, format: &swf::SoundFormat, audio_data: &[u8], num_sample_frames: u32) -> AudioBufferPtr {
if format.compression == AudioCompression::Mp3 {
return self.decompress_mp3_to_audio_buffer(format, audio_data, num_sample_frames);
}
// This sucks. Firefox doesn't like 5512Hz sample rate, so manually double up the samples.
// 5512Hz should be relatively rare.
let audio_buffer = if format.sample_rate > 5512 {
self.context.create_buffer(
if format.is_stereo { 2 } else { 1 },
num_sample_frames,
f32::from(format.sample_rate)
).unwrap()
} else {
self.context.create_buffer(
if format.is_stereo { 2 } else { 1 },
num_sample_frames * 2,
11025.0
).unwrap()
};
match format.compression {
AudioCompression::Uncompressed => {
// TODO: Check for is_16_bit.
self.left_samples = audio_data.iter().step_by(2).cloned().map(|n| f32::from(n) / 32767.0).collect();
if format.is_stereo {
self.right_samples = audio_data.iter().skip(1).step_by(2).cloned().map(|n| f32::from(n) / 32767.0).collect();
}
}
AudioCompression::Adpcm => {
let mut decoder = AdpcmDecoder::new(audio_data,
format.is_stereo,
format.sample_rate
).unwrap();
if format.is_stereo {
while let (Some(l), Some(r)) = (decoder.next(), decoder.next()) {
self.left_samples.push(f32::from(l) / 32767.0);
self.right_samples.push(f32::from(r) / 32767.0);
}
} else {
self.left_samples = decoder.map(|n| f32::from(n) / 32767.0).collect();
}
}
_ => unimplemented!(),
}
// Double up samples for 5512Hz audio to satisfy Firefox.
if format.sample_rate == 5512 {
let mut samples = Vec::with_capacity(self.left_samples.len() * 2);
for sample in &self.left_samples {
samples.push(*sample);
samples.push(*sample);
}
self.left_samples = samples;
if format.is_stereo {
let mut samples = Vec::with_capacity(self.right_samples.len() * 2);
for sample in &self.right_samples {
samples.push(*sample);
samples.push(*sample);
}
self.right_samples = samples;
}
}
audio_buffer.copy_to_channel(&mut self.left_samples, 0).unwrap();
if format.is_stereo {
audio_buffer.copy_to_channel(&mut self.right_samples, 1).unwrap();
}
Rc::new(RefCell::new(audio_buffer))
}
fn decompress_mp3_to_audio_buffer(&mut self, format: &swf::SoundFormat, audio_data: &[u8], _num_sample_frames: u32) -> AudioBufferPtr {
// We use the Web decodeAudioData API to decode MP3 data.
// TODO: Is it possible we finish loading before the MP3 is decoding?
let audio_buffer = self.context.create_buffer(1, 1, self.context.sample_rate()).unwrap();
let audio_buffer = Rc::new(RefCell::new(audio_buffer));
let data_array = unsafe { js_sys::Uint8Array::view(&audio_data[..]) };
let array_buffer = data_array.buffer().slice_with_end(
data_array.byte_offset(),
data_array.byte_offset() + data_array.byte_length(),
);
NUM_SOUNDS_LOADING.with(|n| n.set(n.get() + 1));
let _num_channels = if format.is_stereo { 2 } else { 1 };
let buffer_ptr = Rc::clone(&audio_buffer);
let success_closure = Closure::wrap(Box::new(move |buffer: web_sys::AudioBuffer| {
*buffer_ptr.borrow_mut() = buffer;
NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1));
})
as Box<dyn FnMut(web_sys::AudioBuffer)>);
let error_closure = Closure::wrap(Box::new(move || {
log::info!("Error decoding MP3 audio");
NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1));
})
as Box<dyn FnMut()>);
self.context.decode_audio_data_with_success_callback_and_error_callback(
&array_buffer,
success_closure.as_ref().unchecked_ref(),
error_closure.as_ref().unchecked_ref()
).unwrap();
// TODO: This will leak memory (once per decompressed MP3).
// Not a huge deal as there are probably not many MP3s in an SWF.
success_closure.forget();
error_closure.forget();
audio_buffer
}
fn update_script_processor(
audio_stream: &mut AudioStream,
event: web_sys::AudioProcessingEvent,
) -> bool {
let mut complete = false;
let mut left_samples = vec![];
let mut right_samples = vec![];
if let AudioStream::Decoder { decoder, is_stereo, .. } = audio_stream {
let output_buffer = event.output_buffer().unwrap();
let num_frames = output_buffer.length() as usize;
for _ in 0..num_frames {
if let (Some(l), Some(r)) = (decoder.next(), decoder.next()) {
left_samples.push(f32::from(l) / 32767.0);
if *is_stereo {
right_samples.push(f32::from(r) / 32767.0);
}
} else {
complete = true;
break;
}
}
output_buffer.copy_to_channel(&mut left_samples[..], 0).unwrap();
if *is_stereo {
output_buffer.copy_to_channel(&mut right_samples[..], 1).unwrap();
}
}
complete
}
}
impl AudioBackend for WebAudioBackend {
    /// Registers an event sound, eagerly decompressing it into an audio buffer.
    fn register_sound(&mut self, sound: &swf::Sound) -> Result<SoundHandle, Error> {
        // Slice off latency seek for MP3 data.
        let data = if sound.format.compression == AudioCompression::Mp3 {
            &sound.data[2..]
        } else {
            &sound.data[..]
        };
        let sound = Sound {
            format: sound.format.clone(),
            source: SoundSource::AudioBuffer(self.decompress_to_audio_buffer(&sound.format, data, sound.num_samples)),
        };
        Ok(self.sounds.insert(sound))
    }
    /// Begins accumulating stream data for a clip; keeps any existing entry.
    fn preload_sound_stream_head(&mut self, clip_id: swf::CharacterId, stream_info: &swf::SoundStreamHead) {
        self.stream_data.entry(clip_id).or_insert_with(|| {
            StreamData {
                format: stream_info.stream_format.clone(),
                audio_data: vec![],
                num_sample_frames: 0,
                samples_per_block: stream_info.num_samples_per_block.into(),
            }
        });
    }
    /// Appends one SoundStreamBlock's audio data to the clip's pending stream,
    /// updating the running sample-frame count.
    fn preload_sound_stream_block(&mut self, clip_id: swf::CharacterId, audio_data: &[u8]) {
        if let Some(stream) = self.stream_data.get_mut(&clip_id) {
            match stream.format.compression {
                AudioCompression::Uncompressed | AudioCompression::UncompressedUnknownEndian => {
                    // Bytes per sample frame = channels * bytes-per-sample.
                    let frame_len = if stream.format.is_stereo { 2 } else { 1 } * if stream.format.is_16_bit { 2 } else { 1 };
                    stream.num_sample_frames += (audio_data.len() as u32) / frame_len;
                    stream.audio_data.extend_from_slice(audio_data);
                }
                AudioCompression::Mp3 => {
                    // NOTE(review): this reads bytes [2..4] big-endian, but the
                    // comment below says the sample count is in the *first* two
                    // bytes — confirm field order/endianness against the SWF spec.
                    let num_sample_frames = (u32::from(audio_data[2]) << 8) | u32::from(audio_data[3]);
                    stream.num_sample_frames += num_sample_frames;
                    // MP3 streaming data:
                    // First two bytes = number of samples
                    // Second two bytes = 'latency seek' (amount to skip when seeking to this frame)
                    stream.audio_data.extend_from_slice(&audio_data[4..]);
                }
                _ => {
                    // TODO: This is a guess and will vary slightly from block to block!
                    stream.num_sample_frames += stream.samples_per_block;
                }
            }
        }
    }
    /// Finalizes a stream: decompresses the accumulated data into a sound and
    /// maps the clip ID to it for later `start_stream` calls.
    fn preload_sound_stream_end(&mut self, clip_id: swf::CharacterId) {
        if let Some(stream) = self.stream_data.remove(&clip_id) {
            if !stream.audio_data.is_empty()
            {
                let audio_buffer = self.decompress_to_audio_buffer(&stream.format, &stream.audio_data[..], stream.num_sample_frames);
                let handle = self.sounds.insert(Sound {
                    format: stream.format,
                    source: SoundSource::AudioBuffer(audio_buffer),
                });
                self.id_to_sound.insert(clip_id, handle);
            }
        }
    }
    fn play_sound(&mut self, sound: SoundHandle) {
        self.play_sound_internal(sound);
    }
    /// Starts a previously preloaded stream. Panics if the clip has no
    /// preloaded stream sound.
    fn start_stream(
        &mut self,
        clip_id: swf::CharacterId,
        _clip_data: ruffle_core::tag_utils::SwfSlice,
        _stream_info: &swf::SoundStreamHead,
    ) -> AudioStreamHandle {
        if let Some(&handle) = self.id_to_sound.get(&clip_id) {
            self.play_sound_internal(handle)
        } else {
            log::error!("Missing stream for clip {}", clip_id);
            // TODO: Return dummy sound.
            panic!();
        }
    }
    /// `true` when no asynchronous MP3 decodes are outstanding.
    fn is_loading_complete(&self) -> bool {
        NUM_SOUNDS_LOADING.with(|n| n.get() == 0)
    }
    fn prime_audio(&mut self) {
        // Allow audio to start playing after a user gesture.
        let _ = self.context.resume();
    }
}
// Janky resampling code.
// TODO: Clean this up.
/// Linearly resamples a signed 16-bit sample stream from `input_sample_rate`
/// to `output_sample_rate`.
///
/// The input is interleaved L/R pairs when `is_stereo`, otherwise one sample
/// per frame (duplicated into both channels). The output is always one
/// interleaved left/right pair per output frame. An empty input yields an
/// empty iterator (previously this panicked on `unwrap`).
fn resample(mut input: impl Iterator<Item=i16>, input_sample_rate: u16, output_sample_rate: u16, is_stereo: bool) -> impl Iterator<Item=i16> {
    // Prime the interpolation window with the first two input frames.
    let (mut left0, mut right0) = if is_stereo {
        (input.next(), input.next())
    } else {
        let sample = input.next();
        (sample, sample)
    };
    let (mut left1, mut right1) = if is_stereo {
        (input.next(), input.next())
    } else {
        let sample = input.next();
        (sample, sample)
    };
    // Fall back to silence when the input is empty instead of panicking;
    // the iterator below then terminates immediately.
    let (mut left, mut right) = (left0.unwrap_or(0), right0.unwrap_or(0));
    let dt_input = 1.0 / f64::from(input_sample_rate);
    let dt_output = 1.0 / f64::from(output_sample_rate);
    let mut t = 0.0;
    let mut cur_channel = 0;
    std::iter::from_fn(move || {
        // Emit the right-channel sample computed on the previous call.
        if cur_channel == 1 {
            cur_channel = 0;
            return Some(right);
        }
        if let (Some(l0), Some(r0), Some(l1), Some(r1)) = (left0, right0, left1, right1) {
            // Linear interpolation between frame 0 and frame 1 at fraction `a`.
            let a = t / dt_input;
            let l0 = f64::from(l0);
            let l1 = f64::from(l1);
            let r0 = f64::from(r0);
            let r1 = f64::from(r1);
            left = (l0 + (l1 - l0) * a) as i16;
            right = (r0 + (r1 - r0) * a) as i16;
            t += dt_output;
            // Slide the input window whenever output time passes an input frame.
            while t >= dt_input {
                t -= dt_input;
                left0 = left1;
                right0 = right1;
                left1 = input.next();
                if is_stereo {
                    right1 = input.next();
                } else {
                    right1 = left1;
                }
            }
            cur_channel = 1;
            Some(left)
        } else {
            None
        }
    })
}
use fnv::FnvHashMap;
use generational_arena::Arena;
use ruffle_core::backend::audio::decoders::{AdpcmDecoder, Mp3Decoder};
use ruffle_core::backend::audio::swf::{self, AudioCompression};
use ruffle_core::backend::audio::{AudioBackend, AudioStreamHandle, SoundHandle};
use std::cell::{Cell, RefCell};
use std::rc::Rc;
use wasm_bindgen::{closure::Closure, JsCast};
use web_sys::AudioContext;
/// Audio backend implementation on top of the Web Audio API.
pub struct WebAudioBackend {
    /// The Web Audio context; all nodes and buffers are created from it.
    context: AudioContext,
    /// All registered sounds.
    sounds: Arena<Sound>,
    /// Pending stream data accumulated during preload, keyed by clip ID.
    stream_data: FnvHashMap<swf::CharacterId, StreamData>,
    /// Maps a clip ID to its fully-preloaded stream sound.
    id_to_sound: FnvHashMap<swf::CharacterId, SoundHandle>,
    /// Scratch buffers reused while decompressing sounds.
    left_samples: Vec<f32>,
    right_samples: Vec<f32>,
}
thread_local! {
    /// Currently-playing audio streams, accessed from JS audio callbacks.
    static STREAMS: RefCell<Arena<AudioStream>> = RefCell::new(Arena::new());
    /// Number of sounds still being decoded asynchronously by `decodeAudioData`.
    static NUM_SOUNDS_LOADING: Cell<u32> = Cell::new(0);
}
/// Accumulated data for a streaming sound while its SWF is being preloaded.
struct StreamData {
    format: swf::SoundFormat,
    /// Concatenated audio data from all stream blocks seen so far.
    audio_data: Vec<u8>,
    /// Running total of sample frames accumulated.
    num_sample_frames: u32,
    /// Samples per SoundStreamBlock, from the stream head.
    samples_per_block: u32,
}
/// Shared, mutable handle to a JS-side decoded audio buffer.
type AudioBufferPtr = Rc<RefCell<web_sys::AudioBuffer>>;

// A sound can be played either from a pre-decoded JS AudioBuffer or as an
// on-the-fly decoded stream using a ScriptProcessorNode.
#[allow(dead_code)]
enum SoundSource {
    // Pre-decoded audio buffer.
    AudioBuffer(AudioBufferPtr),
    // Decode the audio data on the fly from a byte stream.
    Decoder(Vec<u8>),
}

/// A sound registered with the backend: its SWF format info plus its data source.
struct Sound {
    format: swf::SoundFormat,
    source: SoundSource,
}
/// Boxed iterator yielding signed 16-bit PCM samples.
type Decoder = Box<dyn Iterator<Item = i16>>;

/// A currently-playing audio stream, stored in the thread-local `STREAMS` arena.
#[allow(dead_code)]
enum AudioStream {
    /// Samples are pulled from `decoder` inside a `ScriptProcessorNode` callback.
    Decoder {
        decoder: Decoder,
        is_stereo: bool,
    }, // closure: Option<Closure<Box<FnMut(web_sys::AudioProcessingEvent)>>> } ,
    /// Playback is driven entirely by a Web Audio `AudioBufferSourceNode`.
    AudioBuffer {
        node: web_sys::AudioBufferSourceNode,
    },
}

type Error = Box<dyn std::error::Error>;
impl WebAudioBackend {
    /// Creates a new Web Audio backend with a fresh `AudioContext`.
    ///
    /// Errors if the browser refuses to create an `AudioContext`.
    pub fn new() -> Result<Self, Error> {
        let context = AudioContext::new().map_err(|_| "Unable to create AudioContext")?;
        Ok(Self {
            context,
            sounds: Arena::new(),
            stream_data: FnvHashMap::default(),
            id_to_sound: FnvHashMap::default(),
            left_samples: vec![],
            right_samples: vec![],
        })
    }

    /// Starts playback of a registered sound and returns the handle of the new
    /// entry in the thread-local `STREAMS` arena.
    ///
    /// Panics if `handle` does not refer to a registered sound.
    fn play_sound_internal(&mut self, handle: SoundHandle) -> SoundHandle {
        let sound = self.sounds.get(handle).unwrap();
        match &sound.source {
            // Pre-decoded sound: play via an AudioBufferSourceNode.
            SoundSource::AudioBuffer(audio_buffer) => {
                let audio_buffer = audio_buffer.borrow();
                let node = self.context.create_buffer_source().unwrap();
                node.set_buffer(Some(&*audio_buffer));
                node.connect_with_audio_node(&self.context.destination())
                    .unwrap();
                node.start().unwrap();
                let audio_stream = AudioStream::AudioBuffer { node };
                STREAMS.with(|streams| {
                    let mut streams = streams.borrow_mut();
                    streams.insert(audio_stream)
                })
            }
            // Compressed sound: decode on the fly inside a ScriptProcessorNode callback.
            SoundSource::Decoder(audio_data) => {
                let decoder: Decoder = match sound.format.compression {
                    AudioCompression::Adpcm => Box::new(
                        AdpcmDecoder::new(
                            std::io::Cursor::new(audio_data.to_vec()),
                            sound.format.is_stereo,
                            sound.format.sample_rate,
                        )
                        .unwrap(),
                    ),
                    AudioCompression::Mp3 => Box::new(Mp3Decoder::new(
                        if sound.format.is_stereo { 2 } else { 1 },
                        sound.format.sample_rate.into(),
                        std::io::Cursor::new(audio_data.to_vec()), //&sound.data[..]
                    )),
                    _ => unimplemented!(),
                };
                // Resample if the sound's rate differs from the device output rate.
                let decoder: Decoder =
                    if sound.format.sample_rate != self.context.sample_rate() as u16 {
                        Box::new(resample(
                            decoder,
                            sound.format.sample_rate,
                            self.context.sample_rate() as u16,
                            sound.format.is_stereo,
                        ))
                    } else {
                        decoder
                    };
                let audio_stream = AudioStream::Decoder {
                    decoder,
                    is_stereo: sound.format.is_stereo,
                    //closure: None,
                };
                STREAMS.with(|streams| {
                    let mut streams = streams.borrow_mut();
                    let stream_handle = streams.insert(audio_stream);
                    // 0 input channels: this node only produces audio pulled from the decoder.
                    let script_processor_node = self.context.create_script_processor_with_buffer_size_and_number_of_input_channels_and_number_of_output_channels(4096, 0, if sound.format.is_stereo { 2 } else { 1 }).unwrap();
                    let script_node = script_processor_node.clone();
                    let closure = Closure::wrap(Box::new(move |event| {
                        STREAMS.with(|streams| {
                            let mut streams = streams.borrow_mut();
                            let audio_stream = streams.get_mut(stream_handle).unwrap();
                            let complete = WebAudioBackend::update_script_processor(audio_stream, event);
                            // When the decoder is exhausted, tear the stream down.
                            if complete {
                                streams.remove(stream_handle);
                                script_node.disconnect().unwrap();
                            }
                        })
                    }) as Box<dyn FnMut(web_sys::AudioProcessingEvent)>);
                    script_processor_node.set_onaudioprocess(Some(closure.as_ref().unchecked_ref()));
                    // TODO: This will leak memory per playing sound. Remember and properly drop the closure.
                    closure.forget();
                    stream_handle
                })
            }
        }
    }

    /// Decompresses `audio_data` into a Web Audio buffer, using
    /// `self.left_samples` / `self.right_samples` as scratch space.
    /// MP3 is delegated to `decompress_mp3_to_audio_buffer` (async JS decode).
    fn decompress_to_audio_buffer(
        &mut self,
        format: &swf::SoundFormat,
        audio_data: &[u8],
        num_sample_frames: u32,
    ) -> AudioBufferPtr {
        if format.compression == AudioCompression::Mp3 {
            return self.decompress_mp3_to_audio_buffer(format, audio_data, num_sample_frames);
        }
        // This sucks. Firefox doesn't like 5512Hz sample rate, so manually double up the samples.
        // 5512Hz should be relatively rare.
        let audio_buffer = if format.sample_rate > 5512 {
            self.context
                .create_buffer(
                    if format.is_stereo { 2 } else { 1 },
                    num_sample_frames,
                    f32::from(format.sample_rate),
                )
                .unwrap()
        } else {
            self.context
                .create_buffer(
                    if format.is_stereo { 2 } else { 1 },
                    num_sample_frames * 2,
                    11025.0,
                )
                .unwrap()
        };
        match format.compression {
            AudioCompression::Uncompressed => {
                // TODO: Check for is_16_bit.
                // NOTE(review): this maps raw *bytes* through f32::from(u8)/32767;
                // presumably the data is 16-bit interleaved — confirm vs. is_16_bit.
                self.left_samples = audio_data
                    .iter()
                    .step_by(2)
                    .cloned()
                    .map(|n| f32::from(n) / 32767.0)
                    .collect();
                if format.is_stereo {
                    self.right_samples = audio_data
                        .iter()
                        .skip(1)
                        .step_by(2)
                        .cloned()
                        .map(|n| f32::from(n) / 32767.0)
                        .collect();
                }
            }
            AudioCompression::Adpcm => {
                let mut decoder =
                    AdpcmDecoder::new(audio_data, format.is_stereo, format.sample_rate).unwrap();
                if format.is_stereo {
                    // Stereo: decoder yields interleaved L/R sample pairs.
                    while let (Some(l), Some(r)) = (decoder.next(), decoder.next()) {
                        self.left_samples.push(f32::from(l) / 32767.0);
                        self.right_samples.push(f32::from(r) / 32767.0);
                    }
                } else {
                    self.left_samples = decoder.map(|n| f32::from(n) / 32767.0).collect();
                }
            }
            _ => unimplemented!(),
        }
        // Double up samples for 5512Hz audio to satisfy Firefox.
        if format.sample_rate == 5512 {
            let mut samples = Vec::with_capacity(self.left_samples.len() * 2);
            for sample in &self.left_samples {
                samples.push(*sample);
                samples.push(*sample);
            }
            self.left_samples = samples;
            if format.is_stereo {
                let mut samples = Vec::with_capacity(self.right_samples.len() * 2);
                for sample in &self.right_samples {
                    samples.push(*sample);
                    samples.push(*sample);
                }
                self.right_samples = samples;
            }
        }
        audio_buffer
            .copy_to_channel(&mut self.left_samples, 0)
            .unwrap();
        if format.is_stereo {
            audio_buffer
                .copy_to_channel(&mut self.right_samples, 1)
                .unwrap();
        }
        Rc::new(RefCell::new(audio_buffer))
    }

    /// Kicks off asynchronous MP3 decoding via the Web `decodeAudioData` API.
    /// Returns a 1-frame placeholder buffer immediately; it is replaced with
    /// the decoded buffer when the JS success callback fires.
    /// `NUM_SOUNDS_LOADING` tracks outstanding decodes for `is_loading_complete`.
    fn decompress_mp3_to_audio_buffer(
        &mut self,
        format: &swf::SoundFormat,
        audio_data: &[u8],
        _num_sample_frames: u32,
    ) -> AudioBufferPtr {
        // We use the Web decodeAudioData API to decode MP3 data.
        // TODO: Is it possible we finish loading before the MP3 is decoding?
        let audio_buffer = self
            .context
            .create_buffer(1, 1, self.context.sample_rate())
            .unwrap();
        let audio_buffer = Rc::new(RefCell::new(audio_buffer));
        // `Uint8Array::view` aliases `audio_data`; `slice_with_end` copies the
        // bytes into a fresh ArrayBuffer before the asynchronous decode runs.
        let data_array = unsafe { js_sys::Uint8Array::view(&audio_data[..]) };
        let array_buffer = data_array.buffer().slice_with_end(
            data_array.byte_offset(),
            data_array.byte_offset() + data_array.byte_length(),
        );
        NUM_SOUNDS_LOADING.with(|n| n.set(n.get() + 1));
        let _num_channels = if format.is_stereo { 2 } else { 1 };
        let buffer_ptr = Rc::clone(&audio_buffer);
        let success_closure = Closure::wrap(Box::new(move |buffer: web_sys::AudioBuffer| {
            *buffer_ptr.borrow_mut() = buffer;
            NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1));
        }) as Box<dyn FnMut(web_sys::AudioBuffer)>);
        let error_closure = Closure::wrap(Box::new(move || {
            log::info!("Error decoding MP3 audio");
            NUM_SOUNDS_LOADING.with(|n| n.set(n.get() - 1));
        }) as Box<dyn FnMut()>);
        self.context
            .decode_audio_data_with_success_callback_and_error_callback(
                &array_buffer,
                success_closure.as_ref().unchecked_ref(),
                error_closure.as_ref().unchecked_ref(),
            )
            .unwrap();
        // TODO: This will leak memory (once per decompressed MP3).
        // Not a huge deal as there are probably not many MP3s in an SWF.
        success_closure.forget();
        error_closure.forget();
        audio_buffer
    }

    /// `onaudioprocess` handler body: fills the event's output buffer with
    /// samples pulled from the stream's decoder. Returns `true` when the
    /// decoder is exhausted and the stream should be torn down.
    fn update_script_processor(
        audio_stream: &mut AudioStream,
        event: web_sys::AudioProcessingEvent,
    ) -> bool {
        let mut complete = false;
        let mut left_samples = vec![];
        let mut right_samples = vec![];
        if let AudioStream::Decoder {
            decoder, is_stereo, ..
        } = audio_stream
        {
            let output_buffer = event.output_buffer().unwrap();
            let num_frames = output_buffer.length() as usize;
            for _ in 0..num_frames {
                // NOTE(review): two samples are pulled per frame even for mono
                // (the second is discarded) — assumes decoders yield L/R pairs.
                if let (Some(l), Some(r)) = (decoder.next(), decoder.next()) {
                    left_samples.push(f32::from(l) / 32767.0);
                    if *is_stereo {
                        right_samples.push(f32::from(r) / 32767.0);
                    }
                } else {
                    complete = true;
                    break;
                }
            }
            output_buffer
                .copy_to_channel(&mut left_samples[..], 0)
                .unwrap();
            if *is_stereo {
                output_buffer
                    .copy_to_channel(&mut right_samples[..], 1)
                    .unwrap();
            }
        }
        complete
    }
}
impl AudioBackend for WebAudioBackend {
    /// Registers an event sound, eagerly decompressing it into an audio buffer.
    fn register_sound(&mut self, sound: &swf::Sound) -> Result<SoundHandle, Error> {
        // Slice off latency seek for MP3 data.
        let data = if sound.format.compression == AudioCompression::Mp3 {
            &sound.data[2..]
        } else {
            &sound.data[..]
        };
        let sound = Sound {
            format: sound.format.clone(),
            source: SoundSource::AudioBuffer(self.decompress_to_audio_buffer(
                &sound.format,
                data,
                sound.num_samples,
            )),
        };
        Ok(self.sounds.insert(sound))
    }
    /// Begins accumulating stream data for a clip; keeps any existing entry.
    fn preload_sound_stream_head(
        &mut self,
        clip_id: swf::CharacterId,
        stream_info: &swf::SoundStreamHead,
    ) {
        self.stream_data
            .entry(clip_id)
            .or_insert_with(|| StreamData {
                format: stream_info.stream_format.clone(),
                audio_data: vec![],
                num_sample_frames: 0,
                samples_per_block: stream_info.num_samples_per_block.into(),
            });
    }
    /// Appends one SoundStreamBlock's audio data to the clip's pending stream,
    /// updating the running sample-frame count.
    fn preload_sound_stream_block(&mut self, clip_id: swf::CharacterId, audio_data: &[u8]) {
        if let Some(stream) = self.stream_data.get_mut(&clip_id) {
            match stream.format.compression {
                AudioCompression::Uncompressed | AudioCompression::UncompressedUnknownEndian => {
                    // Bytes per sample frame = channels * bytes-per-sample.
                    let frame_len = if stream.format.is_stereo { 2 } else { 1 }
                        * if stream.format.is_16_bit { 2 } else { 1 };
                    stream.num_sample_frames += (audio_data.len() as u32) / frame_len;
                    stream.audio_data.extend_from_slice(audio_data);
                }
                AudioCompression::Mp3 => {
                    // NOTE(review): this reads bytes [2..4] big-endian, but the
                    // comment below says the sample count is in the *first* two
                    // bytes — confirm field order/endianness against the SWF spec.
                    let num_sample_frames =
                        (u32::from(audio_data[2]) << 8) | u32::from(audio_data[3]);
                    stream.num_sample_frames += num_sample_frames;
                    // MP3 streaming data:
                    // First two bytes = number of samples
                    // Second two bytes = 'latency seek' (amount to skip when seeking to this frame)
                    stream.audio_data.extend_from_slice(&audio_data[4..]);
                }
                _ => {
                    // TODO: This is a guess and will vary slightly from block to block!
                    stream.num_sample_frames += stream.samples_per_block;
                }
            }
        }
    }
    /// Finalizes a stream: decompresses the accumulated data into a sound and
    /// maps the clip ID to it for later `start_stream` calls.
    fn preload_sound_stream_end(&mut self, clip_id: swf::CharacterId) {
        if let Some(stream) = self.stream_data.remove(&clip_id) {
            if !stream.audio_data.is_empty() {
                let audio_buffer = self.decompress_to_audio_buffer(
                    &stream.format,
                    &stream.audio_data[..],
                    stream.num_sample_frames,
                );
                let handle = self.sounds.insert(Sound {
                    format: stream.format,
                    source: SoundSource::AudioBuffer(audio_buffer),
                });
                self.id_to_sound.insert(clip_id, handle);
            }
        }
    }
    fn play_sound(&mut self, sound: SoundHandle) {
        self.play_sound_internal(sound);
    }
    /// Starts a previously preloaded stream. Panics if the clip has no
    /// preloaded stream sound.
    fn start_stream(
        &mut self,
        clip_id: swf::CharacterId,
        _clip_data: ruffle_core::tag_utils::SwfSlice,
        _stream_info: &swf::SoundStreamHead,
    ) -> AudioStreamHandle {
        if let Some(&handle) = self.id_to_sound.get(&clip_id) {
            self.play_sound_internal(handle)
        } else {
            log::error!("Missing stream for clip {}", clip_id);
            // TODO: Return dummy sound.
            panic!();
        }
    }
    /// `true` when no asynchronous MP3 decodes are outstanding.
    fn is_loading_complete(&self) -> bool {
        NUM_SOUNDS_LOADING.with(|n| n.get() == 0)
    }
    fn prime_audio(&mut self) {
        // Allow audio to start playing after a user gesture.
        let _ = self.context.resume();
    }
}
// Janky resampling code.
// TODO: Clean this up.
/// Linearly resamples a signed 16-bit sample stream from `input_sample_rate`
/// to `output_sample_rate`.
///
/// The input is interleaved L/R pairs when `is_stereo`, otherwise one sample
/// per frame (duplicated into both channels). The output is always one
/// interleaved left/right pair per output frame. An empty input yields an
/// empty iterator (previously this panicked on `unwrap`).
fn resample(
    mut input: impl Iterator<Item = i16>,
    input_sample_rate: u16,
    output_sample_rate: u16,
    is_stereo: bool,
) -> impl Iterator<Item = i16> {
    // Prime the interpolation window with the first two input frames.
    let (mut left0, mut right0) = if is_stereo {
        (input.next(), input.next())
    } else {
        let sample = input.next();
        (sample, sample)
    };
    let (mut left1, mut right1) = if is_stereo {
        (input.next(), input.next())
    } else {
        let sample = input.next();
        (sample, sample)
    };
    // Fall back to silence when the input is empty instead of panicking;
    // the iterator below then terminates immediately.
    let (mut left, mut right) = (left0.unwrap_or(0), right0.unwrap_or(0));
    let dt_input = 1.0 / f64::from(input_sample_rate);
    let dt_output = 1.0 / f64::from(output_sample_rate);
    let mut t = 0.0;
    let mut cur_channel = 0;
    std::iter::from_fn(move || {
        // Emit the right-channel sample computed on the previous call.
        if cur_channel == 1 {
            cur_channel = 0;
            return Some(right);
        }
        if let (Some(l0), Some(r0), Some(l1), Some(r1)) = (left0, right0, left1, right1) {
            // Linear interpolation between frame 0 and frame 1 at fraction `a`.
            let a = t / dt_input;
            let l0 = f64::from(l0);
            let l1 = f64::from(l1);
            let r0 = f64::from(r0);
            let r1 = f64::from(r1);
            left = (l0 + (l1 - l0) * a) as i16;
            right = (r0 + (r1 - r0) * a) as i16;
            t += dt_output;
            // Slide the input window whenever output time passes an input frame.
            while t >= dt_input {
                t -= dt_input;
                left0 = left1;
                right0 = right1;
                left1 = input.next();
                if is_stereo {
                    right1 = input.next();
                } else {
                    right1 = left1;
                }
            }
            cur_channel = 1;
            Some(left)
        } else {
            None
        }
    })
}

File diff suppressed because it is too large Load Diff