Mike Welsh 2019-04-30 01:53:21 -07:00
parent 663ad9f663
commit eb6c23d967
14 changed files with 317 additions and 13 deletions

View File

@@ -6,4 +6,7 @@ members = [
]
[patch.'https://github.com/Herschel/swf-rs']
swf = { path = "../swf-rs" }
[patch.crates-io]
minimp3 = { path = "../minimp3-rs" }

View File

@@ -6,7 +6,9 @@ edition = "2018"
[dependencies]
bacon_rajan_cc = "0.2"
generational-arena = "0.1"
log = "0.4"
minimp3 = "0.3.2"
url = "1.7.2"
svg = "0.5.12"
swf = { git = "https://github.com/Herschel/swf-rs", version = "*" }
@@ -15,7 +17,7 @@ wasm-bindgen = "0.2"
[dependencies.web-sys]
version = "0.3.19"
features = ["CanvasRenderingContext2d", "HtmlCanvasElement", "HtmlImageElement"]
features = ["AudioBuffer", "AudioBufferSourceNode", "AudioContext", "AudioDestinationNode", "AudioNode", "CanvasRenderingContext2d", "HtmlCanvasElement", "HtmlImageElement"]
# Desktop dependencies
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]

core/src/audio.rs Normal file
View File

@@ -0,0 +1,27 @@
use crate::backend::audio::AudioBackend;
//use generational_arena::Arena;
use swf::SoundStreamInfo;
pub struct Audio {
backend: Box<AudioBackend>,
}
pub type AudioStreamHandle = generational_arena::Index;
impl Audio {
pub fn new(backend: Box<AudioBackend>) -> Audio {
Audio { backend }
}
pub fn register_stream(&mut self, stream_info: &SoundStreamInfo) -> AudioStreamHandle {
self.backend.register_stream(stream_info)
}
pub fn queue_stream_samples(&mut self, handle: AudioStreamHandle, samples: &[u8]) {
self.backend.queue_stream_samples(handle, samples)
}
}
struct AudioStream {
stream_info: SoundStreamInfo,
}
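
The `Audio` struct is a thin facade that forwards to whatever `AudioBackend` is plugged in. A minimal sketch of how a caller inside the crate drives it, using the `NullAudioBackend` added later in this commit (the `demo_stream` helper and the empty sample slice are illustrative only, not part of the commit):

use crate::audio::Audio;
use crate::backend::audio::null::NullAudioBackend;

fn demo_stream(stream_info: &swf::SoundStreamInfo) {
    // Any boxed AudioBackend works; the facade does not care which.
    let mut audio = Audio::new(Box::new(NullAudioBackend::new()));
    // Register once; the returned handle indexes the backend's stream arena.
    let handle = audio.register_stream(stream_info);
    // Feed raw sample data as SoundStreamBlock tags arrive.
    audio.queue_stream_samples(handle, &[]);
}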

View File

@@ -1 +1,2 @@
pub mod audio;
pub mod render;

View File

@@ -0,0 +1,9 @@
pub mod null;
pub mod web;
pub type AudioStreamHandle = generational_arena::Index;
pub trait AudioBackend {
fn register_stream(&mut self, stream_info: &swf::SoundStreamInfo) -> AudioStreamHandle;
fn queue_stream_samples(&mut self, handle: AudioStreamHandle, samples: &[u8]);
}
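
Because `AudioStreamHandle` is a `generational_arena::Index`, a stale handle can never silently alias an arena slot that was reused for a different stream; the generation check catches it. A standalone sketch of that property (not part of this commit):

use generational_arena::Arena;

fn main() {
    let mut streams: Arena<&str> = Arena::new();
    let handle = streams.insert("stream A"); // returns a generational Index
    assert_eq!(streams.get(handle), Some(&"stream A"));
    streams.remove(handle);
    streams.insert("stream B"); // may reuse the same slot...
    // ...but the old handle's generation no longer matches, so lookup fails.
    assert!(streams.get(handle).is_none());
}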

View File

@@ -0,0 +1,24 @@
use super::{AudioBackend, AudioStreamHandle};
use generational_arena::Arena;
pub struct NullAudioBackend {
streams: Arena<()>,
}
impl NullAudioBackend {
pub fn new() -> NullAudioBackend {
NullAudioBackend {
streams: Arena::new(),
}
}
}
impl AudioBackend for NullAudioBackend {
fn register_stream(&mut self, _stream_info: &swf::SoundStreamInfo) -> AudioStreamHandle {
self.streams.insert(())
}
fn queue_stream_samples(&mut self, _handle: AudioStreamHandle, _samples: &[u8]) {
// Noop
}
}

View File

@@ -0,0 +1,194 @@
use super::{AudioBackend, AudioStreamHandle};
use generational_arena::Arena;
use js_sys::{ArrayBuffer, Uint8Array};
use log::info;
use swf::SoundStreamInfo;
use wasm_bindgen::closure::Closure;
use web_sys::{AudioBuffer, AudioBufferSourceNode, AudioContext};
pub struct WebAudioBackend {
context: AudioContext,
streams: Arena<AudioStream>,
}
struct AudioStream {
info: SoundStreamInfo,
time: f64,
pending_mp3_samples: Vec<Vec<u8>>,
}
impl WebAudioBackend {
pub fn new() -> Result<Self, Box<std::error::Error>> {
let context = AudioContext::new().map_err(|_| "Unable to create AudioContext")?;
Ok(Self {
context,
streams: Arena::new(),
})
}
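// Lead time added when (re)starting a stream so the first buffer is
// scheduled slightly in the future and playback does not stutter.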
const BUFFER_TIME: f64 = 0.05;
}
impl AudioBackend for WebAudioBackend {
fn register_stream(&mut self, stream_info: &swf::SoundStreamInfo) -> AudioStreamHandle {
let stream = AudioStream {
info: stream_info.clone(),
time: 0.0,
pending_mp3_samples: vec![],
};
info!("Stream {}", stream_info.num_samples_per_block);
self.streams.insert(stream)
}
fn queue_stream_samples(&mut self, handle: AudioStreamHandle, samples: &[u8]) {
if let Some(stream) = self.streams.get_mut(handle) {
let current_time = self.context.current_time();
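// If playback has overrun the queued audio, re-anchor the stream just
// ahead of the context clock so the next buffer starts cleanly.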
if current_time >= stream.time {
stream.time = current_time + WebAudioBackend::BUFFER_TIME;
}
let format = &stream.info.stream_format;
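// One frame holds one sample per channel: 1 or 2 channels, 1 or 2 bytes each.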
let num_channels = if format.is_stereo { 2 } else { 1 };
let frame_size = num_channels * if format.is_16_bit { 2 } else { 1 };
let num_frames = samples.len() / frame_size;
if format.compression == swf::AudioCompression::Uncompressed
|| format.compression == swf::AudioCompression::UncompressedUnknownEndian
{
let buffer = self
.context
.create_buffer(
num_channels as u32,
num_frames as u32,
format.sample_rate.into(),
)
.unwrap();
let mut i = 0;
if num_channels == 2 {
let mut left_samples = Vec::with_capacity(num_frames);
let mut right_samples = Vec::with_capacity(num_frames);
if format.is_16_bit {
while i < num_frames * 4 {
let left_sample =
((samples[i] as u16) | ((samples[i + 1] as u16) << 8)) as i16;
let right_sample =
((samples[i + 2] as u16) | ((samples[i + 3] as u16) << 8)) as i16;
left_samples.push((f32::from(left_sample)) / 32768.0);
right_samples.push((f32::from(right_sample)) / 32768.0);
i += 4;
}
} else {
while i < num_frames * 2 {
left_samples.push((f32::from(samples[i]) - 127.0) / 128.0);
right_samples.push((f32::from(samples[i + 1]) - 127.0) / 128.0);
i += 2;
}
}
buffer.copy_to_channel(&mut left_samples[..], 0).unwrap();
buffer.copy_to_channel(&mut right_samples[..], 1).unwrap();
} else {
let mut out_samples = Vec::with_capacity(num_frames);
if format.is_16_bit {
while i < num_frames * 2 {
let sample = f32::from(
((samples[i] as u16) | ((samples[i + 1] as u16) << 8)) as i16,
) / 32768.0;
if i == 0 {
info!("S: {}", sample);
}
out_samples.push(sample);
i += 2;
}
} else {
while i < num_frames {
out_samples.push((f32::from(samples[i]) - 127.0) / 128.0);
i += 1;
}
}
buffer.copy_to_channel(&mut out_samples[..], 0).unwrap();
}
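// Schedule this buffer at the stream's running timestamp, then advance the
// timestamp by the buffer's duration so successive blocks play back-to-back.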
let buffer_node = self.context.create_buffer_source().unwrap();
buffer_node.set_buffer(Some(&buffer));
buffer_node
.connect_with_audio_node(&self.context.destination())
.unwrap();
buffer_node.start_with_when(stream.time).unwrap();
stream.time += (num_frames as f64) / (format.sample_rate as f64);
} else if format.compression == swf::AudioCompression::Mp3 {
// stream
// .pending_mp3_samples
// .push(samples.iter().cloned().collect());
// let clone_samples =
// &stream.pending_mp3_samples[stream.pending_mp3_samples.len() - 1];
// let data_array = unsafe { Uint8Array::view(&clone_samples[2..]) };
// let array_buffer = data_array.buffer();
// let closure = Closure::wrap(Box::new(move |value| {
// web_sys::console::log_1(&"Hello using web-sys".into());
// })
// as Box<dyn FnMut(wasm_bindgen::JsValue)>);
// self.context
// .decode_audio_data(&array_buffer)
// .unwrap()
// .then(&closure);
use minimp3::{Decoder, Error, Frame};
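// The first two bytes of an MP3 stream block are a little-endian sample
// count for the block (SWF SoundStreamBlock format).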
let num_frames = ((samples[0] as u16) | ((samples[1] as u16) << 8)) as usize;
let buffer = self
.context
.create_buffer(
num_channels as u32,
num_frames as u32,
format.sample_rate.into(),
)
.unwrap();
if format.is_stereo {
let mut left_samples = Vec::with_capacity(num_frames);
let mut right_samples = Vec::with_capacity(num_frames);
let mut decoder = Decoder::new(&samples[2..]);
let mut frames_decoded = 0;
while frames_decoded < num_frames {
match decoder.next_frame() {
Ok(Frame {
data,
sample_rate,
channels,
..
}) => {
let new_frames_decoded = data.len() / channels;
frames_decoded += new_frames_decoded;
let mut i: usize = 0;
while i < new_frames_decoded {
let left_sample = data[i];
let right_sample = data[i + 1];
left_samples.push((f32::from(left_sample)) / 32768.0);
right_samples.push((f32::from(right_sample)) / 32768.0);
i += 2;
}
}
Err(Error::Eof) => {
frames_decoded = num_frames;
break;
}
Err(e) => panic!("{:?}", e),
}
}
// Copy the decoded samples into the buffer once the whole block is decoded.
buffer.copy_to_channel(&mut left_samples[..], 0).unwrap();
buffer.copy_to_channel(&mut right_samples[..], 1).unwrap();
}
}
}
}
}
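
The uncompressed branch above hand-rolls the conversion from interleaved little-endian PCM bytes to the planar f32 data that Web Audio buffers expect. The same conversion as a standalone helper, for clarity (a sketch, not part of the commit):

fn pcm16le_to_f32_planar(samples: &[u8], num_channels: usize) -> Vec<Vec<f32>> {
    let mut channels = vec![Vec::new(); num_channels];
    // Each frame is one 2-byte sample per channel, interleaved.
    for frame in samples.chunks_exact(2 * num_channels) {
        for (channel, bytes) in frame.chunks_exact(2).enumerate() {
            let sample = i16::from_le_bytes([bytes[0], bytes[1]]);
            // Normalize the i16 range to the [-1.0, 1.0] range of AudioBuffer.
            channels[channel].push(f32::from(sample) / 32768.0);
        }
    }
    channels
}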

View File

@@ -46,7 +46,6 @@ impl RenderBackend for WebCanvasRenderBackend {
"data:image/svg+xml,{}",
utf8_percent_encode(&svg, DEFAULT_ENCODE_SET)
);
info!("{}", svg_encoded);
image.set_src(&svg_encoded);
self.shapes.push(ShapeData {

View File

@@ -1,7 +1,7 @@
#[macro_use]
mod display_object;
pub mod backend;
mod audio;
mod character;
mod color_transform;
mod graphic;
@@ -12,5 +12,7 @@ mod player;
mod prelude;
mod transform;
pub mod backend;
pub use player::Player;
use swf::Color;

View File

@@ -1,3 +1,4 @@
use crate::audio::AudioStreamHandle;
use crate::character::Character;
use crate::color_transform::ColorTransform;
use crate::display_object::{
@@ -21,6 +22,7 @@ pub struct MovieClip {
current_frame: FrameNumber,
next_frame: FrameNumber,
total_frames: FrameNumber,
audio_stream: Option<AudioStreamHandle>,
children: HashMap<Depth, Cc<RefCell<DisplayObject>>>,
}
@@ -36,6 +38,7 @@ impl MovieClip {
current_frame: 0,
next_frame: 1,
total_frames: 1,
audio_stream: None,
children: HashMap::new(),
}
}
@@ -48,6 +51,7 @@ impl MovieClip {
is_playing: true,
current_frame: 0,
next_frame: 1,
audio_stream: None,
total_frames: num_frames,
children: HashMap::new(),
}
@@ -99,6 +103,24 @@ impl MovieClip {
character.set_matrix(&Matrix::from(m));
}
}
fn sound_stream_head(
&mut self,
stream_info: &swf::SoundStreamInfo,
context: &mut UpdateContext,
_length: usize,
_version: u8,
) {
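// A timeline may carry more than one SoundStreamHead tag; only the first
// registers a stream with the audio backend.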
if self.audio_stream.is_none() {
self.audio_stream = Some(context.audio.register_stream(stream_info));
}
}
fn sound_stream_block(&mut self, samples: &[u8], context: &mut UpdateContext, _length: usize) {
if let Some(stream) = self.audio_stream {
context.audio.queue_stream_samples(stream, samples)
}
}
}
impl DisplayObjectUpdate for MovieClip {
@@ -158,10 +180,13 @@ impl DisplayObjectUpdate for MovieClip {
self.children.remove(&depth);
}
Tag::SoundStreamHead(info) => self.sound_stream_head(&info, context, 0, 1),
Tag::SoundStreamHead2(info) => self.sound_stream_head(&info, context, 0, 2),
Tag::SoundStreamBlock(samples) => {
self.sound_stream_block(&samples[..], context, 0)
}
Tag::JpegTables(_) => (),
Tag::SoundStreamHead(_) => (),
Tag::SoundStreamHead2(_) => (),
Tag::SoundStreamBlock(_) => (),
Tag::DoAction(_) => (),
_ => info!("Unimplemented tag: {:?}", tag),
}

View File

@@ -1,4 +1,5 @@
use crate::backend::render::RenderBackend;
use crate::audio::Audio;
use crate::backend::{audio::AudioBackend, render::RenderBackend};
use crate::color_transform::ColorTransformStack;
use crate::display_object::{DisplayObject, DisplayObjectUpdate};
use crate::library::Library;
@@ -25,6 +26,7 @@ pub struct Player {
tag_stream: swf::read::Reader<Cursor<Vec<u8>>>,
render_context: RenderContext,
audio: Audio,
library: Library,
stage: Cc<RefCell<DisplayObject>>,
@@ -40,6 +42,7 @@
impl Player {
pub fn new(
renderer: Box<RenderBackend>,
audio: Box<AudioBackend>,
swf_data: Vec<u8>,
) -> Result<Player, Box<std::error::Error>> {
let (swf, tag_stream) = swf::read::read_swf_header_decompressed(&swf_data[..]).unwrap();
@@ -55,6 +58,9 @@ impl Player {
matrix_stack: MatrixStack::new(),
color_transform_stack: ColorTransformStack::new(),
},
audio: Audio::new(audio),
background_color: Color {
r: 255,
g: 255,
@@ -75,7 +81,7 @@
pub fn tick(&mut self, dt: f64) {
self.frame_accumulator += dt;
let frame_time = 1000.0 / self.frame_rate;
info!("{} / {}", self.frame_accumulator, frame_time);
let needs_render = self.frame_accumulator >= frame_time;
while self.frame_accumulator >= frame_time {
self.frame_accumulator -= frame_time;
@@ -104,6 +110,7 @@ impl Player {
library: &mut self.library,
background_color: &mut self.background_color,
renderer: &mut *self.render_context.renderer,
audio: &mut self.audio,
};
let mut stage = self.stage.borrow_mut();
@@ -131,6 +138,7 @@ pub struct UpdateContext<'a> {
pub library: &'a mut Library,
pub background_color: &'a mut Color,
pub renderer: &'a mut RenderBackend,
pub audio: &'a mut Audio,
}
pub struct RenderContext {

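`Player::tick` drives frames with a fixed-timestep accumulator: elapsed wall-clock time piles up in `frame_accumulator`, and whole frames run off while at least one `frame_time` (milliseconds per SWF frame) is available. A self-contained sketch of the pattern (the `Ticker` type and `run_frame` callback are hypothetical, not from this commit):

struct Ticker {
    frame_accumulator: f64,
    frame_rate: f64,
}

impl Ticker {
    /// `dt` is the time since the last call, in milliseconds.
    fn tick(&mut self, dt: f64, mut run_frame: impl FnMut()) {
        self.frame_accumulator += dt;
        let frame_time = 1000.0 / self.frame_rate;
        // Run as many whole frames as have accumulated; the remainder
        // carries over so the average rate tracks the SWF frame rate.
        while self.frame_accumulator >= frame_time {
            self.frame_accumulator -= frame_time;
            run_frame();
        }
    }
}
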
View File

@@ -1,4 +1,6 @@
use fluster_core::{backend::render::glium::GliumRenderBackend, Player};
use fluster_core::{
backend::audio::null::NullAudioBackend, backend::render::glium::GliumRenderBackend, Player,
};
use glutin::{ContextBuilder, Event, EventsLoop, WindowBuilder, WindowEvent};
use std::path::PathBuf;
use std::time::{Duration, Instant};
@@ -28,8 +30,9 @@ fn run_player(input_path: PathBuf) -> Result<(), Box<std::error::Error>> {
let mut events_loop = EventsLoop::new();
let window_builder = WindowBuilder::new();
let windowed_context = ContextBuilder::new().build_windowed(window_builder, &events_loop)?;
let audio = NullAudioBackend::new();
let renderer = GliumRenderBackend::new(windowed_context)?;
let mut player = Player::new(Box::new(renderer), swf_data)?;
let mut player = Player::new(Box::new(renderer), Box::new(audio), swf_data)?;
let mut time = Instant::now();
loop {

View File

@@ -14,6 +14,7 @@ default = ["console_error_panic_hook", "console_log"]
console_error_panic_hook = { version = "0.1.1", optional = true }
console_log = { version = "0.1", optional = true }
fluster_core = { path = "../core" }
log = "0.4"
js-sys = "0.3.19"
wasm-bindgen = "0.2"

View File

@@ -1,4 +1,6 @@
use fluster_core::backend::render::web_canvas::WebCanvasRenderBackend;
use fluster_core::backend::{
audio::web::WebAudioBackend, render::web_canvas::WebCanvasRenderBackend,
};
use js_sys::Uint8Array;
use std::error::Error;
use wasm_bindgen::{prelude::*, JsValue};
@@ -20,12 +22,16 @@ impl Player {
impl Player {
fn new_internal(canvas: HtmlCanvasElement, swf_data: Uint8Array) -> Result<Player, Box<Error>> {
console_error_panic_hook::set_once();
console_log::init_with_level(log::Level::Trace)?;
let mut data = vec![0; swf_data.length() as usize];
swf_data.copy_to(&mut data[..]);
let renderer = WebCanvasRenderBackend::new(&canvas)?;
let audio = WebAudioBackend::new()?;
let player = fluster_core::Player::new(Box::new(renderer), data)?;
let player = fluster_core::Player::new(Box::new(renderer), Box::new(audio), data)?;
// Update canvas size to match player size.
canvas.set_width(player.movie_width());