desktop: Add --timedemo benchmark

Add --timedemo for benchmarking, which will run the given SWF as
quickly as possible until either 5000 frames have run or the root
timeline ends, whichever comes first. The total duration will be
printed upon completion.
Mike Welsh 2020-12-25 13:42:14 -08:00
parent a3b4074fc4
commit 8222d3095b
4 changed files with 189 additions and 62 deletions
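
For orientation, the heart of the change is the benchmark loop added to the desktop binary in run_timedemo (see the hunk further down). A condensed sketch of that loop, written here as a hypothetical standalone helper rather than the exact code of the commit:

use std::time::Instant;
use ruffle_core::Player;

// Condensed from the run_timedemo function added below: run up to 5000 frames,
// or until the playhead reaches the end of the root timeline, as fast as
// possible, then report the elapsed wall-clock time.
fn timedemo(player: &mut Player, movie_frames: Option<u16>) {
    const MAX_FRAMES: u32 = 5000;
    let start = Instant::now();
    let mut num_frames = 0;
    while num_frames < MAX_FRAMES && player.current_frame() < movie_frames {
        player.run_frame(); // advance the root timeline by one frame
        player.render();    // draw the frame to the (offscreen) render target
        num_frames += 1;
    }
    println!("Ran {} frames in {}s.", num_frames, start.elapsed().as_secs_f32());
}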

@@ -205,6 +205,10 @@ pub struct Player {
/// contexts to other parts of the player. It can be used to ensure the
/// player lives across `await` calls in async code.
self_reference: Option<Weak<Mutex<Self>>>,
/// The current frame of the main timeline, if available.
/// The first frame is frame 1.
current_frame: Option<u16>,
}
#[allow(clippy::too_many_arguments)]
@@ -291,6 +295,7 @@ impl Player {
time_til_next_timer: None,
storage,
max_execution_duration: Duration::from_secs(15),
current_frame: None,
};
player.mutate_with_update_context(|context| {
@@ -882,6 +887,12 @@ impl Player {
self.needs_render = false;
}
/// The current frame of the main timeline, if available.
/// The first frame is frame 1.
pub fn current_frame(&self) -> Option<u16> {
self.current_frame
}
pub fn audio(&self) -> &Audio {
&self.audio
}
@@ -1097,6 +1108,7 @@ impl Player {
logging,
needs_render,
max_execution_duration,
current_frame,
) = (
self.player_version,
&self.swf,
@@ -1117,6 +1129,7 @@ impl Player {
self.log.deref_mut(),
&mut self.needs_render,
self.max_execution_duration,
&mut self.current_frame,
);
self.gc_arena.mutate(|gc_context, gc_root| {
@@ -1176,8 +1189,15 @@ impl Player {
let ret = f(&mut update_context);
*current_frame = update_context
.levels
.get(&0)
.and_then(|root| root.as_movie_clip())
.map(|clip| clip.current_frame());
// Hovered object may have been updated; copy it back to the GC root.
root_data.mouse_hovered_object = update_context.mouse_hovered_object;
ret
})
}

@@ -81,6 +81,9 @@ struct Opt {
/// (Optional) Replace all embedded http URLs with https
#[clap(long, case_insensitive = true, takes_value = false)]
upgrade_to_https: bool,
#[clap(long, case_insensitive = true, takes_value = false)]
timedemo: bool,
}
#[cfg(feature = "render_trace")]
@@ -105,7 +108,11 @@ fn main() {
let opt = Opt::parse();
let ret = run_player(opt);
let ret = if opt.timedemo {
run_timedemo(opt)
} else {
run_player(opt)
};
if let Err(e) = ret {
eprintln!("Fatal error:\n{}", e);
@@ -113,16 +120,13 @@
}
}
fn load_movie_from_path(
movie_url: Url,
proxy: Option<Url>,
) -> Result<SwfMovie, Box<dyn std::error::Error>> {
fn load_movie_from_path(movie_url: Url, opt: &Opt) -> Result<SwfMovie, Box<dyn std::error::Error>> {
if movie_url.scheme() == "file" {
if let Ok(path) = movie_url.to_file_path() {
return SwfMovie::from_path(path);
}
}
let proxy = proxy.and_then(|url| url.as_str().parse().ok());
let proxy = opt.proxy.as_ref().and_then(|url| url.as_str().parse().ok());
let builder = HttpClient::builder()
.proxy(proxy)
.redirect_policy(RedirectPolicy::Follow);
@@ -130,7 +134,22 @@ fn load_movie_from_path(
let res = client.get(movie_url.to_string())?;
let mut buffer: Vec<u8> = Vec::new();
res.into_body().read_to_end(&mut buffer)?;
SwfMovie::from_data(&buffer, Some(movie_url.to_string()))
let mut movie = SwfMovie::from_data(&buffer, Some(movie_url.to_string()))?;
// Set query parameters.
for parameter in &opt.parameters {
let mut split = parameter.splitn(2, '=');
if let (Some(key), Some(value)) = (split.next(), split.next()) {
movie.parameters_mut().insert(key, value.to_string(), true);
} else {
movie
.parameters_mut()
.insert(&parameter, "".to_string(), true);
}
}
Ok(movie)
}
fn run_player(opt: Opt) -> Result<(), Box<dyn std::error::Error>> {
@@ -161,20 +180,9 @@ fn run_player(opt: Opt) -> Result<(), Box<dyn std::error::Error>> {
}
};
let mut movie = load_movie_from_path(movie_url.to_owned(), opt.proxy.to_owned())?;
let movie = load_movie_from_path(movie_url.to_owned(), &opt)?;
let movie_size = LogicalSize::new(movie.width(), movie.height());
for parameter in &opt.parameters {
let mut split = parameter.splitn(2, '=');
if let (Some(key), Some(value)) = (split.next(), split.next()) {
movie.parameters_mut().insert(key, value.to_string(), true);
} else {
movie
.parameters_mut()
.insert(&parameter, "".to_string(), true);
}
}
let icon_bytes = include_bytes!("../assets/favicon-32.rgba");
let icon = Icon::from_rgba(icon_bytes.to_vec(), 32, 32)?;
@@ -239,6 +247,7 @@ fn run_player(opt: Opt) -> Result<(), Box<dyn std::error::Error>> {
let mut mouse_pos = PhysicalPosition::new(0.0, 0.0);
let mut time = Instant::now();
let mut next_frame_time = Instant::now();
loop {
// Poll UI events
event_loop.run(move |event, _window_target, control_flow| {
@@ -361,6 +370,77 @@
}
}
fn run_timedemo(opt: Opt) -> Result<(), Box<dyn std::error::Error>> {
let movie_url = match &opt.input_path {
Some(path) => {
if path.exists() {
let absolute_path = path.canonicalize().unwrap_or_else(|_| path.to_owned());
Url::from_file_path(absolute_path)
.map_err(|_| "Path must be absolute and cannot be a URL")?
} else {
Url::parse(path.to_str().unwrap_or_default())
.map_err(|_| "Input path is not a file and could not be parsed as a URL.")?
}
}
None => return Err("Input file necessary for timedemo".into()),
};
let movie = load_movie_from_path(movie_url, &opt)?;
let movie_frames = Some(movie.header().num_frames);
let viewport_width = 1920;
let viewport_height = 1080;
let renderer = Box::new(WgpuRenderBackend::for_offscreen(
(viewport_width, viewport_height),
opt.graphics.into(),
opt.power.into(),
trace_path(&opt),
)?);
let audio: Box<dyn AudioBackend> = Box::new(NullAudioBackend::new());
let navigator = Box::new(ruffle_core::backend::navigator::NullNavigatorBackend::new());
let input = Box::new(ruffle_core::backend::input::NullInputBackend::new());
let storage = Box::new(ruffle_core::backend::storage::MemoryStorageBackend::default());
let user_interface = Box::new(ruffle_core::backend::ui::NullUiBackend::new());
let locale = Box::new(locale::DesktopLocaleBackend::new());
let log = Box::new(NullLogBackend::new());
let player = Player::new(
renderer,
audio,
navigator,
input,
storage,
locale,
log,
user_interface,
)?;
player.lock().unwrap().set_root_movie(Arc::new(movie));
player.lock().unwrap().set_is_playing(true);
player
.lock()
.unwrap()
.set_viewport_dimensions(viewport_width, viewport_height);
println!("Running {}...", opt.input_path.unwrap().to_string_lossy(),);
let start = Instant::now();
let mut num_frames = 0;
const MAX_FRAMES: u32 = 5000;
let mut player = player.lock().unwrap();
while num_frames < MAX_FRAMES && player.current_frame() < movie_frames {
player.run_frame();
player.render();
num_frames += 1;
}
let end = Instant::now();
let duration = end.duration_since(start);
println!("Ran {} frames in {}s.", num_frames, duration.as_secs_f32());
Ok(())
}
/// Hides the Win32 console if we were not launched from the command line.
fn win32_hide_console() {
#[cfg(windows)]
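
One detail of the loop condition above: both sides of player.current_frame() < movie_frames are Option<u16>, so the comparison uses Option's derived ordering, in which None sorts before every Some. A quick, self-contained check of the cases the loop relies on:

fn main() {
    // Option ordering as used by `current_frame() < movie_frames` in run_timedemo:
    assert!(None::<u16> < Some(1));         // no root movie clip yet: keep looping
    assert!(Some(1u16) < Some(5000));       // mid-timeline: keep looping
    assert!(!(Some(5000u16) < Some(5000))); // playhead on the final frame: stop
}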

@@ -1,5 +1,4 @@
use clap::Clap;
use futures::executor::block_on;
use image::RgbaImage;
use indicatif::{ProgressBar, ProgressStyle};
use ruffle_core::backend::audio::NullAudioBackend;
@@ -374,24 +373,13 @@ fn trace_path(_opt: &Opt) -> Option<&Path> {
fn main() -> Result<(), Box<dyn Error>> {
let opt: Opt = Opt::parse();
let instance = wgpu::Instance::new(opt.graphics.into());
let adapter = block_on(instance.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: opt.power.into(),
compatible_surface: None,
}))
.ok_or(
"This tool requires hardware acceleration, but no compatible graphics device was found.",
)?;
let (device, queue) = block_on(adapter.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: Default::default(),
limits: wgpu::Limits::default(),
shader_validation: false,
},
let descriptors = WgpuRenderBackend::<TextureTarget>::build_descriptors(
opt.graphics.into(),
instance,
None,
opt.power.into(),
trace_path(&opt),
))?;
let descriptors = Descriptors::new(device, queue)?;
)?;
if opt.swf.is_file() {
capture_single_swf(descriptors, &opt)?;

@@ -11,6 +11,7 @@ use ruffle_core::backend::render::{
use ruffle_core::shape_utils::{DistilledShape, DrawPath};
use std::borrow::Cow;
use swf::{CharacterId, DefineBitsLossless, Glyph, GradientInterpolation};
use target::TextureTarget;
use bytemuck::{Pod, Zeroable};
use futures::executor::block_on;
@@ -178,40 +179,41 @@ impl WgpuRenderBackend<SwapChainTarget> {
format_list(&get_backend_names(backend), "and")
);
}
let instance = wgpu::Instance::new(backend);
let surface = unsafe { instance.create_surface(window) };
let adapter = block_on(instance.request_adapter(&wgpu::RequestAdapterOptions {
let descriptors = Self::build_descriptors(
backend,
instance,
Some(&surface),
power_preference,
compatible_surface: Some(&surface),
}))
.ok_or_else(|| {
let names = get_backend_names(backend);
if names.is_empty() {
"Ruffle requires hardware acceleration, but no compatible graphics device was found (no backend provided?)".to_string()
} else {
format!("Ruffle requires hardware acceleration, but no compatible graphics device was found supporting {}", format_list(&names, "or"))
}
})?;
let (device, queue) = block_on(adapter.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: Default::default(),
limits: wgpu::Limits::default(),
shader_validation: false,
},
trace_path,
))?;
let descriptors = Descriptors::new(device, queue)?;
)?;
let target = SwapChainTarget::new(surface, size, &descriptors.device);
Self::new(descriptors, target)
}
}
impl WgpuRenderBackend<TextureTarget> {
pub fn for_offscreen(
size: (u32, u32),
backend: wgpu::BackendBit,
power_preference: wgpu::PowerPreference,
trace_path: Option<&Path>,
) -> Result<Self, Error> {
if wgpu::BackendBit::SECONDARY.contains(backend) {
log::warn!(
"{} graphics backend support may not be fully supported.",
format_list(&get_backend_names(backend), "and")
);
}
let instance = wgpu::Instance::new(backend);
let descriptors =
Self::build_descriptors(backend, instance, None, power_preference, trace_path)?;
let target = TextureTarget::new(&descriptors.device, size);
Self::new(descriptors, target)
}
}
impl<T: RenderTarget> WgpuRenderBackend<T> {
pub fn new(mut descriptors: Descriptors, target: T) -> Result<Self, Error> {
let extent = wgpu::Extent3d {
@@ -275,6 +277,43 @@ impl<T: RenderTarget> WgpuRenderBackend<T> {
})
}
pub fn build_descriptors(
backend: wgpu::BackendBit,
instance: wgpu::Instance,
surface: Option<&wgpu::Surface>,
power_preference: wgpu::PowerPreference,
trace_path: Option<&Path>,
) -> Result<Descriptors, Error> {
let adapter = block_on(instance.request_adapter(&wgpu::RequestAdapterOptions {
power_preference,
compatible_surface: surface,
}))
.ok_or_else(|| {
let names = get_backend_names(backend);
if names.is_empty() {
"Ruffle requires hardware acceleration, but no compatible graphics device was found (no backend provided?)".to_string()
} else {
format!("Ruffle requires hardware acceleration, but no compatible graphics device was found supporting {}", format_list(&names, "or"))
}
})?;
let (device, queue) = block_on(adapter.request_device(
&wgpu::DeviceDescriptor {
label: None,
features: wgpu::Features::PUSH_CONSTANTS,
limits: wgpu::Limits {
max_push_constant_size: (std::mem::size_of::<Transforms>()
+ std::mem::size_of::<ColorAdjustments>())
as u32,
..Default::default()
},
shader_validation: false,
},
trace_path,
))?;
Descriptors::new(device, queue)
}
pub fn descriptors(self) -> Descriptors {
self.descriptors
}
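
Finally, the new for_offscreen constructor is what lets the timedemo render without creating a window. A minimal sketch of calling it from outside this diff, assuming the backend crate is available as ruffle_render_wgpu (the helper name offscreen_renderer is hypothetical; the argument values mirror run_timedemo):

use ruffle_render_wgpu::{target::TextureTarget, WgpuRenderBackend};

// Hypothetical helper: build a 1920x1080 offscreen renderer the same way
// run_timedemo does, with no wgpu API trace recorded.
fn offscreen_renderer() -> Result<WgpuRenderBackend<TextureTarget>, Box<dyn std::error::Error>> {
    let renderer = WgpuRenderBackend::for_offscreen(
        (1920, 1080),                           // viewport size used by the timedemo
        wgpu::BackendBit::PRIMARY,              // pick among the primary native backends
        wgpu::PowerPreference::HighPerformance, // prefer a discrete GPU if present
        None,                                   // no wgpu trace path
    )?;
    Ok(renderer)
}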