exporter: Added exporter tool for capturing images of swfs (related to #523)

Nathan Adams 2020-05-05 15:27:10 +02:00 committed by Mike Welsh
parent 2343074c56
commit 9ab03b4da6
7 changed files with 262 additions and 4 deletions

Cargo.lock

@@ -595,6 +595,22 @@ dependencies = [
"num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "exporter"
version = "0.1.0"
dependencies = [
"env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
"image 0.23.4 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"ruffle_core 0.1.0",
"ruffle_render_wgpu 0.1.0",
"sample 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"structopt 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"wgpu 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"wgpu-native 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "flate2"
version = "1.0.14"


@@ -6,6 +6,7 @@ members = [
"swf",
"web",
"scanner",
"exporter",
"render/canvas",
"render/wgpu",


@@ -64,12 +64,21 @@ all of the flash files and report on the success of such a task.
* `cargo run --package=ruffle_scanner -- folder/with/swfs/ results.csv`
### Exporter
If you have a swf and would like to capture an image of it, you may use the exporter tool.
This currently requires hardware acceleration, but can be run headless (with no window).
* `cargo run --package=exporter -- path/to/file.swf`
* `cargo run --package=exporter -- path/to/file.swf path/to/screenshots --frames 5`
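A single-frame capture defaults to `<swf name>.png` in the current directory, while a multi-frame capture writes `frame_0.png`, `frame_1.png`, … into the output directory (see `exporter/src/main.rs` below). For example, with a hypothetical `example.swf`:
* `cargo run --package=exporter -- example.swf` produces `example.png`
* `cargo run --package=exporter -- example.swf shots --frames 3` produces `shots/frame_0.png` through `shots/frame_2.png`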
## Structure
- `core` contains the core emulator and common code
- `desktop` contains the desktop client (uses `wgpu-rs`)
- `web` contains the web client (uses `wasm-bindgen`)
- `scanner` contains a utility to bulk parse swf files
- `exporter` contains a utility to generate PNG screenshots of a swf file
## Sponsors

exporter/Cargo.toml

@@ -0,0 +1,22 @@
[package]
name = "exporter"
version = "0.1.0"
authors = ["Nathan Adams <dinnerbone@dinnerbone.com>"]
edition = "2018"
[dependencies]
ruffle_core = { path = "../core" }
ruffle_render_wgpu = { path = "../render/wgpu" }
env_logger = "0.7.1"
image = "0.23.4"
log = "0.4"
sample = "0.10.0"
structopt = "0.3.14"
futures = "0.3.4"
wgpu = "0.5"
wgpu-native = "0.5"
[features]
avm_debug = ["ruffle_core/avm_debug"]
render_debug_labels = ["ruffle_render_wgpu/render_debug_labels"]
lzma = ["ruffle_core/lzma"]

exporter/src/main.rs

@@ -0,0 +1,120 @@
use futures::executor::block_on;
use ruffle_core::backend::audio::NullAudioBackend;
use ruffle_core::backend::input::NullInputBackend;
use ruffle_core::backend::navigator::NullNavigatorBackend;
use ruffle_core::tag_utils::SwfMovie;
use ruffle_core::Player;
use ruffle_render_wgpu::target::TextureTarget;
use ruffle_render_wgpu::WgpuRenderBackend;
use std::fs::create_dir_all;
use std::path::{Path, PathBuf};
use structopt::StructOpt;
#[derive(StructOpt, Debug)]
struct Opt {
/// The swf file to export frames from
#[structopt(name = "swf", parse(from_os_str))]
swf: PathBuf,
/// The file or directory (if multiple frames) to store the capture in
#[structopt(name = "output", parse(from_os_str))]
output_path: Option<PathBuf>,
/// Number of frames to capture
#[structopt(short = "f", long = "frames", default_value = "1")]
frames: u32,
}
fn take_screenshot<P: AsRef<Path>>(
swf_path: P,
output: P,
frames: u32,
) -> Result<(), Box<dyn std::error::Error>> {
let movie = SwfMovie::from_path(swf_path)?;
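// Request a GPU adapter and device with no compatible surface, so the
// capture can run headless (no window is created).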
let adapter = block_on(wgpu::Adapter::request(
&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::Default,
compatible_surface: None,
},
wgpu::BackendBit::PRIMARY,
))
.unwrap();
let (device, queue) = block_on(adapter.request_device(&wgpu::DeviceDescriptor {
extensions: wgpu::Extensions {
anisotropic_filtering: false,
},
limits: wgpu::Limits::default(),
}));
let target = TextureTarget::new(&device, (movie.width(), movie.height()));
let player = Player::new(
Box::new(WgpuRenderBackend::new(device, queue, target)?),
Box::new(NullAudioBackend::new()),
Box::new(NullNavigatorBackend::new()),
Box::new(NullInputBackend::new()),
movie,
)?;
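// Advance the movie one frame at a time; after each render, read the
// frame back from the texture target and save it as a PNG.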
for i in 0..frames {
player.lock().unwrap().run_frame();
player.lock().unwrap().render();
let mut player = player.lock().unwrap();
let renderer = player
.renderer_mut()
.downcast_mut::<WgpuRenderBackend<TextureTarget>>()
.unwrap();
let target = renderer.target();
if let Some(image) = target.capture(renderer.device()) {
if frames > 1 {
let mut path = PathBuf::from(output.as_ref());
path.push(format!("frame_{}.png", i));
image.save(&path)?;
} else {
image.save(&output)?;
}
}
}
Ok(())
}
fn main() {
let opt: Opt = Opt::from_args();
let output = opt.output_path.clone().unwrap_or_else(|| {
let mut result = PathBuf::new();
if opt.frames == 1 {
result.set_file_name(opt.swf.file_stem().unwrap());
result.set_extension("png");
} else {
result.set_file_name(opt.swf.file_stem().unwrap());
}
result
});
if opt.frames > 1 {
let _ = create_dir_all(&output);
}
match take_screenshot(opt.swf.clone(), output.clone(), opt.frames) {
Ok(_) => {
if opt.frames == 1 {
println!(
"Saved first frame of {} to {}",
opt.swf.to_string_lossy(),
output.to_string_lossy()
);
} else {
println!(
"Saved first {} frames of {} to {}",
opt.frames,
opt.swf.to_string_lossy(),
output.to_string_lossy()
);
}
}
Err(e) => {
println!("Couldn't capture swf: {}", e);
std::process::exit(1);
}
}
}


@@ -671,6 +671,14 @@ impl<T: RenderTarget> WgpuRenderBackend<T> {
handle
}
pub fn target(&self) -> &T {
&self.target
}
pub fn device(&self) -> &wgpu::Device {
&self.device
}
fn draw_rect(&mut self, x: f32, y: f32, width: f32, height: f32, color: Color) {
let (frame_output, encoder) = if let Some((frame_output, encoder)) = &mut self.current_frame
{
@@ -781,7 +789,7 @@ impl<T: RenderTarget> WgpuRenderBackend<T> {
}
}
impl<T: RenderTarget> RenderBackend for WgpuRenderBackend<T> {
impl<T: RenderTarget + 'static> RenderBackend for WgpuRenderBackend<T> {
fn set_viewport_dimensions(&mut self, width: u32, height: u32) {
// Avoid panics from creating 0-sized framebuffers.
let width = std::cmp::max(width, 1);
@@ -1399,7 +1407,11 @@ impl<T: RenderTarget> RenderBackend for WgpuRenderBackend<T> {
});
let register_buffer =
replace(&mut self.register_encoder, new_register_encoder).finish();
self.queue.submit(&[register_buffer, encoder.finish()]);
self.target.submit(
&self.device,
&self.queue,
&[register_buffer, encoder.finish()],
);
}
}


@@ -1,3 +1,5 @@
use futures::executor::block_on;
use image::RgbaImage;
use raw_window_handle::HasRawWindowHandle;
use std::fmt::Debug;
@@ -17,6 +19,13 @@ pub trait RenderTarget: Debug + 'static {
fn height(&self) -> u32;
fn get_next_texture(&mut self) -> Result<Self::Frame, wgpu::TimeOut>;
fn submit(
&self,
device: &wgpu::Device,
queue: &wgpu::Queue,
command_buffers: &[wgpu::CommandBuffer],
);
}
#[derive(Debug)]
@@ -78,6 +87,15 @@ impl RenderTarget for SwapChainTarget {
fn get_next_texture(&mut self) -> Result<Self::Frame, wgpu::TimeOut> {
self.swap_chain.get_next_texture().map(SwapChainTargetFrame)
}
fn submit(
&self,
_device: &wgpu::Device,
queue: &wgpu::Queue,
command_buffers: &[wgpu::CommandBuffer],
) {
queue.submit(command_buffers);
}
}
#[derive(Debug)]
@@ -85,6 +103,7 @@ pub struct TextureTarget {
size: wgpu::Extent3d,
texture: wgpu::Texture,
format: wgpu::TextureFormat,
buffer: wgpu::Buffer,
}
#[derive(Debug)]
@@ -103,10 +122,10 @@ impl TextureTarget {
height: size.1,
depth: 1,
};
let label = create_debug_label!("Render target texture");
let texture_label = create_debug_label!("Render target texture");
let format = wgpu::TextureFormat::Bgra8Unorm;
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: label.as_deref(),
label: texture_label.as_deref(),
size,
array_layer_count: 1,
mip_level_count: 1,
@@ -115,10 +134,33 @@ impl TextureTarget {
format,
usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT | wgpu::TextureUsage::COPY_SRC,
});
let buffer_label = create_debug_label!("Render target buffer");
let buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: buffer_label.as_deref(),
size: size.width as u64 * size.height as u64 * 4,
usage: wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::MAP_READ,
});
Self {
size,
texture,
format,
buffer,
}
}
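/// Maps the readback buffer (filled by the texture copy recorded in
/// `submit` below) and converts its contents into an `RgbaImage`.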
pub fn capture(&self, device: &wgpu::Device) -> Option<RgbaImage> {
let buffer_future = self
.buffer
.map_read(0, self.size.width as u64 * self.size.height as u64 * 4);
device.poll(wgpu::Maintain::Wait);
match block_on(buffer_future) {
Ok(map) => {
RgbaImage::from_raw(self.size.width, self.size.height, Vec::from(map.as_slice()))
}
Err(e) => {
log::error!("Unknown error reading capture buffer: {:?}", e);
None
}
}
}
}
@@ -141,6 +183,13 @@ impl RenderTarget for TextureTarget {
format: self.format,
usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT | wgpu::TextureUsage::COPY_SRC,
});
let buffer_label = create_debug_label!("Render target buffer");
self.buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: buffer_label.as_deref(),
size: width as u64 * height as u64 * 4,
usage: wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::MAP_READ,
});
}
fn format(&self) -> wgpu::TextureFormat {
@@ -158,4 +207,33 @@ impl RenderTarget for TextureTarget {
fn get_next_texture(&mut self) -> Result<Self::Frame, wgpu::TimeOut> {
Ok(TextureTargetFrame(self.texture.create_default_view()))
}
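// Submits the frame's command buffers, then a follow-up copy of the
// rendered texture into the mappable buffer so `capture` can read it back.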
fn submit(
&self,
device: &wgpu::Device,
queue: &wgpu::Queue,
command_buffers: &[wgpu::CommandBuffer],
) {
let label = create_debug_label!("Render target transfer encoder");
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: label.as_deref(),
});
encoder.copy_texture_to_buffer(
wgpu::TextureCopyView {
texture: &self.texture,
mip_level: 0,
array_layer: 0,
origin: wgpu::Origin3d::ZERO,
},
wgpu::BufferCopyView {
buffer: &self.buffer,
offset: 0,
bytes_per_row: self.width() * 4,
rows_per_image: 0,
},
self.size,
);
queue.submit(command_buffers);
queue.submit(&[encoder.finish()]);
}
}