Initial Commit

This commit is contained in:
Exil Productions
2025-12-19 20:18:56 +01:00
commit 0cb370f8ec
22 changed files with 1804 additions and 0 deletions

1
v2a-converter/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

20
v2a-converter/Cargo.toml Normal file
View File

@@ -0,0 +1,20 @@
# Manifest for the v2a-converter binary (video -> V2A terminal format).
[package]
name = "v2a-converter"
version = "0.1.0"
edition = "2024"
[dependencies]
# CLI argument parsing via the derive API (src/main.rs).
clap = { version = "4.0", features = ["derive"] }
# Data-parallel thread pool. NOTE(review): not referenced in the sources shown — confirm it is needed.
rayon = "1.10"
# Gzip compression of per-frame data (V2AFrame::write_compressed / read_compressed).
flate2 = "1.0"
# Terminal progress bars during conversion (src/converter.rs).
indicatif = "0.17"
# Ad-hoc error handling with context, used throughout.
anyhow = "1.0"
# Binary serialization. NOTE(review): not referenced in the sources shown — confirm it is needed.
bincode = "2.0"
# Little-endian read/write helpers for the header and frame encoding (src/lib.rs).
byteorder = "1.5"
# Bounded MPMC channels for the conversion pipeline (src/converter.rs).
crossbeam-channel = "0.5"
# Parsing ffprobe's JSON output (src/video.rs).
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
# LRU cache memoizing nearest-palette-color lookups (src/color.rs).
lru = "0.12"
# Lazy statics. NOTE(review): not referenced in the sources shown — confirm it is needed.
once_cell = "1.20"
# Temporary files for extracted audio (src/audio.rs).
tempfile = "3.10"
# Logical CPU count for the default worker count (src/main.rs).
num_cpus = "1.16"

Binary file not shown.

Binary file not shown.

16
v2a-converter/README.md Normal file
View File

@@ -0,0 +1,16 @@
# V2A-Converter
This is the converter with which you can convert MP4 files or other media formats to the v2a format.
You will have to install ffmpeg if not already installed.
Use the help argument to see the commands you can use.
## How to build
1. Install rustup and, with it, cargo
2. Run `cargo build` or `cargo run <args>` to run it directly
## Examples
I put some pre-converted files in the Examples folder, which I used to test the converter and the player.

View File

@@ -0,0 +1,35 @@
use anyhow::{Context, Result};
use std::io::Write;
use std::process::{Command, Stdio};
use tempfile::NamedTempFile;
/// Extract the audio track of `video_path` via ffmpeg as an in-memory WAV
/// (44.1 kHz, 16-bit signed little-endian, stereo).
///
/// # Errors
/// Fails if ffmpeg cannot be spawned, its output cannot be collected, or it
/// exits with a non-zero status. ffmpeg's stderr is captured and included in
/// the failure message (it was previously discarded, making failures opaque),
/// matching the diagnostic style of the ffprobe wrapper in `video.rs`.
pub fn extract_audio(video_path: &str) -> Result<Vec<u8>> {
    let output = Command::new("ffmpeg")
        .args([
            "-i", video_path,
            "-vn",                  // drop the video stream
            "-acodec", "pcm_s16le", // raw 16-bit PCM
            "-ar", "44100",
            "-ac", "2",
            "-f", "wav",
            "-",                    // write to stdout
        ])
        .stdout(Stdio::piped())
        // Capture stderr so a failure can report ffmpeg's own diagnostics.
        // `wait_with_output` drains both pipes concurrently, so this cannot
        // deadlock.
        .stderr(Stdio::piped())
        .spawn()
        .context("Failed to start ffmpeg for audio extraction")?
        .wait_with_output()
        .context("Failed to read audio output")?;
    if !output.status.success() {
        anyhow::bail!(
            "ffmpeg audio extraction failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }
    Ok(output.stdout)
}
/// Extract the audio of `video_path` into a named temporary file.
///
/// Returns the temp file handle (deleted on drop) together with the number
/// of bytes written to it.
pub fn extract_audio_to_temp(video_path: &str) -> Result<(NamedTempFile, u64)> {
    let wav_bytes = extract_audio(video_path)?;
    let byte_count = wav_bytes.len() as u64;
    let mut temp_file = NamedTempFile::new()?;
    temp_file.write_all(&wav_bytes)?;
    Ok((temp_file, byte_count))
}

View File

@@ -0,0 +1,79 @@
use crate::color::{Ansi256Palette, Rgb};
use crate::V2AFrame;
/// Converts raw RGB24 frames into palette-indexed half-block frames.
pub struct BlockProcessor {
    // ANSI-256 palette used for nearest-color lookups.
    palette: Ansi256Palette,
}
impl BlockProcessor {
    /// Create a processor with a freshly built ANSI-256 palette.
    pub fn new() -> Self {
        Self {
            palette: Ansi256Palette::new(),
        }
    }

    /// Convert one raw RGB24 frame into a palette-indexed half-block frame.
    ///
    /// Each output cell covers a 2x2 pixel area of the source: the two top
    /// pixels are averaged into the cell's top color and the two bottom
    /// pixels into its bottom color, and each average is mapped to the
    /// nearest ANSI-256 palette index.
    ///
    /// Odd trailing rows/columns are dropped by the integer division below.
    /// `rgb_data` must hold at least `original_width * original_height * 3`
    /// tightly packed RGB bytes; the direct indexing panics otherwise.
    pub fn process_frame(
        &self,
        rgb_data: &[u8],
        original_width: u32,
        original_height: u32,
    ) -> V2AFrame {
        let block_width = (original_width / 2) as u16;
        let block_height = (original_height / 2) as u16;
        let mut frame = V2AFrame::new(block_width, block_height);
        // Bytes per source scanline (3 bytes per pixel).
        let stride = (original_width * 3) as usize;
        for y in 0..block_height {
            let base_y = (y as u32) * 2;
            for x in 0..block_width {
                let base_x = (x as u32) * 2;
                // Channel sums for the two top and two bottom pixels of the
                // 2x2 source block.
                let mut top_r = 0u32;
                let mut top_g = 0u32;
                let mut top_b = 0u32;
                let mut bottom_r = 0u32;
                let mut bottom_g = 0u32;
                let mut bottom_b = 0u32;
                for dy in 0..2 {
                    let row = base_y + dy;
                    let row_start = row as usize * stride;
                    for dx in 0..2 {
                        let col = base_x + dx;
                        let pixel_start = row_start + (col as usize) * 3;
                        let r = rgb_data[pixel_start] as u32;
                        let g = rgb_data[pixel_start + 1] as u32;
                        let b = rgb_data[pixel_start + 2] as u32;
                        if dy == 0 {
                            top_r += r;
                            top_g += g;
                            top_b += b;
                        } else {
                            bottom_r += r;
                            bottom_g += g;
                            bottom_b += b;
                        }
                    }
                }
                // Each half sums exactly 2 pixels, so dividing by 2 yields
                // the average (truncating).
                let top_avg = Rgb::new(
                    (top_r / 2) as u8,
                    (top_g / 2) as u8,
                    (top_b / 2) as u8,
                );
                let bottom_avg = Rgb::new(
                    (bottom_r / 2) as u8,
                    (bottom_g / 2) as u8,
                    (bottom_b / 2) as u8,
                );
                let top_idx = self.palette.find_closest(top_avg);
                let bottom_idx = self.palette.find_closest(bottom_avg);
                // Row-major cell layout: [top, bottom] palette indices.
                frame.pixel_pairs[(y as usize) * (block_width as usize) + (x as usize)] =
                    [top_idx, bottom_idx];
            }
        }
        frame
    }
}
impl Default for BlockProcessor {
    /// Equivalent to [`BlockProcessor::new`].
    fn default() -> Self {
        Self::new()
    }
}

104
v2a-converter/src/color.rs Normal file
View File

@@ -0,0 +1,104 @@
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use lru::LruCache;
/// A 24-bit RGB color.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Rgb {
    pub r: u8,
    pub g: u8,
    pub b: u8,
}

impl Rgb {
    /// Construct a color from its three 8-bit channels.
    pub fn new(r: u8, g: u8, b: u8) -> Self {
        Rgb { r, g, b }
    }

    /// Squared Euclidean distance between two colors in RGB space.
    /// The maximum possible value is `3 * 255^2`, which always fits in a `u32`.
    fn distance_squared(&self, other: &Rgb) -> u32 {
        [
            (self.r, other.r),
            (self.g, other.g),
            (self.b, other.b),
        ]
        .into_iter()
        .map(|(a, b)| {
            let delta = a as i32 - b as i32;
            (delta * delta) as u32
        })
        .sum()
    }
}
/// The 256-color ANSI terminal palette with a memoized nearest-color lookup.
pub struct Ansi256Palette {
    // All 256 entries: 16 standard colors, 216 color cube, 24 grayscale.
    colors: Vec<Rgb>,
    // LRU memo of RGB -> nearest palette index, shared across worker threads.
    cache: Arc<Mutex<LruCache<Rgb, u8>>>,
}
impl Ansi256Palette {
    /// Build the standard 256-entry palette: indices 0-15 are the classic
    /// terminal colors, 16-231 the 6x6x6 color cube, 232-255 a grayscale ramp.
    pub fn new() -> Self {
        const STANDARD: [(u8, u8, u8); 16] = [
            (0, 0, 0),
            (128, 0, 0),
            (0, 128, 0),
            (128, 128, 0),
            (0, 0, 128),
            (128, 0, 128),
            (0, 128, 128),
            (192, 192, 192),
            (128, 128, 128),
            (255, 0, 0),
            (0, 255, 0),
            (255, 255, 0),
            (0, 0, 255),
            (255, 0, 255),
            (0, 255, 255),
            (255, 255, 255),
        ];
        const STEPS: [u8; 6] = [0, 95, 135, 175, 215, 255];
        let mut colors: Vec<Rgb> = STANDARD
            .iter()
            .map(|&(r, g, b)| Rgb::new(r, g, b))
            .collect();
        // 6x6x6 color cube, red-major order.
        for &r in &STEPS {
            for &g in &STEPS {
                for &b in &STEPS {
                    colors.push(Rgb::new(r, g, b));
                }
            }
        }
        // Grayscale ramp: 8, 18, ..., 238.
        colors.extend((0u8..24).map(|i| {
            let gray = 8 + i * 10;
            Rgb::new(gray, gray, gray)
        }));
        assert_eq!(colors.len(), 256);
        Self {
            colors,
            cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(65536).unwrap()))),
        }
    }

    /// Return the palette index whose color is nearest to `rgb` (squared
    /// Euclidean distance), memoizing results in the shared LRU cache.
    pub fn find_closest(&self, rgb: Rgb) -> u8 {
        if let Some(&cached) = self.cache.lock().unwrap().get(&rgb) {
            return cached;
        }
        // Linear scan of all 256 entries; `min_by_key` keeps the first of
        // equally-distant entries, matching a strict `<` comparison scan.
        let nearest = self
            .colors
            .iter()
            .enumerate()
            .min_by_key(|&(_, color)| rgb.distance_squared(color))
            .map(|(i, _)| i as u8)
            .expect("palette always holds 256 colors");
        self.cache.lock().unwrap().put(rgb, nearest);
        nearest
    }

    /// Look up the RGB value for a palette index.
    pub fn get_color(&self, index: u8) -> Rgb {
        self.colors[index as usize]
    }
}
impl Default for Ansi256Palette {
    /// Equivalent to [`Ansi256Palette::new`].
    fn default() -> Self {
        Self::new()
    }
}

View File

@@ -0,0 +1,126 @@
use crate::audio;
use crate::block::BlockProcessor;
use crate::video::{VideoInfo, FrameExtractor};
use crate::{V2AHeader, V2AFrame};
use anyhow::{Context, Result};
use crossbeam_channel::bounded;
use indicatif::{ProgressBar, ProgressStyle};
use std::fs::File;
use std::io::{BufWriter, Write};
use std::sync::Arc;
use std::thread;
/// Drives the multi-threaded video -> V2A conversion pipeline.
pub struct Converter {
    // Number of frame-processing worker threads to spawn per conversion.
    num_workers: usize,
}
impl Converter {
    /// Create a converter that will spawn `num_workers` processing threads.
    pub fn new(num_workers: usize) -> Self {
        Self { num_workers }
    }

    /// Convert the video at `input_path` into a V2A file at `output_path`.
    ///
    /// Pipeline: the main thread streams raw RGB24 frames out of ffmpeg and
    /// feeds them over a bounded channel to `num_workers` block-processing
    /// threads; a dedicated writer thread reorders the processed frames by
    /// index and appends each one, gzip-compressed, after the header and the
    /// audio block.
    pub fn convert(&self, input_path: &str, output_path: &str) -> Result<()> {
        let info = VideoInfo::from_path(input_path)
            .context("Failed to get video info")?;
        println!("Video: {}x{} @ {:.2} fps, {} frames", info.width, info.height, info.fps, info.frame_count);
        let progress = ProgressBar::new(info.frame_count as u64);
        progress.set_style(ProgressStyle::default_bar()
            .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}")
            .unwrap());
        // Audio is pulled fully into memory first so its exact byte size can
        // be recorded in the header, which precedes it in the file.
        progress.set_message("Extracting audio...");
        let audio_data = audio::extract_audio(input_path)
            .context("Audio extraction failed")?;
        let audio_size = audio_data.len() as u64;
        progress.set_message("Audio extracted");
        let file = File::create(output_path)
            .context("Failed to create output file")?;
        let mut writer = BufWriter::new(file);
        // NOTE(review): frame_count is ffprobe's (possibly estimated) count;
        // if ffmpeg decodes a different number of frames, the header will not
        // match the frames actually written — confirm the player tolerates this.
        let header = V2AHeader::new(
            info.frame_count,
            info.width,
            info.height,
            info.fps,
            audio_size,
        );
        header.write(&mut writer)
            .context("Failed to write header")?;
        writer.write_all(&audio_data)
            .context("Failed to write audio data")?;
        progress.set_message("Audio written");
        // Bounded channels cap in-flight frames at 2x the worker count so
        // memory use stays flat regardless of video length.
        let (raw_tx, raw_rx) = bounded::<(usize, Vec<u8>)>(self.num_workers * 2);
        let (processed_tx, processed_rx) = bounded::<(usize, V2AFrame)>(self.num_workers * 2);
        // Writer thread: frames may arrive out of order, so park them in a
        // BTreeMap and flush contiguous runs starting at `next_frame`.
        let writer_thread = thread::spawn(move || -> Result<()> {
            let mut next_frame = 0;
            let mut buffer = std::collections::BTreeMap::new();
            while let Ok((idx, frame)) = processed_rx.recv() {
                buffer.insert(idx, frame);
                while let Some(frame) = buffer.remove(&next_frame) {
                    frame.write_compressed(&mut writer)
                        .context("Failed to write compressed frame")?;
                    next_frame += 1;
                }
            }
            // Channel closed: drain whatever is left. Any gap in the index
            // sequence means a frame was lost and the file would be corrupt.
            for (idx, frame) in buffer.into_iter() {
                if idx != next_frame {
                    anyhow::bail!("Missing frame {}, got {}", next_frame, idx);
                }
                frame.write_compressed(&mut writer)?;
                next_frame += 1;
            }
            writer.flush()?;
            Ok(())
        });
        let block_processor = Arc::new(BlockProcessor::new());
        let width = info.width;
        let height = info.height;
        // Worker threads: pull raw frames, quantize them to half-block cells,
        // and pass the result (tagged with its index) to the writer thread.
        let worker_handles: Vec<_> = (0..self.num_workers)
            .map(|_| {
                let raw_rx = raw_rx.clone();
                let processed_tx = processed_tx.clone();
                let block_processor = block_processor.clone();
                let progress = progress.clone();
                thread::spawn(move || -> Result<()> {
                    while let Ok((idx, rgb_data)) = raw_rx.recv() {
                        let frame = block_processor.process_frame(
                            &rgb_data,
                            width,
                            height,
                        );
                        processed_tx.send((idx, frame))
                            .context("Failed to send processed frame")?;
                        progress.inc(1);
                    }
                    Ok(())
                })
            })
            .collect();
        // Main thread: stream decoded frames out of ffmpeg and fan them out.
        let mut extractor = FrameExtractor::new(input_path, info.width, info.height)
            .context("Failed to start frame extractor")?;
        let frame_size = (info.width * info.height * 3) as usize;
        let mut frame_buffer = vec![0; frame_size];
        let mut frame_index = 0;
        while extractor.read_frame(&mut frame_buffer)
            .context("Failed to read frame")?
        {
            raw_tx.send((frame_index, frame_buffer.clone()))
                .context("Failed to send raw frame")?;
            frame_index += 1;
        }
        // Shutdown: closing raw_tx ends the workers; once they are joined,
        // dropping the last processed_tx sender ends the writer thread.
        drop(raw_tx);
        for handle in worker_handles {
            handle.join().unwrap()?;
        }
        drop(processed_tx);
        writer_thread.join().unwrap()?;
        progress.finish_with_message("Conversion complete");
        Ok(())
    }
}

143
v2a-converter/src/lib.rs Normal file
View File

@@ -0,0 +1,143 @@
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use flate2::{read::GzDecoder, write::GzEncoder, Compression};
use std::io::{Read, Write};
pub mod audio;
pub mod block;
pub mod color;
pub mod converter;
pub mod video;
/// Magic bytes identifying a V2A file (validated by `V2AHeader::read`).
pub const MAGIC: &[u8; 4] = b"V2A\0";
/// Current on-disk format version; readers reject any other value.
pub const VERSION: u16 = 2;
/// Fixed-size (32-byte) header at the start of every V2A file.
///
/// On-disk layout (multi-byte fields little-endian, see `write`):
/// magic (4) | version (2) | frame_count (4) | original_width (4) |
/// original_height (4) | fps (4, f32) | audio_size (8) | padding (2).
#[derive(Debug, Clone, Copy)]
pub struct V2AHeader {
    // File magic, always `b"V2A\0"`.
    pub magic: [u8; 4],
    // Format version, currently `VERSION`.
    pub version: u16,
    // Number of video frames stored after the audio block.
    pub frame_count: u32,
    // Source video width in pixels.
    pub original_width: u32,
    // Source video height in pixels.
    pub original_height: u32,
    // Playback frame rate.
    pub fps: f32,
    // Size in bytes of the WAV audio block that follows the header.
    pub audio_size: u64,
    // Reserved; written as zeros, pads the header to 32 bytes.
    pub _padding: [u8; 2],
}
impl V2AHeader {
    /// Assemble a header from the given metadata; the magic and version come
    /// from the crate-level constants and the padding is zeroed.
    pub fn new(
        frame_count: u32,
        original_width: u32,
        original_height: u32,
        fps: f32,
        audio_size: u64,
    ) -> Self {
        Self {
            magic: *MAGIC,
            version: VERSION,
            frame_count,
            original_width,
            original_height,
            fps,
            audio_size,
            _padding: [0; 2],
        }
    }

    /// Serialize the header in its fixed 32-byte little-endian layout.
    /// Field order must stay in sync with [`V2AHeader::read`].
    pub fn write<W: Write>(&self, mut writer: W) -> std::io::Result<()> {
        writer.write_all(&self.magic)?;
        writer.write_u16::<LittleEndian>(self.version)?;
        writer.write_u32::<LittleEndian>(self.frame_count)?;
        writer.write_u32::<LittleEndian>(self.original_width)?;
        writer.write_u32::<LittleEndian>(self.original_height)?;
        writer.write_f32::<LittleEndian>(self.fps)?;
        writer.write_u64::<LittleEndian>(self.audio_size)?;
        writer.write_all(&self._padding)?;
        Ok(())
    }

    /// Deserialize and validate a header.
    ///
    /// Returns an `ErrorKind::InvalidData` error when the magic bytes or the
    /// format version do not match the crate constants.
    pub fn read<R: Read>(mut reader: R) -> std::io::Result<Self> {
        let mut magic = [0; 4];
        reader.read_exact(&mut magic)?;
        if &magic != MAGIC {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Invalid magic",
            ));
        }
        let version = reader.read_u16::<LittleEndian>()?;
        if version != VERSION {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Unsupported version",
            ));
        }
        let frame_count = reader.read_u32::<LittleEndian>()?;
        let original_width = reader.read_u32::<LittleEndian>()?;
        let original_height = reader.read_u32::<LittleEndian>()?;
        let fps = reader.read_f32::<LittleEndian>()?;
        let audio_size = reader.read_u64::<LittleEndian>()?;
        let mut padding = [0; 2];
        reader.read_exact(&mut padding)?;
        Ok(Self {
            magic,
            version,
            frame_count,
            original_width,
            original_height,
            fps,
            audio_size,
            _padding: padding,
        })
    }
}
/// One video frame stored as half-block cells.
///
/// Each entry of `pixel_pairs` is `[top, bottom]`: the ANSI-256 palette
/// indices for the upper and lower halves of one character cell.
#[derive(Debug, Clone)]
pub struct V2AFrame {
    // Frame width in cells.
    pub width: u16,
    // Frame height in cells.
    pub height: u16,
    // Row-major `width * height` entries of `[top, bottom]` palette indices.
    pub pixel_pairs: Vec<[u8; 2]>,
}
impl V2AFrame {
    /// Create a `width` x `height` frame with every cell initialized to
    /// palette index 0 for both halves.
    pub fn new(width: u16, height: u16) -> Self {
        Self {
            width,
            height,
            pixel_pairs: vec![[0, 0]; (width as usize) * (height as usize)],
        }
    }

    /// Gzip-compress and write this frame: width (u16 LE), height (u16 LE),
    /// then `width * height` `[top, bottom]` palette-index pairs.
    pub fn write_compressed<W: Write>(&self, writer: W) -> std::io::Result<()> {
        let mut encoder = GzEncoder::new(writer, Compression::best());
        encoder.write_u16::<LittleEndian>(self.width)?;
        encoder.write_u16::<LittleEndian>(self.height)?;
        for pair in &self.pixel_pairs {
            encoder.write_all(pair)?;
        }
        encoder.finish()?;
        Ok(())
    }

    /// Read one gzip-compressed frame written by [`V2AFrame::write_compressed`].
    pub fn read_compressed<R: Read>(reader: R) -> std::io::Result<Self> {
        let mut decoder = GzDecoder::new(reader);
        let width = decoder.read_u16::<LittleEndian>()?;
        let height = decoder.read_u16::<LittleEndian>()?;
        let pixel_count = (width as usize) * (height as usize);
        // Bulk-read the whole pixel payload with a single read_exact instead
        // of one 2-byte read per pair, then regroup into pairs; this avoids
        // per-pixel decoder call overhead.
        let mut raw = vec![0u8; pixel_count * 2];
        decoder.read_exact(&mut raw)?;
        let pixel_pairs = raw
            .chunks_exact(2)
            .map(|chunk| [chunk[0], chunk[1]])
            .collect();
        Ok(Self {
            width,
            height,
            pixel_pairs,
        })
    }
}
pub use converter::Converter;
pub use block::BlockProcessor;
pub use color::Ansi256Palette;
pub use video::{VideoInfo, FrameExtractor};

54
v2a-converter/src/main.rs Normal file
View File

@@ -0,0 +1,54 @@
use clap::{Parser, Subcommand};
use v2a_converter::{Converter, V2AHeader};
use std::fs::File;
use std::io::BufReader;
// Top-level CLI definition. Plain `//` comments are used on purpose: clap's
// derive turns `///` doc comments into --help text, which must not change.
#[derive(Parser)]
#[command(name = "v2a-converter")]
#[command(about = "Convert video to V2A format", long_about = None)]
struct Cli {
    // The subcommand to dispatch on (see `Commands`).
    #[command(subcommand)]
    command: Commands,
}
// Subcommands: `convert` builds a .v2a file, `info` prints header metadata.
// Plain `//` comments on purpose: `///` would change clap's --help output.
#[derive(Subcommand)]
enum Commands {
    // Convert a source video into the V2A format.
    Convert {
        // Path to the input video (anything ffmpeg can decode).
        input: String,
        // Path of the .v2a file to create.
        output: String,
        // Worker thread count; defaults to the logical CPU count.
        #[arg(short, long, default_value_t = num_cpus::get())]
        workers: usize,
        // NOTE(review): parsed but never used by the Convert handler in
        // main() — confirm whether frame-rate override is still planned.
        #[arg(long)]
        fps: Option<f32>,
    },
    // Print the header metadata of an existing .v2a file.
    Info {
        // Path to the .v2a file to inspect.
        file: String,
    },
}
/// Entry point: parse CLI arguments and dispatch to convert / info.
fn main() -> anyhow::Result<()> {
    let cli = Cli::parse();
    match cli.command {
        Commands::Convert { input, output, workers, fps } => {
            // `--fps` is parsed but conversion cannot override the frame rate
            // yet; warn instead of silently ignoring the flag.
            if fps.is_some() {
                eprintln!("warning: --fps is not supported yet and will be ignored");
            }
            let converter = Converter::new(workers);
            converter.convert(&input, &output)?;
            println!("Successfully converted {} to {}", input, output);
        }
        Commands::Info { file } => {
            let f = File::open(&file)?;
            let mut reader = BufReader::new(f);
            let header = V2AHeader::read(&mut reader)?;
            println!("V2A File: {}", file);
            // Strip the trailing NUL of the b"V2A\0" magic before printing.
            println!(" Magic: {}", String::from_utf8_lossy(&header.magic).trim_end_matches('\0'));
            println!(" Version: {}", header.version);
            println!(" Frames: {}", header.frame_count);
            println!(" Original resolution: {}x{}", header.original_width, header.original_height);
            println!(" FPS: {:.2}", header.fps);
            println!(" Audio size: {} bytes", header.audio_size);
            let metadata = std::fs::metadata(&file)?;
            println!(" Total file size: {} bytes", metadata.len());
            // The header is a fixed 32 bytes (see V2AHeader::write). Saturate
            // so a truncated/corrupt file cannot underflow the u64 subtraction
            // (which would panic in debug builds).
            let frame_bytes = metadata
                .len()
                .saturating_sub(32)
                .saturating_sub(header.audio_size);
            println!(" Frame data size: {} bytes", frame_bytes);
        }
    }
    Ok(())
}

158
v2a-converter/src/video.rs Normal file
View File

@@ -0,0 +1,158 @@
use anyhow::{Context, Result};
use serde::Deserialize;
use std::process::{Command, Stdio};
use std::io::Read;
/// Metadata about a source video, as reported by ffprobe.
#[derive(Debug, Clone)]
pub struct VideoInfo {
    // Width in pixels (0 if ffprobe did not report it).
    pub width: u32,
    // Height in pixels (0 if ffprobe did not report it).
    pub height: u32,
    // Total frame count; estimated as duration * fps when ffprobe does not
    // report `nb_frames`.
    pub frame_count: u32,
    // Average frame rate (0.0 when unknown).
    pub fps: f32,
    // Duration in seconds (0.0 when unknown).
    pub duration: f32,
}
/// Parse an ffprobe rational such as "30000/1001" into (numerator, denominator).
///
/// Returns `None` unless the input is exactly two '/'-separated `u32` values
/// (a second '/' makes the denominator unparsable, so it also yields `None`).
fn parse_fraction(fraction: &str) -> Option<(u32, u32)> {
    let (num, den) = fraction.split_once('/')?;
    Some((num.parse().ok()?, den.parse().ok()?))
}
impl VideoInfo {
    /// Probe `path` with ffprobe (JSON output) and derive the video's
    /// dimensions, frame rate, duration, and frame count.
    ///
    /// Fallback chain: fps comes from `avg_frame_rate` (0.0 if missing or the
    /// denominator is 0); duration from the stream, then the container
    /// format, then 0.0; frame count from `nb_frames`, else estimated as
    /// `duration * fps` rounded.
    pub fn from_path(path: &str) -> Result<Self> {
        let output = Command::new("ffprobe")
            .args([
                "-v",
                "quiet",
                "-print_format",
                "json",
                "-show_format",
                "-show_streams",
                path,
            ])
            .output()
            .context("Failed to execute ffprobe")?;
        if !output.status.success() {
            anyhow::bail!("ffprobe failed: {}", String::from_utf8_lossy(&output.stderr));
        }
        let probe: FfprobeOutput = serde_json::from_slice(&output.stdout)
            .context("Failed to parse ffprobe JSON")?;
        // Use the first stream ffprobe reports as video.
        let video_stream = probe
            .streams
            .into_iter()
            .find(|s| s.codec_type == "video")
            .context("No video stream found")?;
        let width = video_stream.width.unwrap_or(0);
        let height = video_stream.height.unwrap_or(0);
        // ffprobe reports these numeric fields as JSON strings.
        let nb_frames = video_stream.nb_frames.and_then(|s| s.parse().ok());
        let avg_frame_rate = video_stream.avg_frame_rate.as_deref()
            .and_then(parse_fraction)
            .unwrap_or((0, 1));
        // Guard against a zero denominator (presumably ffprobe can emit
        // "0/0" for some inputs — the check keeps the division safe either way).
        let fps = if avg_frame_rate.1 == 0 { 0.0 } else { avg_frame_rate.0 as f32 / avg_frame_rate.1 as f32 };
        let duration = video_stream.duration
            .as_deref()
            .and_then(|s| s.parse().ok())
            .or_else(|| probe.format.duration.as_deref().and_then(|s| s.parse().ok()))
            .unwrap_or(0.0);
        // `nb_frames` is frequently absent; estimate from duration in that case.
        let frame_count = nb_frames.unwrap_or_else(|| {
            (duration * fps).round() as u32
        });
        Ok(Self {
            width,
            height,
            frame_count,
            fps,
            duration,
        })
    }
}
/// Top-level shape of `ffprobe -print_format json` output; only the fields
/// this module reads are modeled.
#[derive(Debug, Deserialize)]
struct FfprobeOutput {
    streams: Vec<Stream>,
    format: Format,
}
#[derive(Debug, Deserialize)]
struct Stream {
codec_type: String,
width: Option<u32>,
height: Option<u32>,
#[serde(rename = "nb_frames")]
nb_frames: Option<String>,
#[serde(rename = "avg_frame_rate")]
avg_frame_rate: Option<String>,
duration: Option<String>,
}
/// Container-level ffprobe metadata; `duration` serves as a fallback when
/// the video stream itself reports none.
#[derive(Debug, Deserialize)]
struct Format {
    // Reported by ffprobe as a JSON string, e.g. "12.345".
    duration: Option<String>,
}
/// Streams raw RGB24 frames from an ffmpeg child process.
pub struct FrameExtractor {
    // Frame width in pixels, as passed to `new`.
    width: u32,
    // Frame height in pixels, as passed to `new`.
    height: u32,
    // The spawned ffmpeg process; killed when the extractor is dropped.
    child: std::process::Child,
    // ffmpeg's stdout pipe, from which raw frame bytes are read.
    stdout: std::process::ChildStdout,
    // Size of one frame in bytes: width * height * 3.
    frame_size: usize,
}
impl FrameExtractor {
    /// Spawn ffmpeg decoding `path` to raw RGB24 frames on its stdout.
    ///
    /// `width`/`height` must match the source video's dimensions so that
    /// frame boundaries line up with `frame_size` (ffmpeg is not asked to
    /// scale here).
    pub fn new(path: &str, width: u32, height: u32) -> Result<Self> {
        let mut child = Command::new("ffmpeg")
            .args([
                "-i", path,
                "-vf", "format=rgb24",
                "-f", "rawvideo",
                "-pix_fmt", "rgb24",
                "-",
            ])
            .stdout(Stdio::piped())
            .stderr(Stdio::null())
            .spawn()
            .context("Failed to start ffmpeg")?;
        let stdout = child.stdout.take().context("Failed to capture stdout")?;
        let frame_size = (width * height * 3) as usize;
        Ok(Self {
            width,
            height,
            child,
            stdout,
            frame_size,
        })
    }

    /// Read exactly one frame into `buffer`.
    ///
    /// Returns `Ok(true)` when a full frame was read, `Ok(false)` at end of
    /// stream. A partial trailing frame (EOF mid-frame) is discarded and
    /// reported as end of stream.
    pub fn read_frame(&mut self, buffer: &mut [u8]) -> Result<bool> {
        // Zero the buffer with the dedicated slice fill (compiles to memset)
        // instead of the previous per-byte iterator write.
        buffer.fill(0);
        let mut read = 0;
        while read < self.frame_size {
            match self.stdout.read(&mut buffer[read..]) {
                Ok(0) => return Ok(false),
                Ok(n) => read += n,
                // Retry on EINTR-style interruptions.
                Err(e) if e.kind() == std::io::ErrorKind::Interrupted => continue,
                Err(e) => return Err(e.into()),
            }
        }
        Ok(true)
    }

    /// Frame width in pixels, as configured at construction.
    pub fn width(&self) -> u32 { self.width }
    /// Frame height in pixels, as configured at construction.
    pub fn height(&self) -> u32 { self.height }
}
impl Drop for FrameExtractor {
    /// Terminate a still-running ffmpeg and reap it.
    ///
    /// The previous version only called `kill()`; without a matching `wait()`
    /// the killed child lingers as a zombie process on Unix until this
    /// process exits. Both results are ignored: the child may already have
    /// exited on its own.
    fn drop(&mut self) {
        let _ = self.child.kill();
        let _ = self.child.wait();
    }
}