initial commit w/ working waveform from default stereo audio output monitor capture via pw

ao kami 2025-07-13 17:22:52 +02:00
commit b7c7d1c5c9
5 changed files with 356 additions and 0 deletions

4
.gitignore vendored Normal file

@@ -0,0 +1,4 @@
/target
Cargo.lock
*.svg
*.data

85
Cargo.toml Normal file

@@ -0,0 +1,85 @@
[package]
name = "rumble-wrecker"
version = "0.1.0"
edition = "2024"

[dependencies]
iced = { version = "0.14.0-dev", features = [
# "default" # ["wgpu", "tiny-skia", "web-colors", "auto-detect-theme", "thread-pool"]
# [DEFAULT] Enables the `wgpu` GPU-accelerated renderer backend => ["iced_renderer/wgpu", "iced_widget/wgpu"]
# "wgpu",
# [DEFAULT] Enables the `tiny-skia` software renderer backend => ["iced_renderer/tiny-skia"]
# "tiny-skia",
# Enables the `image` widget => ["image-without-codecs", "image/default"]
"image",
# [INCLUDED] Enables the `image` widget, without any built-in codecs of the `image` crate => ["iced_widget/image", "dep:image"]
# "image-without-codecs",
# Enables the `svg` widget => ["iced_widget/svg"]
# "svg",
# Enables the `canvas` widget => ["iced_widget/canvas"]
"canvas",
# Enables the `qr_code` widget => ["iced_widget/qr_code"]
# "qr_code",
# Enables the `markdown` widget => ["iced_widget/markdown"]
# "markdown",
# Enables lazy widgets => ["iced_widget/lazy"]
# "lazy",
# Enables a debug view in native platforms (press F12) => ["iced_winit/debug", "iced_devtools"]
"debug",
# Enables time-travel debugging (very experimental!) => ["debug", "iced_devtools/time-travel"]
"time-travel",
# [DEFAULT] Enables the `thread-pool` futures executor as the `executor::Default` on native platforms => ["iced_futures/thread-pool"]
# "thread-pool",
# Enables `tokio` as the `executor::Default` on native platforms => ["iced_futures/tokio"]
"tokio",
# Enables `smol` as the `executor::Default` on native platforms => ["iced_futures/smol"]
# "smol",
# Enables querying system information => ["iced_winit/system"]
"system",
# [DEFAULT] Enables broken "sRGB linear" blending to reproduce color management of the Web => ["iced_renderer/web-colors"]
# "web-colors",
# Enables the WebGL backend => ["iced_renderer/webgl"]
# "webgl",
# Enables syntax highlighting => ["iced_highlighter", "iced_widget/highlighter"]
# "highlighter",
# Enables the advanced module => ["iced_core/advanced", "iced_widget/advanced"]
"advanced",
# Embeds Fira Sans into the final application; useful for testing and Wasm builds => ["iced_renderer/fira-sans"]
"fira-sans",
# [DEFAULT] Auto-detects light/dark mode for the built-in theme => ["iced_core/auto-detect-theme"]
# "auto-detect-theme",
# Enables strict assertions for debugging purposes at the expense of performance => ["iced_renderer/strict-assertions"]
"strict-assertions",
# Redraws on every runtime event, and not only when a widget requests it => ["iced_winit/unconditional-rendering"]
# "unconditional-rendering",
# Enables support for the `sipper` library => ["iced_runtime/sipper"]
"sipper"
] }
pipewire = "0.8.0"
rustfft = "6.4.0"
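
# 0.14.0-dev is a pre-release, so iced is pulled straight from its git repository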
[patch.crates-io]
iced = { git = "https://github.com/iced-rs/iced.git" }

[profile.release]
debug = true

78
src/main.rs Normal file

@@ -0,0 +1,78 @@
mod pw;
mod utils;

fn main() -> iced::Result {
iced::application(Spectrogram::new, Spectrogram::update, Spectrogram::view)
.subscription(Spectrogram::subscription)
// .title("Rumble Wrecker")
.run()
}

struct Spectrogram {
stream_left: std::sync::mpsc::Receiver<Vec<f32>>,
stream_right: std::sync::mpsc::Receiver<Vec<f32>>,
waveform_img: Rgba,
}

struct Rgba {
width: u32,
height: u32,
pixels: Vec<u8>,
}

#[derive(Debug, Clone, Copy)]
enum Message {
Tick,
}

impl Spectrogram {
fn new() -> Self {
let (tx_left, rx_left) = std::sync::mpsc::channel::<Vec<f32>>();
let (tx_right, rx_right) = std::sync::mpsc::channel::<Vec<f32>>();
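// PipeWire's main loop blocks, so run the capture on a dedicated thread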
std::thread::spawn(move || {
let _pw_res = crate::pw::init_pw(tx_left, tx_right);
});
// Waveform image dimensions and an initially all-white RGBA8 buffer
let w = 1024u32;
let h = 256u32;
let p = vec![255u8; (w * h * 4) as usize];
Self {
stream_left: rx_left,
stream_right: rx_right,
waveform_img: Rgba { width: w, height:h, pixels: p }
}
}

fn update(&mut self, message: Message) {
match message {
Message::Tick => {
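// Drain everything the capture thread has sent since the last tick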
let mut left_samples = Vec::<f32>::new();
let mut right_samples = Vec::<f32>::new();
while let Ok(mut left) = self.stream_left.try_recv() {
left_samples.append(&mut left);
}
while let Ok(mut right) = self.stream_right.try_recv() {
right_samples.append(&mut right);
}
utils::update_waveform(&mut self.waveform_img, (&left_samples, &right_samples));
}
}
}

fn view(&self) -> iced::Element<'_, Message> {
let img_handle = iced::widget::image::Handle::from_rgba(
self.waveform_img.width,
self.waveform_img.height,
self.waveform_img.pixels.clone()
);
iced::widget::center_x(iced::widget::image(img_handle)).into()
}

fn subscription(&self) -> iced::Subscription<Message> {
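// Redraw the waveform roughly 20 times per second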
iced::time::every(std::time::Duration::from_millis(50)).map(|_| Message::Tick)
}
}

151
src/pw.rs Normal file

@@ -0,0 +1,151 @@
struct UserData {
format: pipewire::spa::param::audio::AudioInfoRaw,
}

pub fn init_pw(
tx_left: std::sync::mpsc::Sender<Vec<f32>>,
tx_right: std::sync::mpsc::Sender<Vec<f32>>,
) -> Result<(), pipewire::Error> {
pipewire::init();
// Pipewire inner
let mainloop = pipewire::main_loop::MainLoop::new(None)?;
let context = pipewire::context::Context::new(&mainloop)?;
let core = context.connect(None)?;
// Data to carry around in the callbacks
let data = UserData {
format: Default::default(),
};
let mut properties = pipewire::properties::properties! {
*pipewire::keys::MEDIA_TYPE => "Audio",
*pipewire::keys::MEDIA_CATEGORY => "Capture",
*pipewire::keys::MEDIA_ROLE => "Music",
};
// Uncomment and implement to select a specific target device
// properties.insert(*pipewire::keys::TARGET_OBJECT, target);
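// e.g. (hypothetical node name, list real ones with `wpctl status`):
// properties.insert(*pipewire::keys::TARGET_OBJECT, "alsa_output.pci-0000_00_1f.3.analog-stereo");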
// Capture from the sink's monitor, i.e. whatever is currently being played back
properties.insert(*pipewire::keys::STREAM_CAPTURE_SINK, "true");
let stream = pipewire::stream::Stream::new(&core, "audio-capture", properties)?;
let _listener = stream
.add_local_listener_with_user_data(data)
// PARAMETERS CALLBACK
.param_changed(|_stream_ref, user_data, id, param| {
// We need those params
let Some(param) = param else {
return;
};
// We want FORMAT params
if id != pipewire::spa::param::ParamType::Format.as_raw() {
return;
}
// We want the correct media TYPE/SUBTYPE
let (media_type, media_subtype) =
match pipewire::spa::param::format_utils::parse_format(param) {
Ok(v) => v,
Err(_) => return,
};
if media_type != pipewire::spa::param::format::MediaType::Audio
|| media_subtype != pipewire::spa::param::format::MediaSubtype::Raw
{
return;
}
// Parse the format POD into the user data so the rate/channel info is usable later
user_data
.format
.parse(param)
.expect("Failed to parse param changed to AudioInfoRaw");
println!(
"capturing rate:{} channels:{}",
user_data.format.rate(),
user_data.format.channels()
);
})
// PROCESSING CALLBACK
.process(move |stream, user_data| match stream.dequeue_buffer() {
None => println!("out of buffers"),
Some(mut buffer) => {
let datas = buffer.datas_mut();
if datas.is_empty() {
return;
}
let data = &mut datas[0];
let n_channels = user_data.format.channels() as usize;
let data_size = data.chunk().size() as usize;
let _n_samples = data_size / std::mem::size_of::<f32>();
if let Some(samples) = data.data() {
let stride = std::mem::size_of::<f32>();
// Copy the &[u8] slice
let samples_vec: Vec<u8> = Vec::from(&samples[0 .. data_size]);
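// F32LE frames are interleaved ([L][R][L][R]..., 4 bytes per sample), so taking
// every `n_channels`-th 4-byte chunk isolates one channel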
// Filter left channel bytes with stride and convert into f32
let chan_l: Vec<f32> = samples_vec
.chunks_exact(stride)
.step_by(n_channels)
.map(|chnk| f32::from_le_bytes(chnk.try_into().unwrap()))
.collect();
// println!(
// "LEFT: captured {} samples (stride: {}) and sent {} elements",
// _n_samples / n_channels,
// stride,
// chan_l.len(),
// );
// Filter right channel bytes with stride and convert into f32
let chan_r: Vec<f32> = samples_vec
.chunks_exact(stride)
.skip(1)
.step_by(n_channels)
.map(|chnk| f32::from_le_bytes(chnk.try_into().unwrap()))
.collect();
// println!(
// "RIGHT: captured {} samples (stride: {}) and sent {} elements",
// _n_samples / n_channels,
// stride,
// chan_r.len(),
// );
// Send the per-channel sample data to the GUI thread
tx_left.send(chan_l).unwrap();
tx_right.send(chan_r).unwrap();
}
}
})
.register()?;

// Build the stream format params: request raw F32LE audio
let mut audio_info = pipewire::spa::param::audio::AudioInfoRaw::new();
audio_info.set_format(pipewire::spa::param::audio::AudioFormat::F32LE);
let obj = pipewire::spa::pod::Object {
type_: pipewire::spa::utils::SpaTypes::ObjectParamFormat.as_raw(),
id: pipewire::spa::param::ParamType::EnumFormat.as_raw(),
properties: audio_info.into(),
};
let values: Vec<u8> = pipewire::spa::pod::serialize::PodSerializer::serialize(
std::io::Cursor::new(Vec::new()),
&pipewire::spa::pod::Value::Object(obj),
)
.unwrap()
.0
.into_inner();
let mut params = [pipewire::spa::pod::Pod::from_bytes(&values).unwrap()];
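// AUTOCONNECT links the stream to the default target (here the sink monitor),
// MAP_BUFFERS maps buffer memory so `data.data()` is readable, and RT_PROCESS
// runs the process callback on the realtime thread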
stream.connect(
pipewire::spa::utils::Direction::Input,
None,
pipewire::stream::StreamFlags::AUTOCONNECT
| pipewire::stream::StreamFlags::MAP_BUFFERS
| pipewire::stream::StreamFlags::RT_PROCESS,
&mut params,
)?;

// Run the loop; this blocks the thread and drives the stream callbacks
mainloop.run();
Ok(())
}

38
src/utils.rs Normal file

@@ -0,0 +1,38 @@
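/// Draws both captured channels as waveforms into the RGBA buffer: each pixel
/// column shows the average of a window of samples, left channel in blue and
/// right channel in red, with vertical lines connecting successive columns.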
pub fn update_waveform(rgba: &mut crate::Rgba, samples: (&Vec<f32>, &Vec<f32>)) {
let (left_samples, right_samples) = samples;
// White background (tightly packed RGBA8 pixels)
rgba.pixels.fill(255);
// Number of samples averaged into each pixel column
let left_column_width: usize = left_samples.len() / rgba.width as usize;
let right_column_width: usize = right_samples.len() / rgba.width as usize;
// Not enough samples arrived this tick to fill every column: keep the blank frame
if left_column_width == 0 || right_column_width == 0 { return; }
let half_height = (rgba.height / 2) as f32;
let mut last_rows = ((rgba.height / 2) as usize, (rgba.height / 2) as usize);
for x in 0..rgba.width as usize {
// LEFT CHANNEL: average this column's samples and map [-1, 1] to a pixel row
let column_values : &[f32] = &left_samples[x*left_column_width..(x+1)*left_column_width];
let column_average : f32 = (column_values.iter().sum::<f32>() / column_values.len() as f32).clamp(-0.99, 0.99);
let row = (half_height - (column_average * half_height)).round() as usize;
// Draw vertical lines from last row
let range : Vec<usize> = if row <= last_rows.0 {(row..=last_rows.0).collect()} else {(last_rows.0..=row).rev().collect()};
for row in range {
let coord = (row * rgba.width as usize * 4) + (x * 4);
// Zero out red and green: the left channel renders in blue
rgba.pixels[coord] = 0;
rgba.pixels[coord+1] = 0;
}
last_rows.0 = row;
// RIGHT CHANNEL: same mapping for the right samples
let column_values : &[f32] = &right_samples[x*right_column_width..(x+1)*right_column_width];
let column_average : f32 = (column_values.iter().sum::<f32>() / column_values.len() as f32).clamp(-0.99, 0.99);
let row = (half_height - (column_average * half_height)).round() as usize;
// Draw vertical lines from last row
let range : Vec<usize> = if row <= last_rows.1 {(row..=last_rows.1).collect()} else {(last_rows.1..=row).rev().collect()};
for row in range {
let coord = (row * rgba.width as usize * 4) + (x * 4);
// Zero out green and blue: the right channel renders in red
rgba.pixels[coord+1] = 0;
rgba.pixels[coord+2] = 0;
}
last_rows.1 = row;
}
}