Compare commits

...

9 Commits

Author SHA1 Message Date
csd4ni3l
aecba51e77 add DeviceTrait to fix Windows crashing when getting audio device name 2026-01-23 19:40:55 +01:00
csd4ni3l
f7d6bb9d6b fix windows trying to return normal input which doenst work right now 2026-01-23 19:16:50 +01:00
csd4ni3l
3aca3d7a4a Add libudev dependency to Linux 2026-01-23 18:53:23 +01:00
csd4ni3l
6e19790c93 Make debug builds much less heavy by disabling unnecessary debug information, add ALSA headers to Linux and rename the step 2026-01-23 18:45:09 +01:00
csd4ni3l
d8a7db6b78 fix main.yaml incorrect naming for windows exe verify, add wayland & x11 package installs that are needed for on Linux 2026-01-23 18:28:18 +01:00
csd4ni3l
4b74e211f6 Fix Linux build not installing mold & clang 2026-01-23 18:21:51 +01:00
csd4ni3l
b539327672 Improve README, make bevy wayland & x11 a linux-only dependency, add rust building, fix dead code warnings, try adding normal playing with virtual (didnt work, comments), fix pulse not working sometimes by adding a manual playback move to the virtual mic, make windows, linux and other virtual mic code per-system 2026-01-23 18:18:38 +01:00
csd4ni3l
4fff1d4709 Add warning for other OSes and fix macos warning 2026-01-23 14:27:08 +01:00
csd4ni3l
6fcd4a4154 Update README to include clang dependency, move virtual mic creation to the app, add length tracking to the app and remove sounds that already ended, move from Systemtime to Instant and virtual mic recreation inside the app if anything bugs out 2026-01-23 14:20:35 +01:00
6 changed files with 279 additions and 32 deletions

95
.github/workflows/main.yaml vendored Normal file
View File

@@ -0,0 +1,95 @@
name: Build and Release
on: push
jobs:
  build:
    name: Build on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        include:
          - os: ubuntu-22.04
            platform: linux
          - os: windows-latest
            platform: windows
    steps:
      - name: Check-out repository
        uses: actions/checkout@v4
      - name: Cache
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            target
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: Install mold, clang, Wayland, ALSA and x11 headers and dependencies (Linux)
        if: matrix.os == 'ubuntu-22.04'
        run: |
          sudo apt update
          sudo apt install -y build-essential clang cmake pkg-config mold \
            libwayland-dev libxkbcommon-dev libegl1-mesa-dev \
            libwayland-egl-backend-dev \
            libx11-dev libxext-dev libxrandr-dev libxinerama-dev libxcursor-dev \
            libxi-dev libxfixes-dev libxrender-dev \
            libfreetype6-dev libfontconfig1-dev libgl1-mesa-dev \
            libasound2-dev libudev-dev
        shell: bash
      - name: Build
        run: cargo build --release --verbose
      - name: Verify executable (Linux)
        if: matrix.os == 'ubuntu-22.04'
        # `test` with a single non-flag argument always succeeds (it only checks
        # the string is non-empty); `-x` actually verifies the binary exists and
        # is executable, failing the job otherwise.
        run: test -x target/release/soundboard
        shell: bash
      - name: Verify executable (Windows)
        if: matrix.os == 'windows-latest'
        # Test-Path merely *prints* True/False and exits 0 either way; exit
        # non-zero explicitly so a missing exe fails the job.
        run: if (!(Test-Path target\release\soundboard.exe)) { exit 1 }
        shell: pwsh
      - name: Upload build artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.platform }}
          # Only one of the two paths exists per OS; upload-artifact warns about
          # the other but still uploads the match.
          path: |
            target/release/soundboard
            target/release/soundboard.exe
  release:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Download All Build Artifacts
        uses: actions/download-artifact@v4
        with:
          path: downloads
      - name: Create release (if missing) and upload artifacts to tag
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail
          TAG="latest"
          echo "Target release tag: $TAG"
          if gh release view "$TAG" >/dev/null 2>&1; then
            echo "Release $TAG already exists; will upload assets with --clobber"
          else
            gh release create "$TAG" \
              --title "$TAG" \
              --notes "Automated build for $TAG"
          fi
          # Upload the executables directly (no zip files)
          gh release upload "$TAG" downloads/linux/soundboard --clobber
          gh release upload "$TAG" downloads/windows/soundboard.exe --clobber

View File

@@ -14,7 +14,13 @@ serde_json = "1.0.146"
[dependencies.bevy]
version = "0.17.3"
features = [
# NOTE(review): diff residue — "wayland"/"x11" below appear to be the
# *removed* (pre-change) feature lines; the Linux-only target block further
# down supersedes them. Confirm against the repository before relying on this.
"wayland",
"x11",
"bevy_winit",
]
# Keep debug builds light: optimize all dependencies and skip their debug info.
[profile.dev.package."*"]
opt-level = 2
debug = false
# Wayland/X11 windowing backends are only needed on Linux.
[target.'cfg(target_os = "linux")'.dependencies.bevy]
version = "0.17.3"
features = ["wayland", "x11", "bevy_winit"]

View File

@@ -1,4 +1,15 @@
Soundboard made in Rust & Bevy. My first Rust project.
For compilation on Linux, you will need the mold linker to speed things up.
On an Arch machine, for example, run `sudo pacman -S mold`.
# Support & Requirements
- On all OSes, you still need to select the soundboard's device inside the application where you want to use it.
## Linux
- Needs the `mold` linker and `clang` to compile fast
- ALSA and PulseAudio (or PipeWire's pipewire-pulse) are required
## Windows
- Needs the VB-Cable driver (https://vb-audio.com/Cable/)
## MacOS & Other
- Might work as a music player with the default output device.
- Not supported and not planned.

View File

@@ -1,8 +1,4 @@
#!/bin/bash
# Create a PulseAudio null sink that acts as a virtual microphone, plus a
# remapped source so other applications can select it as an input device.
pactl load-module module-null-sink sink_name=VirtualMic sink_properties=device.description="Virtual_Microphone"
pactl load-module module-remap-source master=VirtualMic.monitor source_name=VirtualMicSource source_properties=device.description="Virtual_Mic_Source"
# NOTE(review): the next two lines look like the before/after of a diff with
# the +/- markers stripped (PULSE_SINK was dropped per the commit history);
# only one of them belongs in the actual script — confirm against the repo.
PULSE_SINK=VirtualMic cargo run
cargo run
# Clean up the virtual-mic modules once the app exits.
pactl list modules short | grep "Virtual_Microphone" | cut -f1 | xargs -L1 pactl unload-module
pactl list modules short | grep "Virtual_Mic_Source" | cut -f1 | xargs -L1 pactl unload-module

7
soundboard.desktop Normal file
View File

@@ -0,0 +1,7 @@
[Desktop Entry]
Name=csd4ni3l Soundboard
Comment=A simple soundboard made in Rust
Exec=/opt/cssoundboard/run_linux.sh
Terminal=false
Type=Application
# "Application" is not a registered freedesktop.org category; desktop
# environments may ignore it. AudioVideo (main) + Audio (additional) is the
# correct pairing for an audio tool.
Categories=AudioVideo;Audio;

View File

@@ -3,28 +3,33 @@ use bevy::{
prelude::*,
};
use std::{collections::HashMap, fs::File, io::BufReader, path::Path};
use std::{collections::HashMap, fs::File, io::BufReader, path::Path, process::Command};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use bevy_egui::{
EguiContextSettings, EguiContexts, EguiPlugin, EguiPrimaryContextPass, EguiStartupSet, egui,
};
use rodio::{Decoder, OutputStream, OutputStreamBuilder, Sink, mixer::Mixer, cpal, cpal::traits::{HostTrait, DeviceTrait}};
use rodio::{Decoder, OutputStream, OutputStreamBuilder, Sink, Source, cpal::{self, Device, Host, traits::HostTrait, traits::DeviceTrait}};
/// On-disk JSON schema for persisted app state (the list of tab names).
#[derive(Serialize, Deserialize)]
struct JSONData {
    tabs: Vec<String>,
}
/// Bookkeeping for one sound currently playing on the virtual microphone.
#[allow(dead_code)]
struct PlayingSound {
    // Source file path, shown in the "currently playing" UI list.
    file_path: String,
    // Seconds since the Unix epoch when playback started — TODO confirm;
    // the commit history mentions a move from SystemTime to Instant.
    start_time: f32,
    // Total duration of the decoded source, in seconds.
    length: f32,
    // Dedicated sink feeding the virtual-mic output stream for this sound.
    virtual_sink: Sink,
    // normal_sink: Sink
}
/// Owns the audio-output objects shared by the whole application.
struct SoundSystem {
    // NOTE(review): likely diff residue — this shared sink is used by the
    // pre-change code path; newer code creates a per-sound sink instead.
    // Confirm against the repository whether this field still exists.
    sink: Sink,
    // Keeps the virtual-microphone output stream alive for the app's lifetime.
    virtual_mic_stream: OutputStream,
    // normal_output_stream: OutputStream,
    paused: bool
}
@@ -37,19 +42,104 @@ struct AppState {
sound_system: SoundSystem
}
use std::time::{SystemTime, UNIX_EPOCH};
// File extensions accepted as playable sounds.
const ALLOWED_FILE_EXTENSIONS: [&str; 4] = ["mp3", "wav", "flac", "ogg"];
/// Moves this app's PulseAudio playback stream (`alsa_playback.soundboard`)
/// into the `VirtualMic` null sink so its audio reaches the virtual microphone.
///
/// Best-effort: if `pactl` reports failure nothing is moved. Panics only if
/// `pactl` cannot be spawned or its JSON output cannot be parsed.
fn move_playback_to_sink() {
    let command_output = Command::new("pactl")
        .args(["-f", "json", "list", "sink-inputs"])
        .output()
        .expect("Failed to execute process");
    if command_output.status.success() {
        // from_utf8_lossy instead of str::from_utf8: never panics on stray
        // non-UTF-8 bytes in pactl's output.
        let stdout = String::from_utf8_lossy(&command_output.stdout);
        let sink_json: Value = serde_json::from_str(&stdout).expect("Failed to parse sink JSON output");
        // Empty slice fallback avoids allocating a throwaway Vec when the
        // top-level JSON value is not an array.
        for device in sink_json.as_array().map(Vec::as_slice).unwrap_or(&[]) {
            if device["properties"]["node.name"] == "alsa_playback.soundboard" {
                let index = device["index"].as_u64().expect("Device index is not a number").to_string();
                Command::new("pactl")
                    .args(["move-sink-input", index.as_str(), "VirtualMic"])
                    .output()
                    .expect("Failed to execute process");
            }
        }
    }
}
/// Opens an output stream pointed at a "virtual microphone" device.
///
/// * Windows: locates the VB-Cable playback device ("CABLE Input") via WASAPI.
/// * Linux: creates a PulseAudio null sink plus a remapped source via `pactl`,
///   opens the default output through ALSA, then moves our playback stream
///   into the null sink.
/// * Other OSes: falls back to the default output device, so audio plays on
///   the speakers/headset instead of a virtual mic.
fn create_virtual_mic() -> OutputStream {
    let host: Host;
    // let original_host: Host;
    // let normal_output: Device;
    let virtual_mic: Device;
    #[cfg(target_os = "windows")]
    {
        host = cpal::host_from_id(cpal::HostId::Wasapi).expect("Could not initialize audio routing using WasAPI");
        virtual_mic = host.output_devices().expect("Could not list Output devices").find(|device| {
            device.name().ok().map(|name|{
                name.contains("CABLE Input") || name.contains("VB-Audio")
            }).unwrap_or(false)
        // Fixed panic message: this search is for the VB-Cable device, not the
        // default output — the old message sent users down the wrong path.
        }).expect("Could not find the VB-Cable output device (is the VB-Cable driver installed?)");
        // normal_output = host.default_output_device().expect("Could not get default output device");
        return OutputStreamBuilder::from_device(virtual_mic).expect("Unable to open default audio device").open_stream().expect("Failed to open stream");
        // return (OutputStreamBuilder::from_device(normal_output).expect("Unable to open default audio device").open_stream().expect("Failed to open stream"), OutputStreamBuilder::from_device(virtual_mic).expect("Unable to open default audio device").open_stream().expect("Failed to open stream"));
    }
    #[cfg(target_os = "linux")]
    {
        // original_host = cpal::host_from_id(cpal::HostId::Alsa).expect("Could not initialize audio routing using ALSA");
        // normal_output = original_host.default_output_device().expect("Could not get default output device");
        // Null sink acts as the virtual microphone's "speaker" side...
        Command::new("pactl")
            .args(["load-module", "module-null-sink", "sink_name=VirtualMic", "sink_properties=device.description=\"Virtual_Microphone\""])
            .output()
            .expect("Failed to execute process");
        // ...and the remapped source is what other apps select as a mic.
        Command::new("pactl")
            .args(["load-module", "module-remap-source", "master=VirtualMic.monitor", "source_name=VirtualMicSource", "source_properties=device.description=\"Virtual_Mic_Source\""])
            .output()
            .expect("Failed to execute process");
        host = cpal::host_from_id(cpal::HostId::Alsa).expect("Could not initialize audio routing using ALSA"); // Alsa needed so pulse default works
        virtual_mic = host.default_output_device().expect("Could not get default output device");
        let virtual_mic_stream = OutputStreamBuilder::from_device(virtual_mic).expect("Unable to open default audio device").open_stream().expect("Failed to open stream");
        move_playback_to_sink();
        return virtual_mic_stream;
        // return (OutputStreamBuilder::from_device(normal_output).expect("Unable to open default audio device").open_stream().expect("Failed to open stream"), OutputStreamBuilder::from_device(virtual_mic).expect("Unable to open default audio device").open_stream().expect("Failed to open stream"));
    }
    // Reached only when neither cfg block above was compiled in.
    #[allow(unreachable_code)] {
        // Warning belongs on stderr, not stdout.
        eprintln!("Unknown/unsupported OS. Audio support may not work or may route to default output (headset, headphones, etc).");
        host = cpal::default_host();
        virtual_mic = host.default_output_device().expect("Could not get default output device");
        return OutputStreamBuilder::from_device(virtual_mic).expect("Unable to open default audio device").open_stream().expect("Failed to open stream")
        // normal_output = host.default_output_device().expect("Could not get default output device");
        // return (OutputStreamBuilder::from_device(normal_output).expect("Unable to open default audio device").open_stream().expect("Failed to open stream"), OutputStreamBuilder::from_device(virtual_mic).expect("Unable to open default audio device").open_stream().expect("Failed to open stream"));
    }
}
/// Tears down any leftover PulseAudio virtual-mic modules (Linux only) and
/// recreates the virtual-microphone output stream from scratch. Used by the
/// "Reload sound system" UI button when audio routing gets into a bad state.
fn reload_sound() -> OutputStream {
    if cfg!(target_os = "linux") {
        // Unload every matching module: previous runs may have left several
        // behind if the app did not shut down cleanly.
        let script = r#"
pactl list modules short | grep "Virtual_Microphone" | cut -f1 | xargs -L1 pactl unload-module
pactl list modules short | grep "Virtual_Mic_Source" | cut -f1 | xargs -L1 pactl unload-module
"#;
        let output = Command::new("sh")
            .arg("-c")
            .arg(script)
            .output()
            .expect("Failed to execute process");
        if output.status.success() {
            println!("Modules unloaded successfully.");
        } else {
            // Errors go to stderr, not stdout.
            eprintln!("Error: {}", String::from_utf8_lossy(&output.stderr));
        }
    }
    create_virtual_mic()
}
fn main() {
let host = cpal::default_host();
let virtual_mic = host.default_output_device().expect("Could not get default output device");
println!("Using device: {}", virtual_mic.name().unwrap_or_default());
let stream_handle = OutputStreamBuilder::from_device(virtual_mic).expect("Unable to open default audio device").open_stream().expect("Failed to open stream");
let mixer = stream_handle.mixer();
let sink = Sink::connect_new(&mixer);
let virtual_mic_stream = create_virtual_mic();
// let (normal_output_stream, virtual_mic_stream) = create_virtual_mic();
App::new()
.insert_resource(ClearColor(Color::BLACK))
@@ -76,7 +166,8 @@ fn main() {
current_directory: String::new(),
currently_playing: Vec::new(),
sound_system: SoundSystem {
sink,
virtual_mic_stream,
// normal_output_stream,
paused: false
}
})
@@ -144,18 +235,25 @@ fn update_ui_scale_factor_system(
}
/// Starts playback of `file_path` on the virtual microphone and records it in
/// `currently_playing` so the UI can show progress and prune finished sounds.
fn play_sound(file_path: String, app_state: &mut AppState) {
    // NOTE(review): the next three lines look like the *pre-change* version
    // retained by the diff view (single shared sink); the code below creates a
    // dedicated sink per sound instead. Only one variant belongs in the real
    // file — confirm against the repository.
    let file = BufReader::new(File::open(&file_path).unwrap());
    let src = Decoder::new(file).unwrap();
    app_state.sound_system.sink.append(src);
    // Decode the file for the virtual-mic stream and play it on its own sink.
    let virtual_file = File::open(&file_path).unwrap();
    let virtual_src = Decoder::new(BufReader::new(virtual_file)).unwrap();
    let virtual_sink = Sink::connect_new(&app_state.sound_system.virtual_mic_stream.mixer());
    // Panics for sources whose duration is unknown — TODO confirm acceptable.
    let length = virtual_src.total_duration().expect("Could not get source duration").as_secs_f32();
    virtual_sink.append(virtual_src);
    virtual_sink.play();
    // let normal_file = File::open(&file_path).unwrap();
    // let normal_src = Decoder::new(BufReader::new(normal_file)).unwrap();
    // let normal_sink = Sink::connect_new(&app_state.sound_system.normal_output_stream.mixer());
    // normal_sink.append(normal_src);
    // normal_sink.play();
    // Wall-clock start time, in seconds since the Unix epoch.
    let start = SystemTime::now();
    let since_the_epoch = start
        .duration_since(UNIX_EPOCH)
        .expect("time should go forward");
    app_state.currently_playing.push(PlayingSound {
        file_path: file_path.clone(),
        start_time: since_the_epoch.as_secs_f32(),
        length,
        virtual_sink,
        // normal_sink
    })
}
@@ -217,6 +315,36 @@ fn ui_system(mut contexts: EguiContexts, mut app_state: ResMut<AppState>) -> Res
{
println!("Youtube downloader!");
}
if ui
.add_sized(
[ui.available_width(), available_height / 15.0],
egui::Button::new("Reload sound system"),
)
.clicked()
{
app_state.currently_playing.clear();
app_state.sound_system.virtual_mic_stream = reload_sound();
// (app_state.sound_system.normal_output_stream, app_state.sound_system.virtual_mic_stream) = reload_sound();
println!("Sucessfully reloaded sound system!");
}
});
egui::TopBottomPanel::bottom("currently_playing").show(ctx, |ui| {
ui.horizontal(|ui| {
if app_state.sound_system.paused {
ui.heading("Paused");
}
else {
ui.heading("Playing");
}
ui.vertical(|ui| {
for playing_sound in &app_state.currently_playing {
ui.label(format!("{} - {:.2} / {:.2}", playing_sound.file_path, playing_sound.virtual_sink.get_pos().as_secs_f32(), playing_sound.length));
}
})
});
});
egui::CentralPanel::default().show(ctx, |ui| {
@@ -267,6 +395,10 @@ fn ui_system(mut contexts: EguiContexts, mut app_state: ResMut<AppState>) -> Res
});
}
});
app_state.currently_playing.retain(|playing_sound| {
playing_sound.virtual_sink.get_pos().as_secs_f32() <= (playing_sound.length - 0.01) // 0.01 offset needed here because of floating point errors and so its not exact
});
Ok(())
}