2025-07-19 03:45:41 +02:00
parent 21164df8cd
commit a1f829e2e6
10 changed files with 345 additions and 49 deletions

src-tauri/Cargo.lock generated
View File

@@ -1945,6 +1945,16 @@ dependencies = [
"serde_json", "serde_json",
] ]
[[package]]
name = "kanal"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e3953adf0cd667798b396c2fa13552d6d9b3269d7dd1154c4c416442d1ff574"
dependencies = [
"futures-core",
"lock_api",
]
[[package]]
name = "keyboard-types"
version = "0.7.0"
@@ -2637,6 +2647,7 @@ dependencies = [
"cpal", "cpal",
"crossbeam-channel", "crossbeam-channel",
"event-listener", "event-listener",
"kanal",
"moka", "moka",
"opus", "opus",
"parking_lot", "parking_lot",

View File

@@ -34,3 +34,4 @@ bytes = "1.10"
moka = {version = "0.12", features = ["future"] }
arc-swap = "1.7"
crossbeam-channel = "0.5"
kanal = "0.1"

View File

@@ -4,6 +4,7 @@ use tauri::{AppHandle, Emitter, Listener};
use tokio;
use tokio::sync::mpsc;
use crate::core::capture::AudioCapture;
use crate::domain::audio_client::AudioClientManager;
use crate::domain::event::{Event, EventBus};
use crate::network::udp::UdpSession;
use crate::runtime::dispatcher::Dispatcher;
@@ -12,7 +13,7 @@ pub struct OxSpeakApp {
// Inter-thread communication
event_bus: EventBus,
dispatcher: Dispatcher,
- event_rx: Option<mpsc::Receiver<Event>>,
+ event_rx: kanal::AsyncReceiver<Event>,
// Network
udp_session: UdpSession,
@@ -20,6 +21,9 @@ pub struct OxSpeakApp {
// audio
audio_capture: AudioCapture,
// audio_client
audio_client_manager: AudioClientManager,
// Tauri handle
tauri_handle: AppHandle
}
@@ -41,31 +45,34 @@ impl OxSpeakApp {
println!("Initializing UDP session"); println!("Initializing UDP session");
let udp_session = UdpSession::new(event_bus.clone()); let udp_session = UdpSession::new(event_bus.clone());
// UdpClient
println!("Initializing UDP client");
let audio_client_manager = AudioClientManager::new();
// Dispatcher - Communication inter-components // Dispatcher - Communication inter-components
println!("Initializing event dispatcher"); println!("Initializing event dispatcher");
let mut dispatcher = Dispatcher::new(event_bus.clone(), udp_session.clone(), tauri_handle.clone()); let mut dispatcher = Dispatcher::new(event_bus.clone(), udp_session.clone(), audio_client_manager.clone() ,tauri_handle.clone());
println!("OxSpeakApp initialization complete"); println!("OxSpeakApp initialization complete");
Self { Self {
event_bus, event_bus,
dispatcher, dispatcher,
event_rx: Some(event_rx), event_rx,
udp_session, udp_session,
audio_capture, audio_capture,
audio_client_manager,
tauri_handle, tauri_handle,
} }
} }
pub async fn start(&mut self) { pub async fn start(&mut self) {
println!("Starting OxSpeakApp"); println!("Starting OxSpeakApp");
// dispatcher - lancement du process pour la communication inter-process // dispatcher - lancement du process pour la communication inter-process
println!("Starting event dispatcher"); println!("Starting 4 instances of event dispatcher");
let mut dispatcher = self.dispatcher.clone(); for _ in 0..4 {
// Prendre l'ownership du receiver (event_rx) let dispatcher = self.dispatcher.clone();
if let Some(event_rx) = self.event_rx.take() { let event_rx = self.event_rx.clone();
tokio::spawn(async move { tokio::spawn(async move {
dispatcher.start(event_rx).await dispatcher.start(event_rx).await
}); });

View File

@@ -99,7 +99,7 @@ impl AudioCapture {
});
stream.play().unwrap();
self.steam = Some(stream);
- println!("Audio stream started");
+ println!("Audio capture stream started");
// Audio processing worker
println!("Starting audio processing worker");

View File

@@ -1 +1,225 @@
- // go fetch the clients' audio
+ use std::sync::{atomic, Arc};
use std::sync::atomic::{AtomicBool, AtomicUsize};
use bytes::Bytes;
use tokio::sync::{mpsc, Notify};
use crate::utils::ringbuf::{RingBufReader, RingBufWriter, RingBuffer};
pub struct Mixer {
// writer: Arc<RingBufWriter<i16>>,
// reader: Arc<RingBufReader<i16>>,
pre_buffer: Arc<PreBuffer>,
worker_sender: Option<mpsc::UnboundedSender<Vec<i16>>>,
// next 20 ms frame: 1920 samples (48 kHz stereo), pre-mixed by mix() and consumed by read()
buffer: parking_lot::Mutex<[i16; 1920]>
}
impl Mixer {
pub fn new() -> Self {
let (writer, reader) = RingBuffer::<i16>::new(1024).split();
Self {
// writer: Arc::new(writer),
// reader: Arc::new(reader),
pre_buffer: Arc::new(PreBuffer::new()),
worker_sender: None,
buffer: parking_lot::Mutex::new([0i16; 1920]),
}
}
// Start the pre-processing worker
pub async fn start(&mut self){
let (sender, mut receiver) = mpsc::unbounded_channel::<Vec<i16>>();
// let (writer, reader) = RingBuffer::<Bytes>::new(1024).split();
self.worker_sender = Some(sender);
let prebuffer = self.pre_buffer.clone();
// pre-processing worker
tokio::spawn(async move {
while let Some(data) = receiver.recv().await {
// data must be exactly 960 samples (mono) or 1920 (stereo): 20 ms at 48 kHz
// if it is 960, convert it to stereo
// then write it into the pre-processing buffer
// if data does not meet these conditions, ignore it
// check the data size
match data.len() {
960 => {
// Mono 20 ms @ 48 kHz - convert to stereo
let stereo_data = Self::mono_to_stereo(data);
// push into the pre-processing buffer
prebuffer.push(stereo_data).await;
},
1920 => {
// push into the pre-processing buffer
prebuffer.push(data).await;
}
_ => {
println!("⚠️ Audio data ignored - unexpected size: {} samples", data.len());
}
}
}
});
}
// Send data to the pre-processing worker
pub async fn write(&self, data: Vec<i16>){
if let Some(sender) = self.worker_sender.as_ref() {
let _ = sender.send(data);
}
}
// Generates the frame that will be played in 20 ms
pub async fn mix(&self){
// grab the pre-processing buffer, a Vec of Vec<i16> frames (the lock keeps the pre-processing worker from feeding it in the meantime)
// mix them (sum each frame, then average)
// store the result in a buffer readable by AudioPlayback
// overwrite the pre-processing buffer
// (release the lock)
let frames = self.pre_buffer.read_all().await;
let mixed_frame = if frames.is_empty() {
[0i16; 1920]
} else {
Self::mix_frames(&frames)
};
*self.buffer.lock() = mixed_frame;
}
// Return the frame already pre-generated by mix()
pub async fn read(&self) -> [i16; 1920]{
let mut buffer = self.buffer.lock();
let frame = *buffer;
// clear the buffer so the same frame is not replayed
*buffer = [0i16; 1920];
frame
}
}
impl Mixer {
// Helper functions
fn mono_to_stereo(mono_samples: Vec<i16>) -> Vec<i16> {
let mut stereo_data = Vec::with_capacity(mono_samples.len() * 2);
// Each mono sample becomes two identical stereo samples
for sample in mono_samples {
stereo_data.push(sample); // Left channel
stereo_data.push(sample); // Right channel
}
stereo_data
}
// Mix several frames together (sum, then average)
fn mix_frames(frames: &[Vec<i16>]) -> [i16; 1920] {
let mut mixed = [0i32; 1920];
for frame in frames {
for (i, &sample) in frame.iter().enumerate() {
if i < 1920 {
mixed[i] += sample as i32;
}
}
}
let mut result = [0i16; 1920];
let count = frames.len() as i32;
for (i, &sample) in mixed.iter().enumerate() {
result[i] = (sample / count).clamp(i16::MIN as i32, i16::MAX as i32) as i16;
}
result
}
}
/// Pre buffer
#[derive(Clone)]
struct PreBuffer {
sender: Arc<kanal::AsyncSender<Vec<i16>>>,
receiver: Arc<kanal::AsyncReceiver<Vec<i16>>>,
is_being_read: Arc<AtomicBool>,
read_done_notify: Arc<Notify>,
}
impl PreBuffer {
fn new() -> Self {
let (sender, reader) = kanal::unbounded_async::<Vec<i16>>();
Self {
sender: Arc::new(sender),
receiver: Arc::new(reader),
is_being_read: Arc::new(AtomicBool::new(false)),
read_done_notify: Arc::new(Notify::new()),
}
}
async fn push(&self, frame: Vec<i16>) {
if self.is_being_read.load(atomic::Ordering::Acquire) {
self.read_done_notify.notified().await;
}
let _ = self.sender.send(frame);
}
async fn read_all(&self) -> Vec<Vec<i16>> {
self.is_being_read.store(true, atomic::Ordering::Release);
let mut frames = Vec::new();
// drain whatever is currently queued without blocking: a plain recv().await would wait
// here forever once the channel is empty, since the senders stay alive
while let Ok(Some(frame)) = self.receiver.try_recv() {
frames.push(frame);
}
// Release and notify the waiting writers
self.is_being_read.store(false, atomic::Ordering::Release);
self.read_done_notify.notify_waiters();
frames
}
}
// struct PreBuffer {
// // Dynamic Vec storing the frames
// frames: Vec<Vec<i16>>,
// // Atomic counter for the number of available frames
// frame_count: AtomicUsize,
// // Atomic flag indicating whether the buffer is currently being read
// is_being_read: AtomicBool,
// read_done_notify: Arc<Notify>,
// }
//
// impl PreBuffer {
// fn new() -> Self {
// Self {
// frames: Vec::new(),
// frame_count: AtomicUsize::new(0),
// is_being_read: AtomicBool::new(false),
// read_done_notify: Arc::new(Notify::new()),
// }
// }
//
// async fn push(&mut self, frame: Vec<i16>) {
// if self.is_being_read.load(atomic::Ordering::Acquire) {
// self.read_done_notify.notified().await;
// }
//
// self.frames.push(frame);
// self.frame_count.fetch_add(1, atomic::Ordering::Release);
// }
//
// fn reset(&mut self) {
// self.frame_count.store(0, atomic::Ordering::Release);
// }
//
// fn len(&self) -> usize {
// self.frame_count.load(atomic::Ordering::Acquire)
// }
//
// fn read_all(&mut self) -> Vec<Vec<i16>> {
// self.is_being_read.store(true, atomic::Ordering::Release);
//
// let frames = std::mem::take(&mut self.frames);
// self.frame_count.store(0, atomic::Ordering::Release);
//
// // Release and notify the waiting writers
// self.is_being_read.store(false, atomic::Ordering::Release);
// self.read_done_notify.notify_waiters();
//
// frames
// }
// }
//
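To make the intended mix()/read() cycle concrete, here is a hypothetical driver loop; it is not part of this commit, the 20 ms period comes from the comments above, and the wiring (an Arc<Mixer> plus a tokio interval) is an assumption:

// Sketch: pre-mix one frame per 20 ms tick; the playback side then consumes it with read().
async fn playback_loop(mixer: std::sync::Arc<Mixer>) {
    let mut ticker = tokio::time::interval(std::time::Duration::from_millis(20));
    loop {
        ticker.tick().await;
        mixer.mix().await; // fold everything queued in the PreBuffer into one frame
        let frame: [i16; 1920] = mixer.read().await;
        // hand `frame` to the cpal output callback / ring buffer here
        let _ = frame;
    }
}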

View File

@@ -2,8 +2,8 @@ use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread::JoinHandle;
use cpal::{default_host, BufferSize, Device, SampleRate, Stream, StreamConfig, SupportedStreamConfig};
- use cpal::traits::{DeviceTrait, HostTrait};
+ use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
- use crate::domain::event::EventBus;
+ use crate::domain::event::{Event, EventBus};
use crate::utils::real_time_event::RealTimeEvent;
#[derive(Clone)]
@@ -80,21 +80,26 @@ impl AudioPlayback {
}
pub async fn start(&mut self) {
+ let stream_running = self.running.clone();
+ let event_bus = self.event_bus.clone();
- }
- pub async fn stop(&mut self) {
- self.running.store(false, std::sync::atomic::Ordering::SeqCst);
// cpal stream
println!("Setting up audio playback stream...");
- let stream_running = self.running.clone();
let stream = self.speaker.build_stream(move |data, _| {
if !stream_running.load(Ordering::Relaxed){
return;
}
// fetch 1920 samples from a buffer
// write them into data
+ let _ = event_bus.emit(Event::PlaybackTick);
});
+ stream.play().unwrap();
+ self.stream = Some(stream);
+ println!("Audio playback stream started");
+ }
+ pub async fn stop(&mut self) {
+ self.running.store(false, std::sync::atomic::Ordering::SeqCst);
}
}

View File

@@ -8,11 +8,11 @@ use crate::core::opus::{AudioOpus, AudioOpusDecoder};
use crate::utils::ringbuf::{RingBufReader, RingBufWriter, RingBuffer};
use crate::utils::shared_store::SharedArcMap;
- struct AudioClient {
+ pub struct AudioClient {
uuid: uuid::Uuid,
decode_sender: mpsc::Sender<DecodeRequest>,
- buffer_reader: RingBufReader<Vec<i16>>,
- buffer_writer: RingBufWriter<Vec<i16>>
+ buffer_reader: RingBufReader<i16>,
+ buffer_writer: RingBufWriter<i16>
}
struct DecodeRequest {
@@ -21,21 +21,23 @@ struct DecodeRequest {
}
#[derive(Clone)]
- struct AudioClientManager {
+ pub struct AudioClientManager {
audio_clients: SharedArcMap<uuid::Uuid, AudioClient>,
}
impl AudioClient {
pub fn new() -> Self {
- let (writer, reader) = RingBuffer::<Vec<i16>>::new(1024).split();
+ let (writer, reader) = RingBuffer::<i16>::new(4096).split();
let (decode_sender, mut decode_reader) = mpsc::channel::<DecodeRequest>(100);
let writer_clone = writer.clone();
let decode_handle = tokio::spawn(async move {
- let mut decoder = AudioOpus::new(44800, 1, "voip")
+ let mut decoder = AudioOpus::new(48000, 1, "voip")
.create_decoder().unwrap();
let mut last_sequence: u16 = 0;
while let Some(request) = decode_reader.recv().await {
// if the sequence is "too old", drop it (later, see whether a catch-up mechanism is feasible)
if last_sequence < request.sequence {
// todo: if decoding takes too long, consider moving it to a blocking thread
// with let result = tokio::task::spawn_blocking({
@@ -50,7 +52,8 @@ impl AudioClient {
match result {
Ok(audio_frame) => {
// Push the complete frame into the buffer
- writer.push(audio_frame);
+ writer_clone.push_slice_overwrite(&audio_frame);
+ println!("wrote frame");
},
Err(e) => {
eprintln!("Audio decoding error: {}", e);
@@ -69,8 +72,8 @@ impl AudioClient {
}
}
- pub async fn write_audio(&self, sequence: u16, data: Bytes) {
- let _ = self.decode_sender.send(DecodeRequest {
+ pub fn write_audio(&self, sequence: u16, data: Bytes) {
+ let _ = self.decode_sender.try_send(DecodeRequest {
data,
sequence
});
@@ -79,9 +82,29 @@ impl AudioClient {
impl AudioClientManager {
- fn new() -> Self {
+ pub fn new() -> Self {
Self {
audio_clients: SharedArcMap::new()
}
}
pub fn audio_client_exists(&self, uuid: uuid::Uuid) -> bool {
self.audio_clients.contains_key(&uuid)
}
pub fn get_audio_client(&self, uuid: uuid::Uuid) -> Option<Arc<AudioClient>> {
self.audio_clients.get(&uuid)
}
pub fn add_audio_client(&self, uuid: uuid::Uuid, audio_client: AudioClient) {
self.audio_clients.insert(uuid, audio_client);
}
pub fn remove_audio_client(&self, uuid: uuid::Uuid) {
self.audio_clients.remove(&uuid);
}
pub fn write_audio_to_client(&self, uuid: uuid::Uuid, sequence: u16, data: Bytes) {
let _ = self.audio_clients.get(&uuid).unwrap().write_audio(sequence, data);
}
}
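An aside on the decode worker above (no change made here): the guard last_sequence < request.sequence stops accepting packets once the u16 sequence counter wraps past 65535. A wrap-aware comparison in the spirit of RTP / RFC 1982 would look like this; is_newer is a hypothetical helper, not part of the codebase:

// Treat `current` as newer than `last` if it is ahead by less than half the u16 range.
fn is_newer(current: u16, last: u16) -> bool {
    current != last && current.wrapping_sub(last) < u16::MAX / 2
}

fn main() {
    assert!(is_newer(10, 5));
    assert!(is_newer(2, 65_530)); // wrapped from 65535 back to 0, still newer
    assert!(!is_newer(5, 10));
}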

View File

@@ -1,4 +1,5 @@
- use tokio::sync::mpsc;
+ use bytes::Bytes;
+ // use tokio::sync::mpsc;
use crate::network::protocol::{MessageClient, MessageServer};
pub enum Event {
@@ -8,6 +9,9 @@ pub enum Event {
AudioIn(Vec<i16>),
AudioEncoded(Vec<u8>),
PlaybackTick,
PlaybackRequest(Bytes),
NetConnected,
NetDisconnected,
NetIn(MessageServer),
@@ -22,12 +26,12 @@ pub enum Event {
#[derive(Clone)]
pub struct EventBus {
- pub sender: mpsc::Sender<Event>
+ pub sender: kanal::AsyncSender<Event>
}
impl EventBus {
- pub fn new() -> (Self, mpsc::Receiver<Event>) {
- let (sender, receiver) = mpsc::channel(4096);
+ pub fn new() -> (Self, kanal::AsyncReceiver<Event>) {
+ let (sender, receiver) = kanal::bounded_async::<Event>(4096);
(Self { sender }, receiver)
}
@@ -40,7 +44,7 @@ impl EventBus {
let _ = self.sender.try_send(event);
}
- pub fn clone_sender(&self) -> mpsc::Sender<Event> {
+ pub fn clone_sender(&self) -> kanal::AsyncSender<Event> {
self.sender.clone()
}
}
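Worth noting (an aside, not part of the diff): emit() keeps using try_send, which never awaits, so events like PlaybackTick can be fired from the real-time cpal callback without blocking it; when the bounded queue is full the event is simply dropped. A small sketch of that behaviour with placeholder types:

// Non-blocking emit: safe to call from a context that must not block or await.
fn emit_from_callback(sender: &kanal::AsyncSender<u32>, tick: u32) {
    // returns immediately whether or not the event was accepted
    let _ = sender.try_send(tick);
}

#[tokio::main]
async fn main() {
    let (tx, rx) = kanal::bounded_async::<u32>(4096);
    emit_from_callback(&tx, 1);
    assert_eq!(rx.recv().await.unwrap(), 1);
}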

View File

@@ -8,6 +8,7 @@ use bytes::Bytes;
use parking_lot::RwLock;
use tokio::task::AbortHandle;
use uuid::Uuid;
use crate::domain::audio_client::{AudioClient, AudioClientManager};
use crate::domain::event::{Event, EventBus};
use crate::network::protocol::{MessageClient, MessageServer};
use crate::network::udp::UdpSession;
@@ -20,11 +21,13 @@ struct PingInfo {
#[derive(Clone)]
pub struct Dispatcher {
- event_bus: EventBus,
- udp_session: UdpSession,
- tauri_handle: AppHandle,
+ event_bus: Arc<EventBus>,
+ udp_session: Arc<UdpSession>,
+ tauri_handle: Arc<AppHandle>,
+ audio_client_manager: Arc<AudioClientManager>,
// todo: temporary, until a proper handler exists
@@ -51,23 +54,24 @@ impl PingInfo {
}
impl Dispatcher {
- pub fn new(event_bus: EventBus, udp_session: UdpSession, tauri_handle: AppHandle) -> Self {
+ pub fn new(event_bus: EventBus, udp_session: UdpSession, audio_client_manager: AudioClientManager, tauri_handle: AppHandle) -> Self {
Self {
- event_bus,
- udp_session,
+ event_bus: Arc::new(event_bus),
+ udp_session: Arc::new(udp_session),
sequence_counter: Arc::new(AtomicU16::new(0)),
- tauri_handle,
+ tauri_handle: Arc::new(tauri_handle),
can_send_audio: Arc::new(AtomicBool::new(false)),
ping_tracker: Arc::new(RwLock::new(HashMap::new())),
audio_client_manager: Arc::new(audio_client_manager),
}
}
- pub async fn start(&mut self, mut receiver: mpsc::Receiver<Event>) {
+ pub async fn start(&self, receiver: kanal::AsyncReceiver<Event>) {
let (_udp_in_abort_handle, udp_in_sender) = self.udp_in_handler().await;
let udp_session = self.udp_session.clone();
let sequence_counter = self.sequence_counter.clone();
- while let Some(event) = receiver.recv().await {
+ while let Ok(event) = receiver.recv().await {
match event {
Event::AudioIn(sample) => {
@@ -83,9 +87,12 @@ impl Dispatcher {
udp_session.send(message).await;
sequence_counter.fetch_add(1, Ordering::Relaxed);
}
Event::PlaybackTick => {
}
Event::NetIn(message_event) => {
- println!("NetIn: {:?}", message_event);
+ // println!("NetIn: {:?}", message_event);
let _ = udp_in_sender.send(message_event).await;
}
Event::NetOut(message_call) => {
@@ -117,6 +124,7 @@ impl Dispatcher {
pub async fn udp_in_handler(&self) -> (AbortHandle, mpsc::Sender<MessageServer>) {
let (sender, mut consumer) = mpsc::channel::<MessageServer>(1024);
let ping_tracker = Arc::clone(&self.ping_tracker);
let audio_client_manager = self.audio_client_manager.clone();
let task = tokio::spawn(async move {
while let Some(message) = consumer.recv().await {
@@ -135,7 +143,12 @@ impl Dispatcher {
}
}
MessageServer::Audio {user, sequence, data} => {
- // Audio received
+ if audio_client_manager.audio_client_exists(user) {
audio_client_manager.write_audio_to_client(user, sequence, data);
} else {
let client = AudioClient::new();
audio_client_manager.add_audio_client(user, client);
}
}
}
}
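The Arc wrapping above is what makes the four dispatcher clones cheap: cloning the struct copies pointers, and every worker observes the same counters and maps. A minimal illustration of the pattern (Worker and its field are made-up names):

use std::sync::Arc;
use std::sync::atomic::{AtomicU16, Ordering};

#[derive(Clone)]
struct Worker {
    // shared by every clone of Worker
    counter: Arc<AtomicU16>,
}

fn main() {
    let w1 = Worker { counter: Arc::new(AtomicU16::new(0)) };
    let w2 = w1.clone(); // copies the Arc pointer, not the counter
    w1.counter.fetch_add(1, Ordering::Relaxed);
    assert_eq!(w2.counter.load(Ordering::Relaxed), 1);
}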

View File

@@ -1,4 +1,4 @@
- use tauri::Manager;
+ use tauri::{Manager, WindowEvent};
use std::sync::Arc;
use tokio::sync::Mutex;
use crate::app::ox_speak_app::OxSpeakApp;
@@ -45,6 +45,14 @@ pub async fn run() {
Ok(())
})
// .invoke_handler(tauri::generate_handler![greet])
.on_window_event(|window, event| match event {
WindowEvent::CloseRequested {api, ..} => {
println!("Closing window");
}
_ => {
}
})
.run(context)
.expect("error while running tauri application");
}
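The new CloseRequested arm only logs for now. If shutdown work (stopping capture/playback, closing the UDP session) should run before the window disappears, Tauri's close API can hold the window open; a hedged sketch of such a handler, assuming Tauri 2 and that destroy() is acceptable as the final close:

.on_window_event(|window, event| match event {
    WindowEvent::CloseRequested { api, .. } => {
        println!("Closing window");
        api.prevent_close(); // keep the window alive until cleanup has run
        let window = window.clone();
        tauri::async_runtime::spawn(async move {
            // ... stop audio streams / flush network state here ...
            let _ = window.destroy(); // destroy() avoids re-triggering CloseRequested
        });
    }
    _ => {}
})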