This commit is contained in:
2025-07-29 20:20:02 +02:00
parent 9b8461314f
commit 62dc6deb79
36 changed files with 2837 additions and 37 deletions

2
.gitignore vendored
View File

@@ -1 +1,3 @@
/target /target
.env
db.sqlite

57
.idea/workspace.xml generated
View File

@@ -1,5 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="AnalysisUIOptions">
<option name="ANALYZE_INJECTED_CODE" value="false" />
<option name="SCOPE_TYPE" value="3" />
</component>
<component name="AutoImportSettings"> <component name="AutoImportSettings">
<option name="autoReloadType" value="ALL" /> <option name="autoReloadType" value="ALL" />
</component> </component>
@@ -12,7 +16,40 @@
</component> </component>
<component name="ChangeListManager"> <component name="ChangeListManager">
<list default="true" id="ca698286-778f-4335-97c8-da35a666c986" name="Changes" comment="init"> <list default="true" id="ca698286-778f-4335-97c8-da35a666c986" name="Changes" comment="init">
<change afterPath="$PROJECT_DIR$/README.md" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/app/conf.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/domain/models.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/network/http.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/network/http_routes/channel.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/network/http_routes/master.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/network/http_routes/message.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/network/http_routes/mod.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/network/http_routes/user.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/network/http_routes/websocket.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/mod.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/models/channel.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/models/link_sub_server_user.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/models/message.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/models/mod.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/models/sub_server.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/models/user.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/repositories/channel_repository.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/repositories/link_sub_server_user_repository.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/repositories/message_repository.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/repositories/mod.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/repositories/sub_server_repository.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/repositories/user_repository.rs" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/store/store_service.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.gitignore" beforeDir="false" afterPath="$PROJECT_DIR$/.gitignore" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" /> <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Cargo.lock" beforeDir="false" afterPath="$PROJECT_DIR$/Cargo.lock" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Cargo.toml" beforeDir="false" afterPath="$PROJECT_DIR$/Cargo.toml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/app/app.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/app/app.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/app/mod.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/app/mod.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/domain/mod.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/domain/mod.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/lib.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/lib.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/main.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/main.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/network/mod.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/network/mod.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/runtime/dispatcher.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/runtime/dispatcher.rs" afterDir="false" /> <change beforePath="$PROJECT_DIR$/src/runtime/dispatcher.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/runtime/dispatcher.rs" afterDir="false" />
</list> </list>
<option name="SHOW_DIALOG" value="false" /> <option name="SHOW_DIALOG" value="false" />
@@ -61,6 +98,7 @@
&quot;org.rust.cargo.project.model.PROJECT_DISCOVERY&quot;: &quot;true&quot;, &quot;org.rust.cargo.project.model.PROJECT_DISCOVERY&quot;: &quot;true&quot;,
&quot;org.rust.cargo.project.model.impl.CargoExternalSystemProjectAware.subscribe.first.balloon&quot;: &quot;&quot;, &quot;org.rust.cargo.project.model.impl.CargoExternalSystemProjectAware.subscribe.first.balloon&quot;: &quot;&quot;,
&quot;org.rust.first.attach.projects&quot;: &quot;true&quot;, &quot;org.rust.first.attach.projects&quot;: &quot;true&quot;,
&quot;run.code.analysis.last.selected.profile&quot;: &quot;pProject Default&quot;,
&quot;settings.editor.selected.configurable&quot;: &quot;terminal&quot;, &quot;settings.editor.selected.configurable&quot;: &quot;terminal&quot;,
&quot;vue.rearranger.settings.migration&quot;: &quot;true&quot; &quot;vue.rearranger.settings.migration&quot;: &quot;true&quot;
} }
@@ -69,6 +107,10 @@
<key name="CopyFile.RECENT_KEYS"> <key name="CopyFile.RECENT_KEYS">
<recent name="D:\Dev\ox_speak_server\src\network" /> <recent name="D:\Dev\ox_speak_server\src\network" />
</key> </key>
<key name="MoveFile.RECENT_KEYS">
<recent name="D:\Dev\ox_speak_server\src\store" />
<recent name="D:\Dev\ox_speak_server\src\store\repositories" />
</key>
</component> </component>
<component name="RunManager"> <component name="RunManager">
<configuration name="Run" type="CargoCommandRunConfiguration" factoryName="Cargo Command"> <configuration name="Run" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
@@ -115,6 +157,21 @@
<workItem from="1752833798325" duration="5366000" /> <workItem from="1752833798325" duration="5366000" />
<workItem from="1752917416027" duration="1192000" /> <workItem from="1752917416027" duration="1192000" />
<workItem from="1752931843330" duration="2938000" /> <workItem from="1752931843330" duration="2938000" />
<workItem from="1752997629708" duration="9070000" />
<workItem from="1753107912894" duration="595000" />
<workItem from="1753225761416" duration="1356000" />
<workItem from="1753282037526" duration="5207000" />
<workItem from="1753397680991" duration="1782000" />
<workItem from="1753399490773" duration="3189000" />
<workItem from="1753436756029" duration="16895000" />
<workItem from="1753521176318" duration="17811000" />
<workItem from="1753601912332" duration="5843000" />
<workItem from="1753718175508" duration="8774000" />
<workItem from="1753800817354" duration="3570000" />
<workItem from="1753804571241" duration="59000" />
<workItem from="1753804642657" duration="236000" />
<workItem from="1753804898179" duration="625000" />
<workItem from="1753805533139" duration="2909000" />
</task> </task>
<task id="LOCAL-00001" summary="init"> <task id="LOCAL-00001" summary="init">
<option name="closed" value="true" /> <option name="closed" value="true" />

1815
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -25,4 +25,8 @@ dashmap = "6.1"
bytes = "1.10" bytes = "1.10"
arc-swap = "1.7" arc-swap = "1.7"
crossbeam-utils = "0.8" crossbeam-utils = "0.8"
kanal = "0.1" kanal = "0.1"
axum = { version = "0.8", features = ["ws", "default"] }
chrono = {version = "0.4", features = ["serde"]}
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "chrono", "uuid", "any", "sqlite", "postgres", "mysql" ] }
dotenvy = "0.15"

4
README.md Normal file
View File

@@ -0,0 +1,4 @@
créer un fichier de migration sqlx :
```shell
sqlx migrate add --source src/store/migrations migration_name
```

View File

@@ -3,8 +3,10 @@ use tokio::sync::mpsc;
use tokio::time::interval; use tokio::time::interval;
use crate::domain::client::ClientManager; use crate::domain::client::ClientManager;
use crate::domain::event::{Event, EventBus}; use crate::domain::event::{Event, EventBus};
use crate::network::http::HttpServer;
use crate::network::udp::UdpServer; use crate::network::udp::UdpServer;
use crate::runtime::dispatcher::Dispatcher; use crate::runtime::dispatcher::Dispatcher;
use crate::store::store_service::StoreService;
pub struct App { pub struct App {
// Communication inter-components // Communication inter-components
@@ -14,19 +16,26 @@ pub struct App {
// Network // Network
udp_server: UdpServer, udp_server: UdpServer,
http_server: HttpServer,
// Clients // Clients
client_manager: ClientManager, client_manager: ClientManager,
// store
store: StoreService,
} }
impl App { impl App {
pub async fn new() -> Self { pub async fn new() -> Self {
let (event_bus, event_rx) = EventBus::new(); let (event_bus, event_rx) = EventBus::new();
let store = StoreService::new("./db.sqlite").await.unwrap();
let udp_server = UdpServer::new(event_bus.clone(), "0.0.0.0:5000").await; let udp_server = UdpServer::new(event_bus.clone(), "0.0.0.0:5000").await;
let http_server = HttpServer::new(event_bus.clone(), store.clone());
let client_manager = ClientManager::new(); let client_manager = ClientManager::new();
let dispatcher = Dispatcher::new(event_bus.clone(), udp_server.clone(), client_manager.clone()).await;
let dispatcher = Dispatcher::new(event_bus.clone(), udp_server.clone(), client_manager.clone(), store.clone()).await;
Self { Self {
@@ -34,7 +43,9 @@ impl App {
dispatcher, dispatcher,
event_rx, event_rx,
udp_server, udp_server,
client_manager http_server,
client_manager,
store
} }
} }
@@ -49,6 +60,7 @@ impl App {
let _ = self.udp_server.start().await; let _ = self.udp_server.start().await;
let _ = self.http_server.start("0.0.0.0:5000").await;
let _ = self.tick_tasks().await; let _ = self.tick_tasks().await;
println!("App started"); println!("App started");
} }

28
src/app/conf.rs Normal file
View File

@@ -0,0 +1,28 @@
use std::env;
use std::path::Path;
/// Loads environment variables from a `.env` file in the current working directory.
///
/// Prints the working directory first (useful when debugging relative paths),
/// then loads `.env` via `dotenvy` if the file exists, reporting the outcome
/// on stdout/stderr. Missing files are reported but are not an error.
pub fn load_env() {
    // Report the working directory so a misplaced .env is easy to diagnose.
    match env::current_dir() {
        Ok(path) => println!("Répertoire de travail: {}", path.display()),
        Err(e) => eprintln!("Erreur pour obtenir le répertoire: {}", e),
    }

    // Guard clause: nothing to load when the file is absent.
    let env_path = Path::new(".env");
    if !env_path.exists() {
        println!("⚠️ Le fichier .env n'existe pas");
        return;
    }

    println!(".env trouvé");
    if let Err(e) = dotenvy::from_path(env_path) {
        eprintln!("❌ Erreur lors du chargement du .env: {}", e);
    } else {
        println!("✅ Fichier .env chargé avec succès");
    }
}

View File

@@ -1 +1,2 @@
pub mod app; pub mod app;
pub mod conf;

View File

@@ -1,3 +1,4 @@
pub mod event; pub mod event;
pub mod user; pub mod user;
pub mod client; pub mod client;
pub mod models;

65
src/domain/models.rs Normal file
View File

@@ -0,0 +1,65 @@
use std::sync::Arc;
use parking_lot::Mutex;
use uuid::Uuid;
use chrono::{DateTime, Utc};
/////////////////////////////////////
//////////// Main models ///////////
////////////////////////////////////
// Draft in-memory domain models; persisted equivalents live in `store::models`.

struct SubServer {
    // A (master) server can host several sub-servers.
    // (The master itself is not modelled here; its configuration will come from a .env file.)
    id: Uuid,
    name: String,
    password: String, // may be hashed later, but probably unnecessary — TODO confirm
    created_at: DateTime<Utc>,
}

struct Channel {
    id: Uuid,
    channel_type: ChannelType, // the type cannot be edited after creation, like Discord
    sub_server_id: Uuid,       // FK to the owning sub-server
    name: String,
    created_at: DateTime<Utc>,
}

struct User {
    id: Uuid,
    name: String,    // possibly redundant since the relation carries display_name — TODO confirm
    pub_key: String, // identification via public/private key pair, like TeamSpeak
}

struct Message {
    id: Uuid,
    channel_id: Uuid,
    user_id: Uuid,
    content: String,
    created_at: DateTime<Utc>,
}

/////////////////////////////////////
////////// n-n relations ///////////
////////////////////////////////////

struct SubServerUser {
    sub_server: SubServer,
    user: User,
    display_name: String,
}

/////////////////////////////////////
//////// Enum Type (choice) ////////
////////////////////////////////////

enum ChannelType {
    Text,
    Voice,
}

/// Centralised store for all data models.
/// NOTE(review): `Arc<Mutex<SubServer>>` holds a single sub-server; a collection
/// keyed by id (map or vec) was probably intended — confirm before use.
#[derive(Clone)]
pub struct DataStore {
    sub_servers: Arc<Mutex<SubServer>>
}

View File

@@ -3,4 +3,5 @@ pub mod core;
pub mod domain; pub mod domain;
pub mod network; pub mod network;
pub mod runtime; pub mod runtime;
pub mod utils; pub mod utils;
pub mod store;

View File

@@ -1,8 +1,12 @@
use tokio::signal; use tokio::signal;
use ox_speak_server_lib::app::app::App; use ox_speak_server_lib::app::app::App;
use ox_speak_server_lib::app::conf::load_env;
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
// Charger le .env
load_env();
let mut app = App::new().await; let mut app = App::new().await;
app.start().await; app.start().await;

70
src/network/http.rs Normal file
View File

@@ -0,0 +1,70 @@
use std::net::SocketAddr;
use std::sync::Arc;
use axum::{
extract::{ws::WebSocket, ws::WebSocketUpgrade, State},
response::Response,
Json, Router,
routing::{get, post}
};
use tokio::net::TcpListener;
use crate::domain::event::EventBus;
use crate::network::http_routes::master;
use crate::store::store_service::StoreService;
/// Shared state handed to every axum handler via `State`.
#[derive(Clone)]
pub struct HttpState {
    pub event_bus: EventBus,  // publishes events to the rest of the application
    pub store: StoreService   // database access + in-memory caches
}

/// HTTP/WebSocket front-end; owns the shared state and builds the axum router.
#[derive(Clone)]
pub struct HttpServer {
    state: HttpState
}
/// Construction and lifecycle.
impl HttpServer {
    /// Wraps the shared event bus and store into handler state.
    pub fn new(event_bus: EventBus, store: StoreService) -> Self {
        let state = HttpState { event_bus, store };
        Self { state }
    }

    /// Binds `addr` and serves the router on a detached background task.
    /// Returns as soon as the listener is bound; serve errors are discarded.
    pub async fn start(&self, addr: &str) -> Result<(), Box<dyn std::error::Error>> {
        let app = self.create_router();
        let listener = TcpListener::bind(addr).await?;
        println!("HTTP/WebSocket server listening on addr {}", listener.local_addr()?);

        tokio::spawn(async move {
            let _ = axum::serve(listener, app).await;
        });
        Ok(())
    }

    /// Assembles the route tree: `/api/master/...` plus future ws/stats routes.
    fn create_router(&self) -> Router {
        let api = Router::new().nest("/master", master::create_router());

        Router::new()
            .nest("/api", api)
            // .route("/ws", get(Self::ws))
            // .route("/stats/", get(Self::stats))
            .with_state(self.state.clone())
    }
}
/// Route handlers.
///
/// Fixed: the router's state type is `HttpState` (see `create_router`), so the
/// extractors must be `State<HttpState>`, not `State<Arc<HttpState>>` — the
/// `Arc` variants would fail the `Handler` bound once these routes are enabled.
impl HttpServer {
    /// Placeholder for the server statistics endpoint.
    async fn stats(State(state): State<HttpState>) -> Json<String> {
        todo!("a faire")
    }

    /// Upgrades the connection to a WebSocket; protocol not implemented yet.
    async fn ws(ws: WebSocketUpgrade, State(state): State<HttpState>) -> Response {
        ws.on_upgrade(|socket| async move {
            todo!("a faire")
        })
    }
}

View File

View File

@@ -0,0 +1,18 @@
use std::collections::{HashMap};
use std::hash::Hash;
use axum::{
extract::{Path, State},
response::Json,
routing::{get, post, put, delete},
Router,
};
use crate::network::http::HttpState;
/// Builds the `/master` sub-router (mounted under `/api` by `HttpServer`).
pub fn create_router() -> Router<HttpState> {
    Router::new()
        // POST /auth/ — join/authenticate against the master server
        .route("/auth/", post(join_master_server))
}

/// Handler for joining the master server; response shape still TBD.
pub async fn join_master_server(State(state): State<HttpState>) -> Json<HashMap<String, String>> {
    todo!("join master server")
}

View File

View File

@@ -0,0 +1,5 @@
pub mod user;
pub mod channel;
pub mod message;
pub mod websocket;
pub mod master;

View File

View File

View File

@@ -1,2 +1,4 @@
pub mod protocol; pub mod protocol;
pub mod udp; pub mod udp;
pub mod http;
pub mod http_routes;

View File

@@ -7,21 +7,25 @@ use crate::domain::client::{Client, ClientManager};
use crate::domain::event::{Event, EventBus}; use crate::domain::event::{Event, EventBus};
use crate::network::protocol::{UDPMessageType, UDPMessage, UdpBroadcastMessage, UDPMessageData}; use crate::network::protocol::{UDPMessageType, UDPMessage, UdpBroadcastMessage, UDPMessageData};
use crate::network::udp::UdpServer; use crate::network::udp::UdpServer;
use crate::store::store_service::StoreService;
#[derive(Clone)] #[derive(Clone)]
pub struct Dispatcher { pub struct Dispatcher {
event_bus: Arc<EventBus>, event_bus: Arc<EventBus>,
udp_server: Arc<UdpServer>, udp_server: Arc<UdpServer>,
client_manager: Arc<ClientManager> client_manager: Arc<ClientManager>,
store: StoreService,
} }
impl Dispatcher { impl Dispatcher {
pub async fn new(event_bus: EventBus, udp_server: UdpServer, client_manager: ClientManager) -> Self { pub async fn new(event_bus: EventBus, udp_server: UdpServer, client_manager: ClientManager, store: StoreService) -> Self {
Self { Self {
event_bus: Arc::new(event_bus), event_bus: Arc::new(event_bus),
udp_server: Arc::new(udp_server), udp_server: Arc::new(udp_server),
client_manager: Arc::new(client_manager), client_manager: Arc::new(client_manager),
store,
} }
} }
@@ -73,12 +77,14 @@ impl Dispatcher {
// Traitement direct du message sans double parsing // Traitement direct du message sans double parsing
match message.message_type() { match message.message_type() {
UDPMessageType::Ping => { UDPMessageType::Ping => {
println!("ping from {:?}", message);
let response_message = UDPMessage::server_ping(message.address, message.get_message_id().unwrap()); let response_message = UDPMessage::server_ping(message.address, message.get_message_id().unwrap());
if client_manager.client_exists(message.address) { if client_manager.client_exists(message.address) {
client_manager.update_client_last_seen(message.address); client_manager.update_client_last_seen(message.address);
}else { }else {
client_manager.add(Client::new(message.address)); client_manager.add(Client::new(message.address));
println!("new client: {:?}", message.address);
} }
let _ = udp_server.send_udp_message(&response_message).await; let _ = udp_server.send_udp_message(&response_message).await;
} }
@@ -90,7 +96,7 @@ impl Dispatcher {
let _ = udp_server.broadcast_udp_message(&response_message).await; let _ = udp_server.broadcast_udp_message(&response_message).await;
} else { } else {
// Tu peux gérer ici le cas où lUUID nest pas trouvé (optionnel) // Tu peux gérer ici le cas où lUUID nest pas trouvé (optionnel)
println!("UUID non trouvé pour l'adresse: {:?}", message.address); // println!("UUID non trouvé pour l'adresse: {:?}", message.address);
} }
} }

View File

@@ -0,0 +1,70 @@
-- Add migration script here

-- user table
CREATE TABLE user
(
    id         TEXT PRIMARY KEY NOT NULL,
    username   TEXT NOT NULL UNIQUE,
    email      TEXT,
    avatar_url TEXT,
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL
);

-- sub_server table
CREATE TABLE sub_server
(
    id          TEXT PRIMARY KEY NOT NULL,
    name        TEXT NOT NULL,
    description TEXT,
    owner_id    TEXT NOT NULL,
    created_at  TEXT NOT NULL,
    updated_at  TEXT NOT NULL,
    FOREIGN KEY (owner_id) REFERENCES user (id)
);

-- channel table
CREATE TABLE channel
(
    id            TEXT PRIMARY KEY NOT NULL,
    name          TEXT NOT NULL,
    description   TEXT,
    channel_type  TEXT NOT NULL,
    sub_server_id TEXT NOT NULL,
    created_at    TEXT NOT NULL,
    updated_at    TEXT NOT NULL,
    FOREIGN KEY (sub_server_id) REFERENCES sub_server (id)
);

-- message table
CREATE TABLE message
(
    id         TEXT PRIMARY KEY NOT NULL,
    content    TEXT NOT NULL,
    author_id  TEXT NOT NULL,
    channel_id TEXT NOT NULL,
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL,
    FOREIGN KEY (author_id) REFERENCES user (id),
    FOREIGN KEY (channel_id) REFERENCES channel (id)
);

-- Link table between users and sub_servers (N-N relation)
-- NOTE(review): the Rust model LinkSubServerUser expects display_username,
-- last_seen_at and is_admin columns that are absent here — reconcile schema.
CREATE TABLE sub_server_user
(
    id            TEXT PRIMARY KEY NOT NULL,
    sub_server_id TEXT NOT NULL,
    user_id       TEXT NOT NULL,
    joined_at     TEXT NOT NULL,
    role          TEXT,
    FOREIGN KEY (sub_server_id) REFERENCES sub_server (id),
    FOREIGN KEY (user_id) REFERENCES user (id),
    UNIQUE (sub_server_id, user_id)
);

-- Indexes for the common lookup paths
CREATE INDEX idx_channel_sub_server ON channel (sub_server_id);
CREATE INDEX idx_message_channel ON message (channel_id);
CREATE INDEX idx_message_author ON message (author_id);
CREATE INDEX idx_sub_server_user_sub_server ON sub_server_user (sub_server_id);
CREATE INDEX idx_sub_server_user_user ON sub_server_user (user_id);

3
src/store/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod models;
pub mod repositories;
pub mod store_service;

View File

@@ -0,0 +1,32 @@
// use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
// use uuid::Uuid;
use sqlx::types::{Uuid, chrono::{{ DateTime, Utc}}};
/// A text or voice channel belonging to a sub-server (persisted row).
#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct Channel {
    pub id: Uuid,
    pub sub_server_id: Uuid, // FK -> sub_server.id
    pub channel_type: ChannelType,
    pub name: String,
    pub created_at: DateTime<Utc>,
}

/// Kind of channel; fixed at creation time (like Discord).
/// NOTE(review): encoded as i32 here, but the migration declares
/// `channel_type TEXT` — confirm the column type before relying on decoding.
#[derive(Debug, Clone, sqlx::Type, Serialize, Deserialize)]
#[repr(i32)]
pub enum ChannelType {
    Text = 0,
    Voice = 1,
}

impl Channel {
    /// Builds a new channel with a fresh id and the current timestamp.
    pub fn new(sub_server_id: Uuid, channel_type: ChannelType, name: String) -> Self {
        Self {
            id: Uuid::new_v4(),
            sub_server_id,
            channel_type,
            name,
            created_at: Utc::now(),
        }
    }
}

View File

@@ -0,0 +1,32 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// N-N membership row linking a user to a sub-server.
/// NOTE(review): field names (display_username, last_seen_at, is_admin) do not
/// match the visible `sub_server_user` migration (joined_at, role) — reconcile.
#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct LinkSubServerUser {
    pub id: Option<u64>, // None until the row is persisted
    pub sub_server_id: Uuid,
    pub user_id: Uuid,
    pub display_username: String, // per-server display name
    pub joined_at: DateTime<Utc>,
    pub last_seen_at: Option<DateTime<Utc>>,
    pub is_admin: bool,
}

impl LinkSubServerUser {
    /// Builds a new, not-yet-persisted membership (joined now, non-admin).
    pub fn new(
        sub_server_id: Uuid,
        user_id: Uuid,
        display_username: String
    ) -> Self {
        Self {
            id: None,
            sub_server_id,
            user_id,
            display_username,
            joined_at: Utc::now(),
            last_seen_at: None,
            is_admin: false,
        }
    }
}

View File

@@ -0,0 +1,24 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A chat message posted by a user in a channel (persisted row).
#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct Message {
    pub id: Uuid,
    pub channel_id: Uuid, // FK -> channel.id
    pub user_id: Uuid,    // FK -> user.id (author)
    pub content: String,
    pub created_at: DateTime<Utc>,
}

impl Message {
    /// Builds a new message with a fresh id and the current timestamp.
    pub fn new(channel_id: Uuid, user_id: Uuid, content: String) -> Self {
        Self {
            id: Uuid::new_v4(),
            channel_id,
            user_id,
            content,
            created_at: Utc::now(),
        }
    }
}

11
src/store/models/mod.rs Normal file
View File

@@ -0,0 +1,11 @@
pub mod sub_server;
pub mod channel;
pub mod user;
pub mod message;
pub mod link_sub_server_user;
pub use sub_server::*;
pub use channel::*;
pub use user::*;
pub use message::*;
pub use link_sub_server_user::*;

View File

@@ -0,0 +1,24 @@
// L'application peut avoir plusieurs sous servers
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A sub-server hosted by this application (persisted row).
/// The application can host several of these.
#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct SubServer {
    pub id: Uuid,
    pub name: String,
    pub password: String, // may be hashed later, but probably unnecessary — TODO confirm
    pub created_at: DateTime<Utc>,
}

impl SubServer {
    /// Builds a new sub-server with a fresh id and the current timestamp.
    pub fn new(name: String, password: String) -> Self {
        Self {
            id: Uuid::new_v4(),
            name,
            password,
            created_at: Utc::now(),
        }
    }
}

13
src/store/models/user.rs Normal file
View File

@@ -0,0 +1,13 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// An account identified by a public/private key pair, like TeamSpeak.
/// NOTE(review): the visible `user` migration has no `pub_key` column (it has
/// email/avatar_url/updated_at instead) — `FromRow` on `SELECT *` will fail
/// until the schema and this model are reconciled.
#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct User {
    pub id: Uuid,
    pub username: String,
    pub pub_key: String, // identification via public/private key pair
    pub created_at: DateTime<Utc>,
}

View File

@@ -0,0 +1,100 @@
use std::sync::Arc;
use sqlx::SqlitePool;
use uuid::Uuid;
use crate::store::models::channel::Channel;
use crate::store::store_service::StoreService;
use crate::utils::shared_store::SharedArcMap;
#[derive(Clone)]
pub struct ChannelRepository {
store: StoreService,
}
impl ChannelRepository {
pub fn new(store: StoreService) -> Self {
Self {
store
}
}
}
impl ChannelRepository {
// getters
pub async fn all(&self) -> Vec<Arc<Channel>> {
self.store.channels.values().collect()
}
pub async fn get(&self, id: Uuid) -> Option<Arc<Channel>> {
self.store.channels.get(&id)
}
}
// pub id: Uuid,
// pub sub_server_id: Uuid,
// pub channel_type: ChannelType,
// pub name: String,
// pub created_at: DateTime<Utc>,
impl ChannelRepository {
// writers
pub async fn create(&self, mut channel: Channel) -> Result<Arc<Channel>, sqlx::Error> {
sqlx::query(
"INSERT INTO channel (id, sub_server, channel_type, name, created_at) VALUES (?, ?, ?, ?, ?)"
)
.bind(&channel.id)
.bind(&channel.sub_server_id)
.bind(&channel.channel_type)
.bind(&channel.name)
.bind(&channel.created_at)
.execute(&self.store.db)
.await?;
// ajouter au cache
let arc_server = Arc::new(channel.clone());
self.store.channels.insert_arc(channel.id, arc_server.clone());
Ok(arc_server)
}
pub async fn save(&self, sub_server: &Channel) -> Result<(), sqlx::Error> {
sqlx::query(
"UPDATE channel SET name = ? WHERE id = ?"
)
.bind(&sub_server.name)
.bind(&sub_server.id)
.execute(&self.store.db)
.await?;
// Mettre à jour le cache
self.store.channels.insert(sub_server.id, sub_server.clone());
Ok(())
}
pub async fn delete(&self, id: Uuid) -> Result<bool, sqlx::Error> {
let rows_affected = sqlx::query("DELETE FROM sub_server WHERE id = ?")
.bind(&id)
.execute(&self.store.db)
.await?
.rows_affected();
if rows_affected > 0 {
self.store.channels.remove(&id);
Ok(true)
} else {
Ok(false)
}
}
// Pour initialiser le cache depuis la DB
pub async fn load_all_from_db(&self) -> Result<(), sqlx::Error> {
let servers: Vec<Channel> = sqlx::query_as("SELECT * FROM sub_server")
.fetch_all(&self.store.db)
.await?;
for server in servers {
self.store.channels.insert(server.id, server);
}
Ok(())
}
}

View File

@@ -0,0 +1,67 @@
use std::sync::Arc;
use uuid::Uuid;
use chrono::{DateTime, Utc};
use sqlx::SqlitePool;
use crate::store::models::link_sub_server_user::LinkSubServerUser;
use crate::store::store_service::StoreService;
use crate::utils::shared_store::SharedArcVec;
#[derive(Clone)]
pub struct LinkSubServerUserRepository {
store: StoreService
}
impl LinkSubServerUserRepository {
pub fn new(store: StoreService) -> Self {
Self { store }
}
}
impl LinkSubServerUserRepository {
// getters
/// Obtenir une relation spécifique
pub async fn get_relation(&self, sub_server_id: Uuid, user_id: Uuid) -> Option<LinkSubServerUser> {
self.store.sub_server_users.iter()
.find(|relation| {
relation.sub_server_id == sub_server_id && relation.user_id == user_id
})
.map(|arc| (*arc).clone())
}
pub async fn exists(&self, sub_server_id: Uuid, user_id: Uuid) -> bool {
self.get_relation(sub_server_id, user_id).await.is_some()
}
}
impl LinkSubServerUserRepository {
// writers
/// Créer une nouvelle relation
pub async fn create(&self, through: LinkSubServerUser) -> Result<LinkSubServerUser, sqlx::Error> {
// Vérifier que la relation n'existe pas déjà
if self.exists(through.sub_server_id, through.user_id).await {
return Err(sqlx::Error::RowNotFound); // Ou une erreur custom
}
// Insérer en base
sqlx::query(
"INSERT INTO sub_server_users
(sub_server_id, user_id, display_name, joined_at, last_seen, is_admin)
VALUES (?, ?, ?, ?, ?, ?)"
)
.bind(&through.sub_server_id)
.bind(&through.user_id)
.bind(&through.display_username)
.bind(&through.joined_at)
.bind(&through.last_seen_at)
.bind(&through.is_admin)
.execute(&self.store.db)
.await?;
// Ajouter au cache
self.store.sub_server_users.push(through.clone());
Ok(through)
}
}

View File

@@ -0,0 +1,70 @@
use std::sync::Arc;
use sqlx::SqlitePool;
use uuid::Uuid;
use crate::store::models::message::Message;
use crate::store::store_service::StoreService;
#[derive(Clone)]
pub struct MessageRepository {
store: StoreService
}
impl MessageRepository {
pub fn new(store: StoreService) -> Self {
Self {
store
}
}
}
impl MessageRepository {
// getters
}
impl MessageRepository {
// writers
pub async fn create(&self, mut message: Message) -> Result<Arc<Message>, sqlx::Error> {
sqlx::query(
"INSERT INTO message (id, channel_id, user_id, content, created_at) VALUES (?, ?, ?, ?, ?)"
)
.bind(&message.id)
.bind(&message.channel_id)
.bind(&message.user_id)
.bind(&message.content)
.bind(&message.created_at)
.execute(&self.store.db)
.await?;
// ajouter au cache
let arc_message = Arc::new(message.clone());
Ok(arc_message)
}
pub async fn save(&self, message: &Message) -> Result<(), sqlx::Error> {
sqlx::query(
"UPDATE message SET content = ? WHERE id = ?"
)
.bind(&message.content)
.bind(&message.id)
.execute(&self.store.db)
.await?;
Ok(())
}
pub async fn delete(&self, id: Uuid) -> Result<bool, sqlx::Error> {
let rows_affected = sqlx::query("DELETE FROM message WHERE id = ?")
.bind(&id)
.execute(&self.store.db)
.await?
.rows_affected();
if rows_affected > 0 {
Ok(true)
} else {
Ok(false)
}
}
}

View File

@@ -0,0 +1,11 @@
mod sub_server_repository;
mod channel_repository;
mod user_repository;
mod message_repository;
mod link_sub_server_user_repository;
pub use sub_server_repository::*;
pub use channel_repository::*;
pub use user_repository::*;
pub use message_repository::*;
pub use link_sub_server_user_repository::*;

View File

@@ -0,0 +1,95 @@
use std::sync::Arc;
use sqlx::SqlitePool;
use uuid::Uuid;
use crate::store::models::sub_server::SubServer;
use crate::store::store_service::StoreService;
use crate::utils::shared_store::SharedArcMap;
#[derive(Clone)]
pub struct SubServerRepository {
store: StoreService
}
impl SubServerRepository {
pub fn new(store: StoreService) -> Self {
Self {
store
}
}
}
impl SubServerRepository {
// getters
pub async fn all(&self) -> Vec<Arc<SubServer>> {
self.store.sub_servers.values().collect()
}
pub async fn get(&self, id: Uuid) -> Option<Arc<SubServer>> {
self.store.sub_servers.get(&id)
}
}
impl SubServerRepository {
// writers
pub async fn create(&self, mut sub_server: SubServer) -> Result<Arc<SubServer>, sqlx::Error> {
sqlx::query(
"INSERT INTO sub_server (id, name, password, created_at) VALUES (?, ?, ?, ?)"
)
.bind(&sub_server.id)
.bind(&sub_server.name)
.bind(&sub_server.password)
.bind(&sub_server.created_at)
.execute(&self.store.db)
.await?;
// ajouter au cache
let arc_server = Arc::new(sub_server.clone());
self.store.sub_servers.insert_arc(sub_server.id, arc_server.clone());
Ok(arc_server)
}
pub async fn save(&self, sub_server: &SubServer) -> Result<(), sqlx::Error> {
sqlx::query(
"UPDATE sub_server SET name = ?, password = ? WHERE id = ?"
)
.bind(&sub_server.name)
.bind(&sub_server.password)
.bind(&sub_server.id)
.execute(&self.store.db)
.await?;
// Mettre à jour le cache
self.store.sub_servers.insert(sub_server.id, sub_server.clone());
Ok(())
}
pub async fn delete(&self, id: Uuid) -> Result<bool, sqlx::Error> {
let rows_affected = sqlx::query("DELETE FROM sub_server WHERE id = ?")
.bind(&id)
.execute(&self.store.db)
.await?
.rows_affected();
if rows_affected > 0 {
self.store.sub_servers.remove(&id);
Ok(true)
} else {
Ok(false)
}
}
// Pour initialiser le cache depuis la DB
pub async fn load_all_from_db(&self) -> Result<(), sqlx::Error> {
let servers: Vec<SubServer> = sqlx::query_as("SELECT * FROM sub_server")
.fetch_all(&self.store.db)
.await?;
for server in servers {
self.store.sub_servers.insert(server.id, server);
}
Ok(())
}
}

View File

@@ -0,0 +1,96 @@
use std::sync::Arc;
use sqlx::SqlitePool;
use uuid::Uuid;
use crate::store::models::user::User;
use crate::utils::shared_store::SharedArcMap;
#[derive(Clone)]
pub struct UserRepository {
db: SqlitePool,
cache: SharedArcMap<Uuid, User>
}
impl UserRepository {
pub fn new(db: SqlitePool, cache: SharedArcMap<Uuid, User>) -> Self {
Self {
db,
cache
}
}
}
impl UserRepository {
// getters
pub async fn all(&self) -> Vec<Arc<User>> {
self.cache.values().collect()
}
pub async fn get(&self, id: Uuid) -> Option<Arc<User>> {
self.cache.get(&id)
}
}
impl UserRepository {
// writers
pub async fn create(&self, mut user: User) -> Result<Arc<User>, sqlx::Error> {
sqlx::query(
"INSERT INTO user (id, name, password, created_at) VALUES (?, ?, ?, ?)"
)
.bind(&user.id)
.bind(&user.username)
.bind(&user.pub_key)
.bind(&user.created_at)
.execute(&self.db)
.await?;
// ajouter au cache
let arc_server = Arc::new(user.clone());
self.cache.insert_arc(user.id, arc_server.clone());
Ok(arc_server)
}
pub async fn save(&self, user: &User) -> Result<(), sqlx::Error> {
sqlx::query(
"UPDATE user SET name = ?, password = ? WHERE id = ?"
)
.bind(&user.username)
.bind(&user.pub_key)
.bind(&user.id)
.execute(&self.db)
.await?;
// Mettre à jour le cache
self.cache.insert(user.id, user.clone());
Ok(())
}
pub async fn delete(&self, id: Uuid) -> Result<bool, sqlx::Error> {
let rows_affected = sqlx::query("DELETE FROM user WHERE id = ?")
.bind(&id)
.execute(&self.db)
.await?
.rows_affected();
if rows_affected > 0 {
self.cache.remove(&id);
Ok(true)
} else {
Ok(false)
}
}
// Pour initialiser le cache depuis la DB
pub async fn load_all_from_db(&self) -> Result<(), sqlx::Error> {
let servers: Vec<User> = sqlx::query_as("SELECT * FROM user")
.fetch_all(&self.db)
.await?;
for server in servers {
self.cache.insert(server.id, server);
}
Ok(())
}
}

111
src/store/store_service.rs Normal file
View File

@@ -0,0 +1,111 @@
use std::sync::Arc;
use uuid::Uuid;
use sqlx::{AnyPool, SqlitePool};
use crate::store::models::{SubServer, Channel, User, Message, ChannelType, LinkSubServerUser};
use crate::store::repositories::{
SubServerRepository, ChannelRepository, UserRepository,
MessageRepository, LinkSubServerUserRepository
};
use crate::utils::shared_store::{SharedArcMap, SharedArcVec};
/// Events emitted when the store changes (for future subscribers).
#[derive(Debug, Clone)]
pub enum StoreEvent {
    SubServerCreated(Arc<SubServer>),
    SubServerUpdated(Arc<SubServer>),
    ChannelCreated(Arc<Channel>),
    ChannelUpdated(Arc<Channel>),
    MessageSent(Message),
    UserJoinedSubServer { sub_server_id: Uuid, user_id: Uuid },
}

/// Database handle plus centralised in-memory caches; cheap to clone and share.
#[derive(Clone)]
pub struct StoreService {
    // Database
    pub db: SqlitePool,

    // Centralised in-memory caches, warmed at startup by `load_all_caches`
    pub users: SharedArcMap<Uuid, User>,
    pub sub_servers: SharedArcMap<Uuid, SubServer>,
    pub channels: SharedArcMap<Uuid, Channel>,
    pub sub_server_users: SharedArcVec<LinkSubServerUser>,
}
impl StoreService {
    /// Opens (creating if needed) the database, runs migrations, and warms the caches.
    pub async fn new(database_url: &str) -> Result<Self, sqlx::Error> {
        let connection_url = Self::normalize_database_url(database_url);
        println!("🔍 Tentative de connexion à: {}", connection_url);

        let db = SqlitePool::connect(&connection_url).await?;
        sqlx::migrate!("./src/store/migrations").run(&db).await?;

        let service = Self {
            db,
            users: SharedArcMap::new(),
            sub_servers: SharedArcMap::new(),
            channels: SharedArcMap::new(),
            sub_server_users: SharedArcVec::new(),
        };

        // Warm the caches. Startup still continues on failure (best-effort), but
        // the error is now surfaced instead of being silently discarded.
        if let Err(e) = service.load_all_caches().await {
            eprintln!("⚠️ Failed to warm store caches: {}", e);
        }

        Ok(service)
    }

    /// Loads every row of every table into the in-memory caches.
    async fn load_all_caches(&self) -> Result<(), sqlx::Error> {
        // Users
        let users: Vec<User> = sqlx::query_as("SELECT * FROM user")
            .fetch_all(&self.db)
            .await?;
        for user in users {
            self.users.insert(user.id, user);
        }

        // SubServers
        let sub_servers: Vec<SubServer> = sqlx::query_as("SELECT * FROM sub_server")
            .fetch_all(&self.db)
            .await?;
        for sub_server in sub_servers {
            self.sub_servers.insert(sub_server.id, sub_server);
        }

        // Channels
        let channels: Vec<Channel> = sqlx::query_as("SELECT * FROM channel")
            .fetch_all(&self.db)
            .await?;
        for channel in channels {
            self.channels.insert(channel.id, channel);
        }

        // N-N relations
        let relations: Vec<LinkSubServerUser> = sqlx::query_as("SELECT * FROM sub_server_user")
            .fetch_all(&self.db)
            .await?;
        for relation in relations {
            self.sub_server_users.push(relation);
        }

        Ok(())
    }
}
impl StoreService {
    // ===== HELPERS =====

    /// Turns a bare file path into a SQLite connection URL; full URLs and
    /// `sqlite:`-prefixed strings pass through untouched.
    fn normalize_database_url(database_url: &str) -> String {
        // Already a complete URL (any scheme) or an explicit sqlite: string.
        if database_url.contains("://") || database_url.starts_with("sqlite:") {
            database_url.to_string()
        } else {
            // Bare path: assume SQLite; mode=rwc creates the file when missing.
            format!("sqlite:{}?mode=rwc", database_url)
        }
    }
}