2025-06-03 11:42:51 +03:00
parent 384e6aede2
commit e0a45216b0
9 changed files with 5262 additions and 30 deletions

backend.txt (new file, 5062 additions): file diff suppressed because it is too large.


@@ -103,3 +103,61 @@ BEGIN
RETURN new_channel_id;
END;
$$;
CREATE OR REPLACE FUNCTION create_group_channel(
p_creator_id UUID, -- The user initiating the group creation (will be owner)
p_recipient_user_ids UUID[], -- Array of recipient user IDs for the group (the creator is added automatically)
p_group_channel_type INT2 -- The channel type identifier for groups (e.g., 1)
)
RETURNS UUID
LANGUAGE plpgsql
AS
$$
DECLARE
new_channel_id UUID;
existing_channel_id UUID;
final_channel_name VARCHAR;
unique_sorted_recipient_ids UUID[];
num_recipients INT;
uid UUID;
-- Threshold for detailed name vs. summary name
MAX_MEMBERS_FOR_DETAILED_NAME CONSTANT INT := 3;
BEGIN
-- Validate and process recipient IDs
IF p_recipient_user_ids IS NULL OR array_length(p_recipient_user_ids, 1) IS NULL THEN
RAISE EXCEPTION 'Recipient user IDs array must be provided and not empty.';
END IF;
-- Get unique, sorted recipient IDs for consistent checking and to avoid duplicates.
SELECT array_agg(DISTINCT u ORDER BY u) INTO unique_sorted_recipient_ids FROM unnest(p_recipient_user_ids) u;
num_recipients := array_length(unique_sorted_recipient_ids, 1);
-- Validate minimum number of recipients for a group
IF num_recipients < 1 THEN -- Groups need at least one recipient besides the creator (2+ members total)
RAISE EXCEPTION 'Group channels (type %) must have at least 1 recipient besides the creator. Found %.', p_group_channel_type, num_recipients;
END IF;
-- Create new group channel with a default name
final_channel_name := 'Group';
INSERT INTO "channel" ("name", "type", "position", "owner_id", "server_id", "parent")
VALUES (final_channel_name,
p_group_channel_type,
0, -- Default position
p_creator_id,
NULL, -- Not a server channel
NULL -- Not a nested server channel
)
RETURNING id INTO new_channel_id;
-- Add the creator and all recipients to the channel_recipient table
INSERT INTO "channel_recipient" ("channel_id", "user_id")
VALUES (new_channel_id, p_creator_id);
INSERT INTO "channel_recipient" ("channel_id", "user_id")
SELECT new_channel_id, r_id
FROM unnest(unique_sorted_recipient_ids) AS r_id
WHERE r_id <> p_creator_id; -- the creator was already inserted above
RAISE NOTICE 'Group channel (type %) named "%" created with ID: % by owner % for recipients: %',
p_group_channel_type, final_channel_name, new_channel_id, p_creator_id, unique_sorted_recipient_ids;
RETURN new_channel_id;
END;
$$;


@@ -621,7 +621,7 @@ impl Database {
&self,
user1_id: entity::user::Id,
user2_id: entity::user::Id,
) -> Result<Option<entity::channel::Id>> {
) -> Result<entity::channel::Id> {
let channel_id = sqlx::query_scalar!(
r#"SELECT create_dm_channel($1, $2, $3)"#,
user1_id,
@@ -629,7 +629,26 @@ impl Database {
entity::channel::ChannelType::DirectMessage as i16
)
.fetch_one(&self.pool)
.await?;
.await?
.expect("channel_id is null");
Ok(channel_id)
}
pub async fn procedure_create_group_channel(
&self,
creator_id: entity::user::Id,
users: &[entity::user::Id],
) -> Result<entity::channel::Id> {
let channel_id = sqlx::query_scalar!(
r#"SELECT create_group_channel($1, $2, $3)"#,
creator_id,
users,
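// Channel type forwarded as the p_group_channel_type argument; note this reuses ChannelType::DirectMessage as the identifier.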
entity::channel::ChannelType::DirectMessage as i16
)
.fetch_one(&self.pool)
.await?
.expect("channel_id is null");
Ok(channel_id)
}


@@ -71,6 +71,7 @@ fn protected_router() -> axum::Router<state::AppState> {
.route("/users/@me", get(user::me))
.route("/users/@me", patch(user::patch))
.route("/users/@me/channels", get(user::channel::list))
.route("/users/@me/channels", post(user::channel::create))
.route("/users/{id}", get(user::get_by_id))
// channel
.route(


@@ -0,0 +1,75 @@
use axum::Json;
use axum::extract::State;
use axum::response::IntoResponse;
use axum_extra::extract::WithRejection;
use serde::Deserialize;
use validator::Validate;
use crate::state::AppState;
use crate::web::context::UserContext;
use crate::web::route::user::channel::RecipientChannel;
use crate::web::ws;
use crate::{entity, web};
#[derive(Debug, Validate, Deserialize)]
pub struct CreatePayload {
#[validate(length(min = 1, max = 32))]
recipients: Vec<entity::user::Id>,
}
pub async fn create(
State(state): State<AppState>,
context: UserContext,
WithRejection(Json(payload), _): WithRejection<Json<CreatePayload>, web::Error>,
) -> web::Result<impl IntoResponse> {
match payload.validate() {
Ok(_) => {},
Err(err) => {
return Err(web::error::ClientError::ValidationFailed(err).into());
},
}
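// A single recipient maps to a direct-message channel; two or more create a group channel.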
let channel_id = match payload.recipients.len() {
1 => {
let recipient = payload.recipients[0];
state
.database
.procedure_create_dm_channel(context.user_id, recipient)
.await?
},
_ => {
state
.database
.procedure_create_group_channel(context.user_id, &payload.recipients)
.await?
},
};
let channel = state.database.select_channel_by_id(channel_id).await?;
let recipients = state
.database
.select_channel_recipients(channel_id)
.await?
.unwrap_or_default()
.into_iter()
.map(|user| user.id)
.collect::<Vec<_>>();
let recipient_channels = RecipientChannel {
channel: channel.clone(),
recipients: recipients.clone(),
};
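// Notify connected clients over the gateway; the AddDmChannel event is reused for group channels as well.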
ws::gateway::util::send_message_channel(
state,
channel_id,
ws::gateway::event::Event::AddDmChannel {
channel,
recipients,
},
);
Ok(Json(recipient_channels))
}
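Combined with the new POST /users/@me/channels route registered above, this handler lets a client open a DM or group channel by posting recipient IDs. Below is a rough sketch of the request body and of the flattened RecipientChannel response, written with serde_json purely for illustration; the concrete channel fields and the example UUIDs are placeholders, since channel::Channel is not shown in this diff.

use serde_json::json;

fn main() {
    // Request body for POST /users/@me/channels:
    // one recipient -> DM channel, two or more -> group channel.
    let request = json!({
        "recipients": [
            "6f1e9a3c-1111-2222-3333-444455556666",
            "6f1e9a3c-1111-2222-3333-777788889999"
        ]
    });

    // Response shape: a RecipientChannel, with the channel fields flattened to the
    // top level by #[serde(flatten)] plus the recipient IDs. The channel fields
    // shown here ("id", "name") are placeholders for illustration only.
    let response = json!({
        "id": "0b9d2f61-aaaa-bbbb-cccc-ddddeeeeffff",
        "name": "Group",
        "recipients": [
            "6f1e9a3c-1111-2222-3333-444455556666",
            "6f1e9a3c-1111-2222-3333-777788889999"
        ]
    });

    println!("{request}");
    println!("{response}");
}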


@@ -1,21 +1,11 @@
use axum::Json;
use axum::extract::State;
use axum::response::IntoResponse;
use serde::Serialize;
use crate::entity::channel;
use crate::state::AppState;
use crate::web;
use crate::web::context::UserContext;
use crate::web::entity::user::PartialUser;
#[derive(Debug, sqlx::FromRow, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RecipientChannel {
#[serde(flatten)]
pub channel: channel::Channel,
pub recipients: Vec<PartialUser>,
}
use crate::web::route::user::channel::RecipientChannel;
pub async fn list(
State(state): State<AppState>,
@@ -30,8 +20,7 @@ pub async fn list(
let recipients = match recipients {
Some(recipients) => recipients
.into_iter()
.filter(|user| user.id != context.user_id)
.map(PartialUser::from)
.map(|user| user.id)
.collect(),
None => {
continue;


@@ -1,3 +1,16 @@
mod create;
mod list;
pub use create::create;
pub use list::list;
use serde::Serialize;
use crate::entity::{channel, user};
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct RecipientChannel {
#[serde(flatten)]
pub channel: channel::Channel,
pub recipients: Vec<user::Id>,
}


@@ -11,7 +11,10 @@ pub enum Event {
RemoveServer { server_id: entity::server::Id },
#[serde(rename_all = "camelCase")]
AddDmChannel { channel: entity::channel::Channel },
AddDmChannel {
channel: entity::channel::Channel,
recipients: Vec<entity::user::Id>,
},
#[serde(rename_all = "camelCase")]
RemoveDmChannel { channel_id: entity::channel::Id },
@@ -31,9 +34,7 @@ pub enum Event {
},
#[serde(rename_all = "camelCase")]
RemoveUser {
user_id: entity::user::Id,
},
RemoveUser { user_id: entity::user::Id },
#[serde(rename_all = "camelCase")]
AddServerMember {


@@ -23,6 +23,7 @@ struct PeerState {
peer_id: PeerId,
peer_connection: Arc<webrtc::peer_connection::RTCPeerConnection>,
outgoing_audio_track: Arc<TrackLocalStaticRTP>,
ssrc: u32,
}
struct RoomState {
@@ -136,7 +137,8 @@ pub async fn webrtc_task(
Ok(())
}
#[tracing::instrument(skip(api, room_state, offer_signal), fields(peer_id = %offer_signal.offer.peer_id))]
#[tracing::instrument(skip(api, room_state, offer_signal), fields(peer_id = %offer_signal.offer.peer_id
))]
async fn handle_peer(
api: Arc<API>,
room_state: Arc<RoomState>,
@@ -238,13 +240,11 @@ async fn handle_peer(
peer_id: offer_signal.offer.peer_id,
peer_connection: Arc::clone(&peer_connection),
outgoing_audio_track: Arc::clone(&outgoing_track),
ssrc: 0,
};
{
if let Some((_, old_peer)) = room_state
.peers
.remove(&offer_signal.offer.peer_id)
{
if let Some((_, old_peer)) = room_state.peers.remove(&offer_signal.offer.peer_id) {
let _ = old_peer.peer_connection.close().await;
}
}
@@ -262,13 +262,24 @@ async fn handle_peer(
Ok(())
}
#[tracing::instrument(skip(room_state, track), fields(room_id = %room_state.room_id, peer_id = %peer_id))]
#[tracing::instrument(skip(room_state, track), fields(room_id = %room_state.room_id, peer_id = %peer_id
))]
async fn forward_audio_track(
room_state: Arc<RoomState>,
peer_id: PeerId,
track: Arc<TrackRemote>,
) -> anyhow::Result<()> {
let mut rtp_buf = vec![0u8; 1500];
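// Record the SSRC of this peer's incoming track; packets forwarded to this peer
// are re-stamped with it so its outgoing track carries one consistent SSRC.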
{
let mut peer_state = room_state
.peers
.get_mut(&peer_id)
.expect("peer state not found");
peer_state.ssrc = track.ssrc();
}
while let Ok((rtp_packet, _attr)) = track.read(&mut rtp_buf).await {
let other_peer_tracks = room_state
.peers
@@ -276,7 +287,10 @@ async fn forward_audio_track(
.filter_map(|pair| {
let peer_state = pair.value();
if peer_state.peer_id != peer_id {
Some(peer_state.outgoing_audio_track.clone())
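// Clone the packet and re-stamp it with the destination peer's recorded SSRC
// before handing it to that peer's outgoing track.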
let mut rtp_packet = rtp_packet.clone();
rtp_packet.header.ssrc = peer_state.ssrc;
Some((peer_state.outgoing_audio_track.clone(), rtp_packet))
} else {
None
}
@@ -290,7 +304,7 @@ async fn forward_audio_track(
let write_futures = other_peer_tracks
.iter()
.map(|outgoing_track| outgoing_track.write_rtp(&rtp_packet));
.map(|(outgoing_track, packet)| outgoing_track.write_rtp(&packet));
let results = futures::future::join_all(write_futures).await;
for result in results {