Initial commit: Momentry Core v0.1
- Rust-based digital asset management system
- Video analysis: ASR, OCR, YOLO, Face, Pose
- RAG capabilities with Qdrant vector database
- Multi-database support: PostgreSQL, Redis, MongoDB
- Monitoring system with launchd plists
- n8n workflow automation integration

src/api/mod.rs (Normal file)
@@ -0,0 +1,3 @@
pub mod server;

pub use server::start_server;

src/api/server.rs (Normal file)
@@ -0,0 +1,20 @@
// Placeholder for API server

pub async fn start_server(host: &str, port: u16) -> anyhow::Result<()> {
    println!("Starting API server at {}:{}", host, port);
    // TODO: Implement Axum server
    //
    // Routes:
    //   POST   /api/v1/register
    //   POST   /api/v1/process
    //   POST   /api/v1/chunk
    //   POST   /api/v1/vectorize
    //   POST   /api/v1/store
    //   POST   /api/v1/watch
    //   DELETE /api/v1/watch/{path}
    //   GET    /api/v1/lookup?path=...
    //   GET    /api/v1/resolve?uuid=...
    //   GET    /api/v1/status/{uuid}
    //   POST   /api/v1/query
    Ok(())
}

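The route list above maps naturally onto an Axum router. A minimal sketch of what start_server could grow into, assuming axum 0.7 and tokio with the "net" feature (neither dependency is confirmed by this commit); the handlers are stubs, only two routes are shown, and the rest would follow the same pattern:

use axum::{
    routing::{get, post},
    Router,
};

// Hypothetical stub handlers; the real ones would call into momentry_core.
async fn register() -> &'static str { "registered" }
async fn status() -> &'static str { "ok" }

pub async fn start_server(host: &str, port: u16) -> anyhow::Result<()> {
    let app = Router::new()
        .route("/api/v1/register", post(register))
        .route("/api/v1/status/:uuid", get(status));

    let listener = tokio::net::TcpListener::bind(format!("{}:{}", host, port)).await?;
    println!("Starting API server at {}:{}", host, port);
    axum::serve(listener, app).await?;
    Ok(())
}
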
src/core/chunk/mod.rs (Normal file)
@@ -0,0 +1,5 @@
pub mod splitter;
pub mod types;

pub use splitter::{AsrSegment, ChunkSplitter};
pub use types::{Chunk, ChunkType};

src/core/chunk/splitter.rs (Normal file)
@@ -0,0 +1,67 @@
use super::types::{Chunk, ChunkType};

pub struct ChunkSplitter {
    time_based_duration: f64,
}

impl ChunkSplitter {
    pub fn new(time_based_duration_seconds: f64) -> Self {
        Self {
            time_based_duration: time_based_duration_seconds,
        }
    }

    pub fn split_time_based(&self, uuid: &str, duration: f64) -> Vec<Chunk> {
        let mut chunks = Vec::new();
        let mut index = 0;
        let mut current_time = 0.0;

        while current_time < duration {
            let end_time = (current_time + self.time_based_duration).min(duration);
            chunks.push(Chunk::new(
                uuid.to_string(),
                index,
                ChunkType::TimeBased,
                current_time,
                end_time,
                serde_json::json!({
                    "source": "time_based",
                    "duration": self.time_based_duration,
                }),
            ));
            current_time = end_time;
            index += 1;
        }

        chunks
    }

    pub fn split_sentence(&self, uuid: &str, asr_segments: &[AsrSegment]) -> Vec<Chunk> {
        let mut chunks = Vec::new();

        for (index, segment) in asr_segments.iter().enumerate() {
            chunks.push(Chunk::new(
                uuid.to_string(),
                index as u32,
                ChunkType::Sentence,
                segment.start,
                segment.end,
                serde_json::json!({
                    "text": segment.text,
                    "speaker_id": segment.speaker_id,
                }),
            ));
        }

        chunks
    }
}

#[derive(Debug, Clone)]
pub struct AsrSegment {
    pub start: f64,
    pub end: f64,
    pub text: String,
    pub speaker_id: Option<String>,
}

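For a concrete picture of the time-based strategy: a 25-second video split with a 10-second window yields three chunks, the last one truncated at the video's end. A small usage example (the UUID is illustrative):

use momentry_core::ChunkSplitter;

fn main() {
    let splitter = ChunkSplitter::new(10.0);
    let chunks = splitter.split_time_based("a1b2c3d4e5f60718", 25.0);

    // Windows: [0, 10), [10, 20), [20, 25] -> three chunks.
    assert_eq!(chunks.len(), 3);
    assert_eq!(chunks[2].start_time, 20.0);
    assert_eq!(chunks[2].end_time, 25.0);
    assert_eq!(chunks[0].chunk_id, "time_based_0000");
}
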
src/core/chunk/types.rs (Normal file)
@@ -0,0 +1,52 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ChunkType {
    TimeBased,
    Sentence,
    Cut,
}

impl ChunkType {
    pub fn as_str(&self) -> &'static str {
        match self {
            ChunkType::TimeBased => "time_based",
            ChunkType::Sentence => "sentence",
            ChunkType::Cut => "cut",
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Chunk {
    pub uuid: String,
    pub chunk_id: String,
    pub chunk_index: u32,
    pub chunk_type: ChunkType,
    pub start_time: f64,
    pub end_time: f64,
    pub content: serde_json::Value,
}

impl Chunk {
    pub fn new(
        uuid: String,
        chunk_index: u32,
        chunk_type: ChunkType,
        start_time: f64,
        end_time: f64,
        content: serde_json::Value,
    ) -> Self {
        // chunk_id is derived from type and index, e.g. "sentence_0003".
        let chunk_id = format!("{}_{:04}", chunk_type.as_str(), chunk_index);
        Self {
            uuid,
            chunk_id,
            chunk_index,
            chunk_type,
            start_time,
            end_time,
            content,
        }
    }
}

src/core/db/mod.rs (Normal file)
@@ -0,0 +1,40 @@
use anyhow::Result;
use async_trait::async_trait;

use crate::core::chunk::Chunk;

#[derive(Debug, Clone)]
pub struct SearchResult {
    pub chunk_id: String,
    pub score: f32,
}

#[async_trait]
pub trait Database: Send + Sync {
    async fn init() -> Result<Self>
    where
        Self: Sized;
}

#[async_trait]
pub trait ChunkStore: Send + Sync {
    async fn store_chunk(&self, chunk: &Chunk) -> Result<()>;
    async fn get_chunks_by_uuid(&self, uuid: &str) -> Result<Vec<Chunk>>;
    async fn get_all_chunks(&self) -> Result<Vec<Chunk>>;
}

#[async_trait]
pub trait VectorStore: Send + Sync {
    async fn store_vector(&self, chunk_id: &str, vector: &[f32]) -> Result<()>;
    async fn search(&self, query_vector: &[f32], limit: usize) -> Result<Vec<SearchResult>>;
}

pub mod mongodb_db;
pub mod postgres_db;
pub mod qdrant_db;
pub mod redis_db;

pub use mongodb_db::MongoDb;
pub use postgres_db::{PostgresDb, VideoRecord};
pub use qdrant_db::QdrantDb;
pub use redis_db::RedisDb;

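Because the pipeline stages only need ChunkStore and VectorStore, callers can stay backend-agnostic by accepting trait objects (async_trait keeps the traits object-safe). A small sketch; the index_chunk helper is illustrative and not part of this commit:

use anyhow::Result;
use momentry_core::core::db::{ChunkStore, VectorStore};
use momentry_core::Chunk;

// Hypothetical helper: persist a chunk and its embedding through whichever
// backends implement the storage traits.
pub async fn index_chunk(
    chunks: &dyn ChunkStore,
    vectors: &dyn VectorStore,
    chunk: &Chunk,
    embedding: &[f32],
) -> Result<()> {
    chunks.store_chunk(chunk).await?;
    vectors.store_vector(&chunk.chunk_id, embedding).await?;
    Ok(())
}
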
src/core/db/mongodb_db.rs (Normal file)
@@ -0,0 +1,53 @@
use anyhow::Result;
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::RwLock;

use super::Database;

pub struct MongoDb {
    cache: Arc<RwLock<MongoCache>>,
}

#[derive(Debug, Default)]
pub struct MongoCache {
    documents: std::collections::HashMap<String, serde_json::Value>,
}

#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct VideoDocument {
    pub uuid: String,
    pub file_path: String,
    pub file_name: String,
    pub probe: serde_json::Value,
    pub asr: Option<serde_json::Value>,
    pub asrx: Option<serde_json::Value>,
    pub ocr: Option<serde_json::Value>,
    pub yolo: Option<serde_json::Value>,
    pub face: Option<serde_json::Value>,
    pub pose: Option<serde_json::Value>,
    pub created_at: String,
    pub updated_at: String,
}

impl MongoDb {
    pub async fn store_video(&self, _doc: &VideoDocument) -> Result<()> {
        // TODO: Implement MongoDB client
        Ok(())
    }

    pub async fn get_video(&self, _uuid: &str) -> Result<Option<VideoDocument>> {
        // TODO: Implement MongoDB client
        Ok(None)
    }
}

#[async_trait]
impl Database for MongoDb {
    async fn init() -> Result<Self> {
        // TODO: Initialize MongoDB client
        Ok(Self {
            cache: Arc::new(RwLock::new(MongoCache::default())),
        })
    }
}

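A sketch of what wiring the real driver could look like, assuming the official mongodb crate with its 2.x API (in 3.x the options argument moves to builder methods); the database and collection names here are guesses, not part of this commit:

use anyhow::Result;
use mongodb::{bson::doc, Client, Collection};
use momentry_core::core::db::mongodb_db::VideoDocument;

// Minimal sketch: store and fetch a VideoDocument through a real connection.
async fn mongo_sketch(doc_in: &VideoDocument) -> Result<Option<VideoDocument>> {
    let client = Client::with_uri_str("mongodb://localhost:27017").await?;
    let videos: Collection<VideoDocument> =
        client.database("momentry").collection("videos");

    videos.insert_one(doc_in, None).await?;
    let found = videos
        .find_one(doc! { "uuid": doc_in.uuid.as_str() }, None)
        .await?;
    Ok(found)
}
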
src/core/db/postgres_db.rs (Normal file)
@@ -0,0 +1,286 @@
use anyhow::Result;
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use std::sync::Arc;
use tokio::sync::RwLock;

use super::Database;
use crate::core::chunk::{Chunk, ChunkType};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VideoRecord {
    pub id: i64,
    pub uuid: String,
    pub file_path: String,
    pub file_name: String,
    pub duration: f64,
    pub width: u32,
    pub height: u32,
    pub fps: f64,
    pub probe_json: Option<String>,
    pub created_at: String,
}

pub struct PostgresDb {
    pool: PgPool,
    cache: Arc<RwLock<PostgresCache>>,
}

#[derive(Debug, Default)]
pub struct PostgresCache {
    videos: std::collections::HashMap<String, VideoRecord>,
    chunks: std::collections::HashMap<String, Vec<Chunk>>,
}

impl PostgresDb {
    pub async fn new(database_url: &str) -> Result<Self> {
        let pool = PgPool::connect(database_url).await?;

        let db = Self {
            pool,
            cache: Arc::new(RwLock::new(PostgresCache::default())),
        };

        db.init_schema().await?;
        Ok(db)
    }

    pub async fn register_video(&self, record: &VideoRecord) -> Result<i64> {
        let result = sqlx::query(
            r#"
            INSERT INTO videos (uuid, file_path, file_name, duration, width, height, fps, probe_json)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
            ON CONFLICT (uuid) DO UPDATE SET
                file_path = EXCLUDED.file_path,
                file_name = EXCLUDED.file_name,
                duration = EXCLUDED.duration,
                width = EXCLUDED.width,
                height = EXCLUDED.height,
                fps = EXCLUDED.fps,
                probe_json = EXCLUDED.probe_json,
                updated_at = CURRENT_TIMESTAMP
            RETURNING id::bigint
            "#,
        )
        .bind(&record.uuid)
        .bind(&record.file_path)
        .bind(&record.file_name)
        .bind(record.duration)
        .bind(record.width as i32)
        .bind(record.height as i32)
        .bind(record.fps)
        .bind(&record.probe_json)
        .fetch_one(&self.pool)
        .await?;

        let id: i64 = result.get(0);

        // Update cache
        let mut cache = self.cache.write().await;
        let mut record = record.clone();
        record.id = id;
        cache.videos.insert(record.uuid.clone(), record);

        Ok(id)
    }

    pub async fn get_video_by_uuid(&self, uuid: &str) -> Result<Option<VideoRecord>> {
        // Check cache first
        {
            let cache = self.cache.read().await;
            if let Some(video) = cache.videos.get(uuid) {
                return Ok(Some(video.clone()));
            }
        }

        let result = sqlx::query_as::<_, (i32, String, String, String, f64, i32, i32, f64, Option<String>)>(
            "SELECT id, uuid, file_path, file_name, duration, width, height, fps, probe_json FROM videos WHERE uuid = $1"
        )
        .bind(uuid)
        .fetch_optional(&self.pool)
        .await?;

        if let Some(r) = result {
            let video = VideoRecord {
                id: r.0 as i64,
                uuid: r.1,
                file_path: r.2,
                file_name: r.3,
                duration: r.4,
                width: r.5 as u32,
                height: r.6 as u32,
                fps: r.7,
                probe_json: r.8,
                created_at: String::new(),
            };

            // Update cache
            let mut cache = self.cache.write().await;
            cache.videos.insert(uuid.to_string(), video.clone());

            Ok(Some(video))
        } else {
            Ok(None)
        }
    }

    pub async fn list_videos(&self) -> Result<Vec<VideoRecord>> {
        let rows = sqlx::query_as::<_, (i32, String, String, String, f64, i32, i32, f64, Option<String>)>(
            "SELECT id, uuid, file_path, file_name, duration, width, height, fps, probe_json FROM videos ORDER BY id DESC"
        )
        .fetch_all(&self.pool)
        .await?;

        let videos: Vec<VideoRecord> = rows
            .into_iter()
            .map(|r| VideoRecord {
                id: r.0 as i64,
                uuid: r.1,
                file_path: r.2,
                file_name: r.3,
                duration: r.4,
                width: r.5 as u32,
                height: r.6 as u32,
                fps: r.7,
                probe_json: r.8,
                created_at: String::new(),
            })
            .collect();

        Ok(videos)
    }

    async fn init_schema(&self) -> Result<()> {
        sqlx::query(
            r#"
            CREATE TABLE IF NOT EXISTS videos (
                id SERIAL PRIMARY KEY,
                uuid VARCHAR(32) UNIQUE NOT NULL,
                file_path TEXT NOT NULL,
                file_name TEXT NOT NULL,
                duration DOUBLE PRECISION,
                width INTEGER,
                height INTEGER,
                fps DOUBLE PRECISION,
                probe_json TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
            "#,
        )
        .execute(&self.pool)
        .await?;

        sqlx::query("CREATE INDEX IF NOT EXISTS idx_videos_uuid ON videos(uuid)")
            .execute(&self.pool)
            .await?;

        sqlx::query(
            r#"
            CREATE TABLE IF NOT EXISTS chunks (
                id SERIAL PRIMARY KEY,
                uuid VARCHAR(32) NOT NULL,
                chunk_id VARCHAR(64) NOT NULL,
                chunk_index INTEGER NOT NULL,
                chunk_type VARCHAR(32) NOT NULL,
                start_time DOUBLE PRECISION NOT NULL,
                end_time DOUBLE PRECISION NOT NULL,
                content JSONB NOT NULL,
                vector_id VARCHAR(64),
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                UNIQUE(uuid, chunk_id)
            )
            "#,
        )
        .execute(&self.pool)
        .await?;

        sqlx::query("CREATE INDEX IF NOT EXISTS idx_chunks_uuid ON chunks(uuid)")
            .execute(&self.pool)
            .await?;

        sqlx::query("CREATE INDEX IF NOT EXISTS idx_chunks_type ON chunks(chunk_type)")
            .execute(&self.pool)
            .await?;

        sqlx::query("CREATE INDEX IF NOT EXISTS idx_chunks_time ON chunks(start_time, end_time)")
            .execute(&self.pool)
            .await?;

        Ok(())
    }

    pub async fn store_chunk(&self, chunk: &Chunk) -> Result<()> {
        // vector_id is deliberately left untouched on conflict: it is not part
        // of this INSERT, so EXCLUDED.vector_id would always be NULL and would
        // wipe out an existing value.
        sqlx::query(
            r#"
            INSERT INTO chunks (uuid, chunk_id, chunk_index, chunk_type, start_time, end_time, content)
            VALUES ($1, $2, $3, $4, $5, $6, $7::jsonb)
            ON CONFLICT (uuid, chunk_id) DO UPDATE SET
                start_time = EXCLUDED.start_time,
                end_time = EXCLUDED.end_time,
                content = EXCLUDED.content
            "#,
        )
        .bind(&chunk.uuid)
        .bind(&chunk.chunk_id)
        .bind(chunk.chunk_index as i32)
        .bind(chunk.chunk_type.as_str())
        .bind(chunk.start_time)
        .bind(chunk.end_time)
        .bind(&chunk.content)
        .execute(&self.pool)
        .await?;

        Ok(())
    }

    pub async fn get_chunks_by_uuid(&self, uuid: &str) -> Result<Vec<Chunk>> {
        let rows = sqlx::query(
            "SELECT uuid, chunk_id, chunk_index, chunk_type, start_time, end_time, content FROM chunks WHERE uuid = $1 ORDER BY chunk_index"
        )
        .bind(uuid)
        .fetch_all(&self.pool)
        .await?;

        let chunks: Vec<Chunk> = rows
            .into_iter()
            .map(|r| {
                let chunk_type_str: String = r.get(3);
                let chunk_index: i32 = r.get(2);
                let chunk_type = match chunk_type_str.as_str() {
                    "time_based" => ChunkType::TimeBased,
                    "sentence" => ChunkType::Sentence,
                    "cut" => ChunkType::Cut,
                    _ => ChunkType::TimeBased,
                };

                // content is a JSONB column; decoding it directly into
                // serde_json::Value requires sqlx's `json` feature (decoding
                // JSONB as String would fail at runtime).
                let content: serde_json::Value = r.get(6);

                Chunk {
                    uuid: r.get(0),
                    chunk_id: r.get(1),
                    chunk_index: chunk_index as u32,
                    chunk_type,
                    start_time: r.get(4),
                    end_time: r.get(5),
                    content,
                }
            })
            .collect();

        Ok(chunks)
    }
}

#[async_trait]
impl Database for PostgresDb {
    async fn init() -> Result<Self> {
        let database_url = std::env::var("DATABASE_URL")
            .unwrap_or_else(|_| "postgres://accusys@localhost:5432/momentry".to_string());
        Self::new(&database_url).await
    }
}

src/core/db/qdrant_db.rs (Normal file)
@@ -0,0 +1,88 @@
use anyhow::Result;
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::RwLock;

use super::{Database, SearchResult, VectorStore};

pub struct QdrantDb {
    collection_name: String,
    cache: Arc<RwLock<QdrantCache>>,
}

#[derive(Debug, Default)]
pub struct QdrantCache {
    vectors: std::collections::HashMap<String, Vec<f32>>,
}

impl QdrantDb {
    pub async fn init_collection(&self) -> Result<()> {
        // TODO: Implement actual Qdrant client (this is where
        // self.collection_name will be used). This is a placeholder.
        Ok(())
    }

    pub async fn upsert_vector(&self, chunk_id: &str, vector: &[f32]) -> Result<()> {
        let mut cache = self.cache.write().await;
        cache.vectors.insert(chunk_id.to_string(), vector.to_vec());
        Ok(())
    }
}

#[async_trait]
impl Database for QdrantDb {
    async fn init() -> Result<Self> {
        let collection_name =
            std::env::var("QDRANT_COLLECTION").unwrap_or_else(|_| "momentry_chunks".to_string());

        let db = Self {
            collection_name,
            cache: Arc::new(RwLock::new(QdrantCache::default())),
        };

        db.init_collection().await?;
        Ok(db)
    }
}

#[async_trait]
impl VectorStore for QdrantDb {
    async fn store_vector(&self, chunk_id: &str, vector: &[f32]) -> Result<()> {
        self.upsert_vector(chunk_id, vector).await
    }

    async fn search(&self, query_vector: &[f32], limit: usize) -> Result<Vec<SearchResult>> {
        // Simple cosine similarity search (placeholder)
        let cache = self.cache.read().await;
        let mut results: Vec<SearchResult> = Vec::new();

        for (chunk_id, vector) in &cache.vectors {
            let similarity = cosine_similarity(query_vector, vector);
            results.push(SearchResult {
                chunk_id: chunk_id.clone(),
                score: similarity,
            });
        }

        // total_cmp avoids the panic that partial_cmp().unwrap() would hit on NaN scores.
        results.sort_by(|a, b| b.score.total_cmp(&a.score));
        results.truncate(limit);

        Ok(results)
    }
}

fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {
    if a.len() != b.len() || a.is_empty() {
        return 0.0;
    }

    let dot_product: f32 = a.iter().zip(b.iter()).map(|(x, y)| x * y).sum();
    let norm_a: f32 = a.iter().map(|x| x * x).sum::<f32>().sqrt();
    let norm_b: f32 = b.iter().map(|x| x * x).sum::<f32>().sqrt();

    if norm_a == 0.0 || norm_b == 0.0 {
        return 0.0;
    }

    dot_product / (norm_a * norm_b)
}

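The in-memory search relies on cosine_similarity behaving sensibly at the extremes: parallel vectors score 1.0, orthogonal vectors 0.0, and degenerate inputs are defined as 0.0. A quick check, written as a unit test that would sit in this same file (the function is private to this module):

#[cfg(test)]
mod tests {
    use super::cosine_similarity;

    #[test]
    fn cosine_similarity_extremes() {
        // Same direction (b = 2a) -> 1.0, up to floating-point error.
        assert!((cosine_similarity(&[1.0, 2.0], &[2.0, 4.0]) - 1.0).abs() < 1e-6);
        // Orthogonal vectors -> exactly 0.0.
        assert_eq!(cosine_similarity(&[1.0, 0.0], &[0.0, 1.0]), 0.0);
        // Mismatched lengths and zero vectors are defined as 0.0.
        assert_eq!(cosine_similarity(&[1.0], &[1.0, 2.0]), 0.0);
        assert_eq!(cosine_similarity(&[0.0, 0.0], &[1.0, 2.0]), 0.0);
    }
}
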
src/core/db/redis_db.rs (Normal file)
@@ -0,0 +1,65 @@
use anyhow::Result;
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::RwLock;

use super::Database;

pub struct RedisDb {
    state: Arc<RwLock<RedisState>>,
}

#[derive(Debug, Clone, Default)]
pub struct RedisState {
    pub processing: Vec<String>,
    pub completed: Vec<String>,
    pub failed: Vec<String>,
}

#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct Job {
    pub id: String,
    pub uuid: String,
    pub job_type: String,
    pub status: String,
    pub progress: f32,
    pub created_at: String,
    pub updated_at: String,
}

impl RedisDb {
    pub async fn push_job(&self, _job: &Job) -> Result<()> {
        // TODO: Implement Redis client
        Ok(())
    }

    pub async fn get_pending_jobs(&self) -> Result<Vec<Job>> {
        // TODO: Implement Redis client
        Ok(vec![])
    }

    pub async fn update_job_status(
        &self,
        _job_id: &str,
        _status: &str,
        _progress: f32,
    ) -> Result<()> {
        // TODO: Implement Redis client
        Ok(())
    }

    pub async fn publish_event(&self, _channel: &str, _message: &str) -> Result<()> {
        // TODO: Implement Redis Pub/Sub
        Ok(())
    }
}

#[async_trait]
impl Database for RedisDb {
    async fn init() -> Result<Self> {
        // TODO: Initialize Redis client
        Ok(Self {
            state: Arc::new(RwLock::new(RedisState::default())),
        })
    }
}

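A sketch of how push_job might look against a real server, assuming the redis crate with its tokio-backed async API (the "tokio-comp" feature) — the queue key name is a placeholder, not part of this commit:

use anyhow::Result;
use momentry_core::core::db::redis_db::Job;
use redis::AsyncCommands;

// Minimal sketch: jobs are serialized as JSON onto a list acting as the queue.
async fn push_job_sketch(job: &Job) -> Result<()> {
    let client = redis::Client::open("redis://127.0.0.1:6379/")?;
    let mut conn = client.get_multiplexed_async_connection().await?;

    let payload = serde_json::to_string(job)?;
    // LPUSH returns the new list length.
    let _: i64 = conn.lpush("momentry:jobs", payload).await?;
    Ok(())
}
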
src/core/embedding/comic_embed.rs (Normal file)
@@ -0,0 +1,66 @@
use anyhow::Result;

pub struct Embedder {
    model_path: String,
}

impl Embedder {
    pub fn new(model_path: String) -> Self {
        Self { model_path }
    }

    pub async fn embed_text(&self, text: &str) -> Result<Vec<f32>> {
        // TODO: Implement comic-embed-text model loading and inference.
        // This is a placeholder that generates a deterministic
        // 768-dimensional vector from a hash of the input.
        //
        // Implementation would use:
        // - candle (Rust ML framework) or
        // - ort (ONNX Runtime) to run the model
        //
        // Example with ort:
        //   let session = Session::builder()?
        //       .with_execution_providers([CPUExecutionProvider::default().build()])?
        //       .with_model_from_file(&self.model_path)?;
        //
        //   // Preprocess text to tensor
        //   let input = preprocess_text(text);
        //
        //   // Run inference
        //   let output = session.run(vec![input])?;
        //
        //   // Extract embeddings
        //   let embedding = output[0].view()[..768].to_vec();

        let dim = 768;
        let mut embedding = vec![0.0f32; dim];

        // Simple hash-based embedding for now. The hash is only 64 bits wide,
        // so cycle it across the 768 dimensions (`hash >> i` with i >= 64
        // would overflow the shift and panic in debug builds).
        let hash = self.hash_text(text);
        for (i, v) in embedding.iter_mut().enumerate() {
            *v = ((hash >> (i % 64)) & 1) as f32;
        }

        // Normalize to unit length
        let norm: f32 = embedding.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm > 0.0 {
            for v in &mut embedding {
                *v /= norm;
            }
        }

        Ok(embedding)
    }

    pub async fn embed_chunk_content(&self, chunk: &crate::core::chunk::Chunk) -> Result<Vec<f32>> {
        let text = serde_json::to_string(&chunk.content)?;
        self.embed_text(&text).await
    }

    fn hash_text(&self, text: &str) -> u64 {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let mut hasher = DefaultHasher::new();
        text.hash(&mut hasher);
        hasher.finish()
    }
}

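Two properties of the stand-in embedding worth pinning down while the real model is pending: it is deterministic per input, and the normalization step makes the dot product of two embeddings equal their cosine similarity. A small check (the model path is illustrative; the unit-norm assertion holds unless the 64-bit hash is exactly zero, which is vanishingly unlikely):

use momentry_core::core::embedding::Embedder;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let embedder = Embedder::new("models/comic-embed-text.onnx".to_string());

    let a = embedder.embed_text("hello world").await?;
    let b = embedder.embed_text("hello world").await?;
    assert_eq!(a, b); // same text -> same vector

    // Unit norm, so dot(a, b) is directly a cosine similarity.
    let norm: f32 = a.iter().map(|x| x * x).sum::<f32>().sqrt();
    assert!((norm - 1.0).abs() < 1e-5);
    Ok(())
}
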
src/core/embedding/mod.rs (Normal file)
@@ -0,0 +1,3 @@
pub mod comic_embed;

pub use comic_embed::Embedder;

src/core/mod.rs (Normal file)
@@ -0,0 +1,8 @@
pub mod chunk;
pub mod db;
pub mod embedding;
pub mod overlay;
pub mod probe;
pub mod processor;
pub mod storage;
pub mod thumbnail;

src/core/overlay/mod.rs (Normal file)
@@ -0,0 +1,3 @@
pub mod types;

pub use types::OverlayFlags;

src/core/overlay/types.rs (Normal file)
@@ -0,0 +1,41 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
pub struct OverlayFlags {
    pub asr: bool,
    pub asrx: bool,
    pub ocr: bool,
    pub yolo: bool,
    pub face: bool,
    pub pose: bool,
    pub status: bool,
}

impl Default for OverlayFlags {
    fn default() -> Self {
        Self {
            asr: false,
            asrx: false,
            ocr: false,
            yolo: false,
            face: false,
            pose: false,
            status: true,
        }
    }
}

impl OverlayFlags {
    pub fn toggle(&mut self, layer: char) {
        match layer {
            'a' => self.asr = !self.asr,
            'x' => self.asrx = !self.asrx,
            'o' => self.ocr = !self.ocr,
            'y' => self.yolo = !self.yolo,
            'f' => self.face = !self.face,
            'p' => self.pose = !self.pose,
            'b' => self.status = !self.status,
            _ => {}
        }
    }
}

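The single-character toggles are meant to be driven from player keybindings. A short usage example:

use momentry_core::core::overlay::OverlayFlags;

fn main() {
    let mut flags = OverlayFlags::default(); // only the status bar starts enabled
    flags.toggle('y'); // flip YOLO boxes on
    flags.toggle('b'); // flip the status bar off

    assert!(flags.yolo);
    assert!(!flags.status);
    assert!(!flags.asr);
}
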
src/core/probe/mod.rs (Normal file)
@@ -0,0 +1,3 @@
pub mod probe;

pub use probe::{probe_video, ProbeResult};

src/core/probe/probe.rs (Normal file)
@@ -0,0 +1,84 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::process::Command;

#[derive(Debug, Serialize, Deserialize)]
pub struct ProbeResult {
    pub streams: Vec<StreamInfo>,
    pub format: FormatInfo,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct StreamInfo {
    pub index: u32,
    pub codec_name: Option<String>,
    pub codec_type: Option<String>,
    pub width: Option<u32>,
    pub height: Option<u32>,
    pub r_frame_rate: Option<String>,
    pub duration: Option<String>,
    pub sample_rate: Option<String>,
    pub channels: Option<u32>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct FormatInfo {
    pub filename: Option<String>,
    pub format_name: Option<String>,
    pub duration: Option<String>,
    pub size: Option<String>,
    pub bit_rate: Option<String>,
}

pub fn probe_video(video_path: &str) -> Result<ProbeResult> {
    let output = Command::new("ffprobe")
        .args([
            "-v",
            "quiet",
            "-print_format",
            "json",
            "-show_format",
            "-show_streams",
            video_path,
        ])
        .output()
        .context("Failed to run ffprobe")?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        anyhow::bail!("ffprobe failed: {}", stderr);
    }

    let json_str = String::from_utf8_lossy(&output.stdout);
    let json: serde_json::Value =
        serde_json::from_str(&json_str).context("Failed to parse ffprobe output")?;

    let streams: Vec<StreamInfo> = json["streams"]
        .as_array()
        .map(|arr| {
            arr.iter()
                .map(|s| StreamInfo {
                    index: s["index"].as_u64().unwrap_or(0) as u32,
                    codec_name: s["codec_name"].as_str().map(String::from),
                    codec_type: s["codec_type"].as_str().map(String::from),
                    width: s["width"].as_u64().map(|v| v as u32),
                    height: s["height"].as_u64().map(|v| v as u32),
                    r_frame_rate: s["r_frame_rate"].as_str().map(String::from),
                    duration: s["duration"].as_str().map(String::from),
                    sample_rate: s["sample_rate"].as_str().map(String::from),
                    channels: s["channels"].as_u64().map(|v| v as u32),
                })
                .collect()
        })
        .unwrap_or_default();

    let format = FormatInfo {
        filename: json["format"]["filename"].as_str().map(String::from),
        format_name: json["format"]["format_name"].as_str().map(String::from),
        duration: json["format"]["duration"].as_str().map(String::from),
        size: json["format"]["size"].as_str().map(String::from),
        bit_rate: json["format"]["bit_rate"].as_str().map(String::from),
    };

    Ok(ProbeResult { streams, format })
}

src/core/processor/asr.rs (Normal file)
@@ -0,0 +1,73 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::path::Path;
use std::process::Command;

#[derive(Debug, Serialize, Deserialize)]
pub struct AsrResult {
    pub language: Option<String>,
    pub language_probability: Option<f64>,
    pub segments: Vec<AsrSegment>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct AsrSegment {
    pub start: f64,
    pub end: f64,
    pub text: String,
}

pub async fn process_asr(video_path: &str, output_path: &str) -> Result<AsrResult> {
    let script_path = Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("scripts")
        .join("asr_processor.py");

    let venv_python = Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("venv")
        .join("bin")
        .join("python");

    println!("[ASR] Starting ASR processing...");
    println!("[ASR] Video: {}", video_path);

    let output = Command::new(venv_python)
        .arg(script_path)
        .arg(video_path)
        .arg(output_path)
        .output()
        .context("Failed to run ASR processor")?;

    let stderr = String::from_utf8_lossy(&output.stderr);

    // The Python script reports progress on stderr via ASR_* marker lines.
    for line in stderr.lines() {
        if line.starts_with("ASR_START") {
            println!("[ASR] Loading model...");
        } else if line.starts_with("ASR_LANGUAGE:") {
            let lang = line.trim_start_matches("ASR_LANGUAGE:");
            println!("[ASR] Detected language: {}", lang);
        } else if line.starts_with("ASR_PROGRESS:") {
            let count = line.trim_start_matches("ASR_PROGRESS:");
            println!("[ASR] Processed {} segments...", count);
        } else if line.starts_with("ASR_COMPLETE:") {
            let count = line.trim_start_matches("ASR_COMPLETE:");
            println!("[ASR] Completed! Total: {} segments", count);
        }
    }

    if !output.status.success() {
        anyhow::bail!("ASR failed: {}", stderr);
    }

    let json_str = std::fs::read_to_string(output_path).context("Failed to read ASR output")?;

    let result: AsrResult =
        serde_json::from_str(&json_str).context("Failed to parse ASR output")?;

    println!(
        "[ASR] Result: {} segments, language: {:?}",
        result.segments.len(),
        result.language
    );

    Ok(result)
}

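The contract with scripts/asr_processor.py is simply the JSON shape of AsrResult written to output_path. A hand-built value that round-trips through the same serde types (field values are illustrative):

use momentry_core::core::processor::asr::AsrResult;

fn main() -> anyhow::Result<()> {
    let sample = serde_json::json!({
        "language": "en",
        "language_probability": 0.97,
        "segments": [
            { "start": 0.0, "end": 2.4, "text": "Hello there." },
            { "start": 2.4, "end": 5.1, "text": "Welcome to Momentry." }
        ]
    });

    let parsed: AsrResult = serde_json::from_value(sample)?;
    assert_eq!(parsed.segments.len(), 2);
    assert_eq!(parsed.language.as_deref(), Some("en"));
    Ok(())
}
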
src/core/processor/asrx.rs (Normal file)
@@ -0,0 +1,28 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct AsrxResult {
    pub segments: Vec<AsrxSegment>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct AsrxSegment {
    pub start: f64,
    pub end: f64,
    pub text: String,
    pub speaker_id: String,
    pub speaker_embedding: Option<Vec<f32>>,
}

pub async fn process_asrx(video_path: &str, _output_path: &str) -> Result<AsrxResult> {
    // TODO: Implement speaker diarization
    // Options:
    // 1. Use pyannote.audio
    // 2. Use whisperx
    // 3. Use Python subprocess

    println!("Processing speaker diarization for: {}", video_path);

    Ok(AsrxResult { segments: vec![] })
}

src/core/processor/face.rs (Normal file)
@@ -0,0 +1,36 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct FaceResult {
    pub frames: Vec<FaceFrame>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct FaceFrame {
    pub frame: u64,
    pub timestamp: f64,
    pub faces: Vec<Face>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Face {
    pub face_id: String,
    pub x: i32,
    pub y: i32,
    pub width: i32,
    pub height: i32,
    pub confidence: f32,
    pub embedding: Option<Vec<f32>>,
}

pub async fn process_face(video_path: &str, _output_path: &str) -> Result<FaceResult> {
    // TODO: Implement face detection
    // Options:
    // 1. Use MTCNN or RetinaFace with ONNX
    // 2. Use Python subprocess

    println!("Processing face detection for: {}", video_path);

    Ok(FaceResult { frames: vec![] })
}

src/core/processor/mod.rs (Normal file)
@@ -0,0 +1,13 @@
pub mod asr;
pub mod asrx;
pub mod face;
pub mod ocr;
pub mod pose;
pub mod yolo;

pub use asr::{process_asr, AsrResult, AsrSegment};
pub use asrx::process_asrx;
pub use face::process_face;
pub use ocr::process_ocr;
pub use pose::process_pose;
pub use yolo::process_yolo;

src/core/processor/ocr.rs (Normal file)
@@ -0,0 +1,36 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct OcrResult {
    pub frames: Vec<OcrFrame>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct OcrFrame {
    pub frame: u64,
    pub timestamp: f64,
    pub texts: Vec<OcrText>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct OcrText {
    pub text: String,
    pub x: i32,
    pub y: i32,
    pub width: i32,
    pub height: i32,
    pub confidence: f32,
}

pub async fn process_ocr(video_path: &str, _output_path: &str) -> Result<OcrResult> {
    // TODO: Implement OCR processing
    // Options:
    // 1. Use tesseract
    // 2. Use Python pytesseract via subprocess
    // 3. Use Rust OCR library

    println!("Processing OCR for: {}", video_path);

    Ok(OcrResult { frames: vec![] })
}

src/core/processor/pose.rs (Normal file)
@@ -0,0 +1,47 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct PoseResult {
    pub frames: Vec<PoseFrame>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct PoseFrame {
    pub frame: u64,
    pub timestamp: f64,
    pub persons: Vec<PersonPose>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct PersonPose {
    pub keypoints: Vec<Keypoint>,
    pub bbox: Bbox,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Keypoint {
    pub name: String,
    pub x: f32,
    pub y: f32,
    pub confidence: f32,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Bbox {
    pub x: i32,
    pub y: i32,
    pub width: i32,
    pub height: i32,
}

pub async fn process_pose(video_path: &str, _output_path: &str) -> Result<PoseResult> {
    // TODO: Implement pose estimation
    // Options:
    // 1. Use MoveNet or PoseNet with ONNX
    // 2. Use Python subprocess with ultralytics

    println!("Processing pose estimation for: {}", video_path);

    Ok(PoseResult { frames: vec![] })
}

src/core/processor/yolo.rs (Normal file)
@@ -0,0 +1,36 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct YoloResult {
    pub frames: Vec<YoloFrame>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct YoloFrame {
    pub frame: u64,
    pub timestamp: f64,
    pub objects: Vec<YoloObject>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct YoloObject {
    pub class_name: String,
    pub class_id: u32,
    pub x: i32,
    pub y: i32,
    pub width: i32,
    pub height: i32,
    pub confidence: f32,
}

pub async fn process_yolo(video_path: &str, _output_path: &str) -> Result<YoloResult> {
    // TODO: Implement YOLO processing
    // Options:
    // 1. Use ONNX Runtime (ort) with YOLO model
    // 2. Use Python subprocess with ultralytics

    println!("Processing YOLO for: {}", video_path);

    Ok(YoloResult { frames: vec![] })
}

src/core/storage/file_manager.rs (Normal file)
@@ -0,0 +1,57 @@
use anyhow::Result;
use std::fs;
use std::path::{Path, PathBuf};

pub struct FileManager {
    base_dir: PathBuf,
}

impl FileManager {
    pub fn new(base_dir: PathBuf) -> Self {
        Self { base_dir }
    }

    pub fn get_json_path(&self, uuid: &str, suffix: &str) -> PathBuf {
        self.base_dir.join(format!("{}.{}.json", uuid, suffix))
    }

    pub fn save_json(&self, uuid: &str, suffix: &str, content: &str) -> Result<PathBuf> {
        let path = self.get_json_path(uuid, suffix);
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(&path, content)?;
        Ok(path)
    }

    pub fn load_json(&self, uuid: &str, suffix: &str) -> Result<String> {
        let path = self.get_json_path(uuid, suffix);
        Ok(fs::read_to_string(path)?)
    }

    pub fn exists(&self, uuid: &str, suffix: &str) -> bool {
        self.get_json_path(uuid, suffix).exists()
    }

    pub fn list_video_files(dir: &Path) -> Result<Vec<PathBuf>> {
        let mut videos = Vec::new();

        if !dir.exists() {
            return Ok(videos);
        }

        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            let path = entry.path();

            if let Some(ext) = path.extension() {
                let ext = ext.to_string_lossy().to_lowercase();
                if matches!(ext.as_str(), "mp4" | "mkv" | "avi" | "mov" | "webm" | "m4v") {
                    videos.push(path);
                }
            }
        }

        Ok(videos)
    }
}

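The {uuid}.{suffix}.json convention is what ties each processor's output back to a video. A short usage example (the UUID is illustrative):

use momentry_core::FileManager;
use std::path::PathBuf;

fn main() -> anyhow::Result<()> {
    let fm = FileManager::new(PathBuf::from("."));

    // Paths follow ./{uuid}.{suffix}.json
    let path = fm.get_json_path("a1b2c3d4e5f60718", "asr");
    assert_eq!(path, PathBuf::from("./a1b2c3d4e5f60718.asr.json"));

    fm.save_json("a1b2c3d4e5f60718", "asr", "{\"segments\":[]}")?;
    assert!(fm.exists("a1b2c3d4e5f60718", "asr"));
    Ok(())
}
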
src/core/storage/mod.rs (Normal file)
@@ -0,0 +1,5 @@
pub mod file_manager;
pub mod uuid;

pub use file_manager::FileManager;
pub use uuid::compute_uuid;

src/core/storage/uuid.rs (Normal file)
@@ -0,0 +1,44 @@
use sha2::{Digest, Sha256};
use std::path::PathBuf;

/// Compute UUID from file path using SHA256:
/// UUID = SHA256(user_path + "/" + filename)[0:16] (first 16 hex chars)
pub fn compute_uuid(user_path: &str, filename: &str) -> String {
    let key = format!("{}/{}", user_path.trim_end_matches('/'), filename);
    let hash = Sha256::digest(key.as_bytes());
    let hash_str = hex::encode(hash);
    hash_str[0..16].to_string()
}

/// Compute UUID from full file path
pub fn compute_uuid_from_path(full_path: &str) -> String {
    let path = PathBuf::from(full_path);
    let parent = path
        .parent()
        .map(|p| p.to_string_lossy().to_string())
        .unwrap_or_default();
    let filename = path
        .file_name()
        .map(|n| n.to_string_lossy().to_string())
        .unwrap_or_default();

    compute_uuid(&parent, &filename)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_uuid_computation() {
        let uuid = compute_uuid("/Users/test/Videos", "video.mp4");
        assert_eq!(uuid.len(), 16);
        println!("UUID: {}", uuid);
    }

    #[test]
    fn test_uuid_from_path() {
        let uuid = compute_uuid_from_path("/Users/test/Videos/video.mp4");
        assert_eq!(uuid.len(), 16);
    }
}

src/core/thumbnail/mod.rs (Normal file)
@@ -0,0 +1,108 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use std::process::Command;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThumbnailResult {
    pub uuid: String,
    pub count: usize,
    pub files: Vec<String>,
}

pub struct ThumbnailExtractor {
    output_dir: PathBuf,
    count: u32,
}

impl ThumbnailExtractor {
    pub fn new(output_dir: PathBuf, count: u32) -> Self {
        Self { output_dir, count }
    }

    pub fn extract(&self, video_path: &str, uuid: &str) -> Result<ThumbnailResult> {
        let script_path = Path::new(env!("CARGO_MANIFEST_DIR"))
            .join("scripts")
            .join("thumbnail_extractor.py");

        // Use the Python from the venv so the interpreter version is right
        // and dependencies stay isolated.
        let venv_python = Path::new(env!("CARGO_MANIFEST_DIR"))
            .join("venv")
            .join("bin")
            .join("python");

        let output = Command::new(venv_python)
            .arg(script_path)
            .arg(video_path)
            .arg(uuid)
            .arg("-o")
            .arg(&self.output_dir)
            .arg("-c")
            .arg(self.count.to_string())
            .output()
            .context("Failed to run thumbnail extractor")?;

        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            anyhow::bail!("Thumbnail extraction failed: {}", stderr);
        }

        let json_str = String::from_utf8_lossy(&output.stdout);
        let result: ThumbnailResult =
            serde_json::from_str(&json_str).context("Failed to parse thumbnail result")?;

        Ok(result)
    }

    pub fn get_or_create(&self, video_path: &str, uuid: &str) -> Result<ThumbnailResult> {
        let thumb_dir = self.output_dir.join(uuid);

        // Check if thumbnails already exist
        if thumb_dir.exists() {
            let files: Vec<String> = (0..self.count)
                .map(|i| thumb_dir.join(format!("thumb_{:03}.jpg", i)))
                .filter(|p| p.exists())
                .map(|p| p.to_string_lossy().to_string())
                .collect();

            if files.len() as u32 == self.count {
                return Ok(ThumbnailResult {
                    uuid: uuid.to_string(),
                    count: files.len(),
                    files,
                });
            }
        }

        // Extract new thumbnails
        self.extract(video_path, uuid)
    }

    pub fn get_thumbnails(&self, uuid: &str) -> Option<Vec<String>> {
        let thumb_dir = self.output_dir.join(uuid);

        if !thumb_dir.exists() {
            return None;
        }

        // Scans the first 10 candidate filenames regardless of self.count.
        let files: Vec<String> = (0..10)
            .map(|i| thumb_dir.join(format!("thumb_{:03}.jpg", i)))
            .filter(|p| p.exists())
            .map(|p| p.to_string_lossy().to_string())
            .collect();

        if files.is_empty() {
            None
        } else {
            Some(files)
        }
    }

    pub fn cleanup(&self, uuid: &str) -> Result<()> {
        let thumb_dir = self.output_dir.join(uuid);
        if thumb_dir.exists() {
            std::fs::remove_dir_all(&thumb_dir).context("Failed to remove thumbnail directory")?;
        }
        Ok(())
    }
}

src/lib.rs (Normal file)
@@ -0,0 +1,8 @@
pub mod api;
pub mod core;
pub mod player;
pub mod watcher;

// `crate::` is required here: with uniform paths, a bare `core::...` is
// ambiguous between the local `core` module and the built-in `core` crate.
pub use crate::core::chunk::{Chunk, ChunkSplitter, ChunkType};
pub use crate::core::db::{Database, MongoDb, PostgresDb, QdrantDb, RedisDb, VideoRecord};
pub use crate::core::probe::ProbeResult;
pub use crate::core::storage::file_manager::FileManager;
pub use crate::core::storage::uuid;
pub use crate::core::thumbnail::{ThumbnailExtractor, ThumbnailResult};

src/main.rs (Normal file)
@@ -0,0 +1,340 @@
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use std::path::Path;

use momentry_core::{Database, PostgresDb, VideoRecord};

#[derive(Parser)]
#[command(name = "momentry")]
#[command(about = "Digital asset management system with video analysis and RAG")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    /// Register a video file
    Register {
        /// Video file path or URL
        path: String,
    },
    /// Process video (generate all JSON files)
    Process {
        /// UUID or path
        target: String,
    },
    /// Generate chunks and store in database
    Chunk {
        /// UUID
        uuid: String,
    },
    /// Vectorize chunks
    Vectorize {
        /// UUID (or 'all' for all)
        uuid: String,
    },
    /// Play video with overlays
    Play {
        /// Video path or UUID
        target: String,
    },
    /// Start watching directories
    Watch {
        /// Directories to watch (comma separated)
        directories: Option<String>,
    },
    /// Start API server
    Server {
        /// Host
        #[arg(long, default_value = "127.0.0.1")]
        host: String,
        /// Port
        #[arg(long, default_value = "3000")]
        port: u16,
    },
    /// Query using RAG
    Query {
        /// Query text
        query: String,
    },
    /// Lookup UUID from path
    Lookup {
        /// File path
        path: String,
    },
    /// Resolve path from UUID
    Resolve {
        /// UUID
        uuid: String,
    },
    /// Generate thumbnails for videos
    Thumbnails {
        /// UUID (optional, generates for all if not specified)
        uuid: Option<String>,
        /// Number of thumbnails per video
        #[arg(short, long, default_value = "6")]
        count: u32,
    },
}

#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt::init();

    let cli = Cli::parse();

    match cli.command {
        Commands::Register { path } => {
            println!("Registering: {}", path);

            // Compute UUID
            let uuid = momentry_core::uuid::compute_uuid_from_path(&path);
            println!("UUID: {}", uuid);

            // Run ffprobe
            let probe_result = momentry_core::core::probe::probe_video(&path)?;

            println!("\nVideo probe results:");
            let duration = probe_result
                .format
                .duration
                .as_ref()
                .and_then(|s| s.parse::<f64>().ok())
                .unwrap_or(0.0);
            println!("  Duration: {}s", duration);
            if let Some(size) = &probe_result.format.size {
                println!("  Size: {}", size);
            }

            let mut width = 0u32;
            let mut height = 0u32;
            let mut fps = 0.0;

            for stream in &probe_result.streams {
                if stream.codec_type.as_deref() == Some("video") {
                    width = stream.width.unwrap_or(0);
                    height = stream.height.unwrap_or(0);
                    if let Some(fps_str) = &stream.r_frame_rate {
                        if let Some((num, den)) = fps_str.split_once('/') {
                            if let (Ok(n), Ok(d)) = (num.parse::<f64>(), den.parse::<f64>()) {
                                if d > 0.0 {
                                    fps = n / d;
                                }
                            }
                        }
                    }
                    println!("  Video: {}x{}", width, height);
                    if let Some(fps_str) = &stream.r_frame_rate {
                        println!("  FPS: {}", fps_str);
                    }
                }
                if stream.codec_type.as_deref() == Some("audio") {
                    println!("  Audio: {} channels", stream.channels.unwrap_or(0));
                    if let Some(sr) = &stream.sample_rate {
                        println!("  Sample Rate: {}", sr);
                    }
                }
            }

            // Save probe JSON to file
            let file_manager = momentry_core::FileManager::new(std::path::PathBuf::from("."));
            let json_str = serde_json::to_string_pretty(&probe_result)?;
            let json_path = file_manager.save_json(&uuid, "probe", &json_str)?;
            println!("\nProbe JSON saved to: {:?}", json_path);

            // Store in PostgreSQL
            println!("\nStoring in database...");
            let db = PostgresDb::init().await?;
            let file_path = Path::new(&path)
                .canonicalize()
                .map(|p| p.to_string_lossy().to_string())
                .unwrap_or_else(|_| path.clone());
            let file_name = Path::new(&path)
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_default();

            let record = VideoRecord {
                id: 0,
                uuid: uuid.clone(),
                file_path,
                file_name,
                duration,
                width,
                height,
                fps,
                probe_json: Some(json_str),
                created_at: String::new(),
            };

            let video_id = db.register_video(&record).await?;
            println!("Video registered with ID: {}", video_id);

            Ok(())
        }
        Commands::Process { target } => {
            println!("Processing: {}", target);

            // Compute UUID if a path is given
            let uuid = if target.len() == 16 && !target.contains('/') {
                target.clone()
            } else {
                momentry_core::uuid::compute_uuid_from_path(&target)
            };

            // Get video from database
            let db = PostgresDb::init().await?;
            let video = db
                .get_video_by_uuid(&uuid)
                .await?
                .ok_or_else(|| anyhow::anyhow!("Video not found: {}", uuid))?;

            let video_path = &video.file_path;
            // Not used yet; later processors will write through it.
            let _file_manager = momentry_core::FileManager::new(std::path::PathBuf::from("."));

            // Process ASR
            println!("\nRunning ASR...");
            let asr_path = format!("{}.asr.json", uuid);
            let asr_result =
                momentry_core::core::processor::process_asr(video_path, &asr_path).await?;
            let asr_json = serde_json::to_string_pretty(&asr_result)?;
            std::fs::write(&asr_path, &asr_json)?;
            println!("ASR saved to: {}", asr_path);
            println!("  {} segments found", asr_result.segments.len());

            // TODO: Process OCR, YOLO, Face, Pose, ASRx
            println!("\nOther processors not yet implemented.");

            Ok(())
        }
        Commands::Chunk { uuid } => {
            println!("Chunking: {}", uuid);

            let db = PostgresDb::init().await?;
            let video = db
                .get_video_by_uuid(&uuid)
                .await?
                .ok_or_else(|| anyhow::anyhow!("Video not found: {}", uuid))?;

            // Read ASR JSON
            let asr_path = format!("{}.asr.json", uuid);
            let asr_json = std::fs::read_to_string(&asr_path)
                .context("ASR file not found. Run 'process' first.")?;

            let asr_result: momentry_core::core::processor::asr::AsrResult =
                serde_json::from_str(&asr_json)?;

            println!("Processing {} ASR segments...", asr_result.segments.len());

            // Split into sentence chunks
            let mut sentence_chunks = Vec::new();
            for (i, seg) in asr_result.segments.iter().enumerate() {
                let chunk = momentry_core::Chunk::new(
                    uuid.clone(),
                    i as u32,
                    momentry_core::ChunkType::Sentence,
                    seg.start,
                    seg.end,
                    serde_json::json!({
                        "text": seg.text,
                    }),
                );
                sentence_chunks.push(chunk);
            }

            // Split into time-based chunks (10 seconds)
            let splitter = momentry_core::core::chunk::ChunkSplitter::new(10.0);
            let time_chunks = splitter.split_time_based(&uuid, video.duration);

            // Store in database
            println!("Storing {} sentence chunks...", sentence_chunks.len());
            for chunk in &sentence_chunks {
                db.store_chunk(chunk).await?;
            }

            println!("Storing {} time-based chunks...", time_chunks.len());
            for chunk in &time_chunks {
                db.store_chunk(chunk).await?;
            }

            println!(
                "Done! {} total chunks stored.",
                sentence_chunks.len() + time_chunks.len()
            );

            Ok(())
        }
        Commands::Vectorize { uuid } => {
            println!("Vectorizing: {}", uuid);
            // TODO: Implement vectorize
            Ok(())
        }
        Commands::Play { target } => {
            println!("Playing: {}", target);
            // TODO: Implement play
            Ok(())
        }
        Commands::Watch { directories } => {
            println!("Starting watcher: {:?}", directories);
            // TODO: Implement watch
            Ok(())
        }
        Commands::Server { host, port } => {
            println!("Starting API server at {}:{}", host, port);
            // TODO: Implement server
            Ok(())
        }
        Commands::Query { query } => {
            println!("Query: {}", query);
            // TODO: Implement query
            Ok(())
        }
        Commands::Lookup { path } => {
            let uuid = momentry_core::uuid::compute_uuid_from_path(&path);
            println!("Path: {}", path);
            println!("UUID: {}", uuid);
            Ok(())
        }
        Commands::Resolve { uuid } => {
            println!("Resolving UUID: {}", uuid);
            // TODO: Look up path from UUID in database
            println!("(Database lookup not implemented yet)");
            Ok(())
        }
        Commands::Thumbnails { uuid, count } => {
            let db = PostgresDb::init().await?;

            let videos = if let Some(ref uuid) = uuid {
                vec![db
                    .get_video_by_uuid(uuid)
                    .await?
                    .ok_or_else(|| anyhow::anyhow!("Video not found: {}", uuid))?]
            } else {
                db.list_videos().await?
            };

            let output_dir = std::path::PathBuf::from("thumbnails");
            let extractor = momentry_core::ThumbnailExtractor::new(output_dir, count);

            for video in videos {
                println!(
                    "\nGenerating thumbnails for: {} ({})",
                    video.file_name, video.uuid
                );

                match extractor.get_or_create(&video.file_path, &video.uuid) {
                    Ok(result) => {
                        println!("  Generated {} thumbnails", result.count);
                    }
                    Err(e) => {
                        println!("  Error: {}", e);
                    }
                }
            }

            println!("\nThumbnails generated successfully!");
            Ok(())
        }
    }
}

src/player/mod.rs (Normal file)
@@ -0,0 +1,3 @@
pub mod player;

pub use player::{play_video, PlayerConfig};

src/player/player.rs (Normal file)
@@ -0,0 +1,22 @@
use crate::core::overlay::OverlayFlags;

// Debug is required by the `{:?}` format in play_video below.
#[derive(Debug)]
pub struct PlayerConfig {
    pub sync_delay_ms: u64,
    pub overlay_flags: OverlayFlags,
}

impl Default for PlayerConfig {
    fn default() -> Self {
        Self {
            sync_delay_ms: 800,
            overlay_flags: OverlayFlags::default(),
        }
    }
}

pub async fn play_video(path: &str, config: PlayerConfig) -> anyhow::Result<()> {
    // TODO: Implement video player with overlay
    // Will integrate existing video_player functionality
    println!("Playing: {} with config: {:?}", path, config);
    Ok(())
}

src/watcher/mod.rs (Normal file)
@@ -0,0 +1,3 @@
pub mod watcher;

pub use watcher::{watch_directories, WatcherConfig};

src/watcher/watcher.rs (Normal file)
@@ -0,0 +1,41 @@
use anyhow::Result;
use std::path::Path;
use tokio::sync::mpsc;

pub struct WatcherConfig {
    pub directories: Vec<String>,
    pub poll_interval_ms: u64,
}

impl Default for WatcherConfig {
    fn default() -> Self {
        Self {
            directories: vec![],
            poll_interval_ms: 5000,
        }
    }
}

pub async fn watch_directories(config: WatcherConfig, _tx: mpsc::Sender<String>) -> Result<()> {
    // TODO: Implement directory watcher
    //
    // Options:
    // 1. Use notify crate for file system events
    // 2. Use polling as fallback
    //
    // When a new video file is detected:
    // - Send job to Redis queue
    // - Trigger registration process

    println!("Watching directories: {:?}", config.directories);

    for dir in &config.directories {
        if Path::new(dir).exists() {
            println!("Directory exists: {}", dir);
        }
    }

    Ok(())
}

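A sketch of option 1 using the notify crate (6.x API assumed; not a dependency confirmed by this commit). It prints newly created paths; bridging the events into the tokio mpsc::Sender from the real signature is left out for brevity:

use anyhow::Result;
use notify::{Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
use std::path::Path;

// Minimal sketch: report file-creation events under `dir`, recursively.
fn watch_sketch(dir: &str) -> Result<()> {
    let (tx, rx) = std::sync::mpsc::channel();

    let mut watcher = RecommendedWatcher::new(
        move |res: notify::Result<Event>| {
            let _ = tx.send(res);
        },
        notify::Config::default(),
    )?;
    watcher.watch(Path::new(dir), RecursiveMode::Recursive)?;

    for res in rx {
        let event = res?;
        if matches!(event.kind, EventKind::Create(_)) {
            for path in event.paths {
                println!("New file: {}", path.display());
            }
        }
    }
    Ok(())
}
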