feat: add klog

This commit is contained in:
2026-04-29 17:57:41 +09:00
commit f9f009fcd2
18 changed files with 3923 additions and 0 deletions

18
backend/Cargo.toml Normal file
View File

@@ -0,0 +1,18 @@
[package]
name = "klog-backend"
version = "0.1.0"
edition = "2024"
[dependencies]
# Shared request/response DTOs used by both the backend and the CLI.
klog-types = { path = "../types" }
# Web framework; "multipart" enables the file-upload extractor used in handlers.rs.
axum = { version = "0.8.9", features = ["multipart"] }
# Async runtime; "full" pulls in fs, net, macros, etc.
tokio = { version = "1.52.1", features = ["full"] }
serde_json = "1.0.149"
# HTTP client used to validate tokens against the Gitea API (auth.rs).
reqwest = { version = "0.13.3", features = ["form", "json"] }
# SHA-256 hashing of stored files (storage.rs); hex renders the digest.
sha2 = "0.11.0"
# ZIP archive creation for batched admin downloads (handlers.rs).
zip = "8.6.0"
hex = "0.4.3"
tower-http = { version = "0.6.8", features = ["trace"] }
serde = { version = "1.0.228", features = ["derive"] }
# Structured logging; subscriber configured from RUST_LOG in main.rs.
tracing = "0.1.44"
tracing-subscriber = { version = "0.3.23", features = ["env-filter", "fmt", "json"] }

50
backend/Dockerfile Normal file
View File

@@ -0,0 +1,50 @@
# ── Build stage ───────────────────────────────────────────────────────────────
FROM rust:1.87-slim AS builder
WORKDIR /build
# pkg-config + libssl-dev are needed to build reqwest's native-TLS stack.
RUN apt-get update && apt-get install -y pkg-config libssl-dev && rm -rf /var/lib/apt/lists/*
# Copy workspace manifests
COPY Cargo.toml Cargo.lock ./
# Copy only the crates needed for klog-backend
COPY crates/ktracing/Cargo.toml crates/ktracing/Cargo.toml
COPY crates/ktracing-subscriber/Cargo.toml crates/ktracing-subscriber/Cargo.toml
COPY klog/backend/Cargo.toml klog/backend/Cargo.toml
COPY klog/klog-cli/Cargo.toml klog/klog-cli/Cargo.toml
COPY klog/types/Cargo.toml klog/types/Cargo.toml
# Stub out every workspace member so Cargo can resolve the graph without full source
RUN find . -name "Cargo.toml" -not -path "./Cargo.toml" | while read f; do \
dir=$(dirname "$f"); \
mkdir -p "$dir/src"; \
echo 'fn main() {}' > "$dir/src/main.rs"; \
touch "$dir/src/lib.rs"; \
done
# Build deps only (cache layer)
# NOTE: failures are deliberately swallowed (`|| true`) — this pass exists only
# to populate the dependency cache; the real build below surfaces any errors.
RUN cargo build --release -p klog-backend 2>/dev/null || true
# Now copy real source
COPY crates/ktracing/src crates/ktracing/src
COPY crates/ktracing-subscriber/src crates/ktracing-subscriber/src
COPY klog/backend/src klog/backend/src
COPY klog/types/src klog/types/src
# Touch to invalidate cached stubs
RUN find klog/backend/src klog/types/src crates/ktracing/src crates/ktracing-subscriber/src \
-name "*.rs" -exec touch {} +
RUN cargo build --release -p klog-backend
# ── Runtime image ─────────────────────────────────────────────────────────────
FROM debian:bookworm-slim
# ca-certificates so reqwest can verify TLS when calling the Gitea API.
RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
COPY --from=builder /build/target/release/klog-backend /usr/local/bin/klog-backend
ENV PORT=3000
EXPOSE 3000
CMD ["/usr/local/bin/klog-backend"]

92
backend/src/auth.rs Normal file
View File

@@ -0,0 +1,92 @@
use axum::{
extract::FromRequestParts,
http::{request::Parts, HeaderMap, StatusCode},
};
use std::sync::Arc;
use crate::AppState;
/// Authenticated requester, produced by the `FromRequestParts` extractor below.
/// Handlers receive this once the Bearer token has been validated against Gitea.
pub struct AuthUser {
    /// Gitea `login` name of the token's owner.
    pub username: String,
    /// True when `username` equals the configured `ADMIN_USERNAME`.
    pub is_admin: bool,
}
impl FromRequestParts<Arc<AppState>> for AuthUser {
    type Rejection = (StatusCode, String);

    /// Authenticates a request by delegating token validation to Gitea.
    ///
    /// Flow:
    /// 1. Pull the `Authorization: Bearer <token>` header (401 if missing).
    /// 2. Call Gitea's `/api/v1/user` with that token (401 if Gitea rejects it,
    ///    500 on transport/parse failures — with the error logged).
    /// 3. The configured admin is always allowed; any other user must appear in
    ///    `<data_dir>/users.json` (403 otherwise).
    ///
    /// NOTE(review): `users.json` is re-read from disk on every non-admin
    /// request — presumably acceptable at this traffic level; confirm.
    async fn from_request_parts(
        parts: &mut Parts,
        state: &Arc<AppState>,
    ) -> Result<Self, Self::Rejection> {
        let token = extract_bearer(&parts.headers)
            .ok_or_else(|| (StatusCode::UNAUTHORIZED, "Missing Bearer token".to_string()))?;
        // Ask Gitea who owns this token.
        let resp = state
            .http_client
            .get(format!("{}/api/v1/user", state.config.gitea_url))
            .bearer_auth(token)
            .send()
            .await
            .map_err(|e| {
                let msg = format!("Gitea /api/v1/user request failed: {e}");
                tracing::error!("{msg}");
                (StatusCode::INTERNAL_SERVER_ERROR, msg)
            })?;
        if resp.status() == reqwest::StatusCode::UNAUTHORIZED {
            tracing::warn!("Gitea rejected token (401)");
            return Err((StatusCode::UNAUTHORIZED, "Invalid token".to_string()));
        }
        // Read the raw body first so a parse failure can log what Gitea sent.
        let raw = resp.text().await.map_err(|e| {
            let msg = format!("Gitea /api/v1/user read body failed: {e}");
            tracing::error!("{msg}");
            (StatusCode::INTERNAL_SERVER_ERROR, msg)
        })?;
        tracing::debug!(body = %raw, "Gitea /api/v1/user response");
        let body: serde_json::Value = serde_json::from_str(&raw).map_err(|e| {
            let msg = format!("Gitea /api/v1/user parse failed: {e} — body: {raw}");
            tracing::error!("{msg}");
            (StatusCode::INTERNAL_SERVER_ERROR, msg)
        })?;
        // `login` is the canonical username field in Gitea's user payload.
        let username = body["login"]
            .as_str()
            .ok_or_else(|| {
                let msg = format!("No `login` field in /api/v1/user response: {body}");
                tracing::error!("{msg}");
                (StatusCode::INTERNAL_SERVER_ERROR, msg)
            })?
            .to_string();
        let is_admin = username == state.config.admin_username;
        if !is_admin {
            // Non-admins must be explicitly allowlisted in users.json
            // (a JSON array of usernames).
            let users_path = state.config.data_dir.join("users.json");
            let raw = tokio::fs::read_to_string(&users_path).await.map_err(|e| {
                let msg = format!("Cannot read users.json: {e}");
                tracing::error!("{msg}");
                (StatusCode::INTERNAL_SERVER_ERROR, msg)
            })?;
            let users: Vec<String> = serde_json::from_str(&raw).map_err(|e| {
                let msg = format!("Invalid users.json: {e}");
                tracing::error!("{msg}");
                (StatusCode::INTERNAL_SERVER_ERROR, msg)
            })?;
            if !users.contains(&username) {
                tracing::warn!(username, "User not in allowlist");
                return Err((StatusCode::FORBIDDEN, "User not in allowlist".to_string()));
            }
        }
        tracing::debug!(username, is_admin, "Auth ok");
        Ok(AuthUser { username, is_admin })
    }
}
/// Returns the token portion of an `Authorization: Bearer <token>` header,
/// or `None` when the header is absent, non-UTF-8, or uses another scheme.
fn extract_bearer(headers: &HeaderMap) -> Option<&str> {
    let header = headers.get("Authorization")?;
    let value = header.to_str().ok()?;
    value.strip_prefix("Bearer ")
}

25
backend/src/config.rs Normal file
View File

@@ -0,0 +1,25 @@
use std::path::PathBuf;
/// Runtime configuration, sourced entirely from environment variables.
pub struct Config {
    pub admin_username: String,
    pub data_dir: PathBuf,
    pub gitea_url: String,
    pub port: u16,
}

impl Config {
    /// Builds the configuration from the environment.
    ///
    /// Required: `ADMIN_USERNAME`, `DATA_DIR`, `GITEA_URL` (trailing slashes
    /// are stripped so URLs can be joined with `/api/...`). Optional: `PORT`
    /// (defaults to 3000). Panics with a descriptive message on any missing
    /// or malformed value — deliberate fail-fast at startup.
    pub fn from_env() -> Self {
        let admin_username = std::env::var("ADMIN_USERNAME").expect("ADMIN_USERNAME required");
        let data_dir = std::env::var("DATA_DIR").expect("DATA_DIR required");
        let gitea_raw = std::env::var("GITEA_URL").expect("GITEA_URL required");
        let port_raw = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
        Self {
            admin_username,
            data_dir: PathBuf::from(data_dir),
            gitea_url: gitea_raw.trim_end_matches('/').to_string(),
            port: port_raw.parse().expect("PORT must be a number"),
        }
    }
}

151
backend/src/handlers.rs Normal file
View File

@@ -0,0 +1,151 @@
use axum::{
body::Body,
extract::{Multipart, State},
http::{HeaderValue, StatusCode},
response::{IntoResponse, Response},
Json,
};
use std::{io::Write, sync::Arc};
use klog_types::{BatchFilesRequest, FilesMetaResponse};
use serde_json::json;
use crate::{auth::AuthUser, storage, AppState};
/// GET /admin/files — returns metadata (owner, name, sha256) for every stored
/// file. Admin-only; non-admins get 403 via `require_admin`.
pub async fn list_all_files_meta(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
) -> Result<Json<FilesMetaResponse>, AppError> {
    require_admin(&auth)?;
    match storage::list_all_files(&state.config.data_dir).await {
        Ok(files) => Ok(Json(FilesMetaResponse { files })),
        Err(err) => Err(AppError::Io(err)),
    }
}
/// POST /admin/files/get — streams the requested files back as a single ZIP
/// archive, one entry per file at `<username>/<filename>`. Admin-only.
///
/// Invalid filenames and unreadable files are silently skipped rather than
/// failing the whole batch — best-effort by design.
pub async fn download_files(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
    Json(req): Json<BatchFilesRequest>,
) -> Result<Response, AppError> {
    require_admin(&auth)?;
    // Build the archive fully in memory; fine for the expected file sizes.
    // NOTE(review): no size cap — confirm requests can't be arbitrarily large.
    let cursor = std::io::Cursor::new(Vec::new());
    let mut zip = zip::ZipWriter::new(cursor);
    let options = zip::write::SimpleFileOptions::default();
    for user_files in &req.users {
        for filename in &user_files.files {
            // Re-validate names server-side; never trust the request body.
            if !storage::validate_filename(filename) {
                continue;
            }
            let data = match storage::read_file(&state.config.data_dir, &user_files.username, filename).await {
                Ok(d) => d,
                Err(_) => continue,
            };
            let zip_path = format!("{}/{}", user_files.username, filename);
            // ZipWriter is stateful: start_file must precede each write_all.
            zip.start_file(&zip_path, options)
                .map_err(|e| AppError::Internal(e.to_string()))?;
            zip.write_all(&data)
                .map_err(|e| AppError::Internal(e.to_string()))?;
        }
    }
    // finish() writes the central directory; skipping it would corrupt the ZIP.
    let cursor = zip.finish().map_err(|e| AppError::Internal(e.to_string()))?;
    let bytes = cursor.into_inner();
    let mut response = Response::new(Body::from(bytes));
    response
        .headers_mut()
        .insert("Content-Type", HeaderValue::from_static("application/zip"));
    Ok(response)
}
/// DELETE /admin/files — removes the requested files and reports how many were
/// actually deleted. Admin-only. Missing files and I/O errors are treated as
/// "not deleted" rather than failing the batch (best-effort semantics).
pub async fn delete_files(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
    Json(req): Json<BatchFilesRequest>,
) -> Result<Json<serde_json::Value>, AppError> {
    require_admin(&auth)?;
    let mut removed: u32 = 0;
    for entry in &req.users {
        // Skip anything that fails server-side filename validation.
        for name in entry.files.iter().filter(|n| storage::validate_filename(n)) {
            let deleted_now = storage::delete_file(&state.config.data_dir, &entry.username, name)
                .await
                .unwrap_or(false);
            if deleted_now {
                removed += 1;
            }
        }
    }
    Ok(Json(json!({ "deleted": removed })))
}
/// POST /files — accepts a multipart form and stores each part under the
/// authenticated user's directory. Any authenticated (allowlisted) user may
/// upload. Returns the list of stored filenames.
pub async fn upload_file(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
    mut multipart: Multipart,
) -> Result<Json<serde_json::Value>, AppError> {
    let mut uploaded: Vec<String> = Vec::new();
    while let Some(field) = multipart
        .next_field()
        .await
        .map_err(|e| AppError::BadRequest(e.to_string()))?
    {
        // Prefer the part's file name; fall back to the field name.
        let filename = field
            .file_name()
            .or_else(|| field.name())
            .ok_or_else(|| AppError::BadRequest("Missing filename".to_string()))?
            .to_string();
        if !storage::validate_filename(&filename) {
            // Fix: include the offending name so the client can tell which
            // part was rejected (previously the message said "(unknown)").
            return Err(AppError::BadRequest(format!("Invalid filename: {filename}")));
        }
        let data = field.bytes().await.map_err(|e| AppError::BadRequest(e.to_string()))?;
        storage::write_file(&state.config.data_dir, &auth.username, &filename, &data)
            .await
            .map_err(AppError::Io)?;
        uploaded.push(filename);
    }
    Ok(Json(json!({ "uploaded": uploaded })))
}
/// Guard used by admin-only handlers: rejects non-admin callers with 403.
fn require_admin(auth: &AuthUser) -> Result<(), AppError> {
    if !auth.is_admin {
        return Err(AppError::Forbidden);
    }
    Ok(())
}
/// Handler-level error type; mapped to HTTP responses in `IntoResponse` below.
#[derive(Debug)]
pub enum AppError {
    // 403 — authenticated but not authorized (non-admin on admin route).
    Forbidden,
    // 400 — malformed client input; message is returned to the caller.
    BadRequest(String),
    // 500 — internal failure described by a message (e.g. ZIP writing).
    Internal(String),
    // 500 — underlying filesystem error.
    Io(std::io::Error),
}
impl IntoResponse for AppError {
fn into_response(self) -> Response {
let (status, msg) = match self {
AppError::Forbidden => (StatusCode::FORBIDDEN, "Forbidden".to_string()),
AppError::BadRequest(m) => (StatusCode::BAD_REQUEST, m),
AppError::Internal(m) => (StatusCode::INTERNAL_SERVER_ERROR, m),
AppError::Io(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
};
if status.is_server_error() {
tracing::error!(status = status.as_u16(), "{msg}");
} else {
tracing::warn!(status = status.as_u16(), "{msg}");
}
(status, msg).into_response()
}
}

42
backend/src/main.rs Normal file
View File

@@ -0,0 +1,42 @@
mod auth;
mod config;
mod handlers;
mod storage;
use std::sync::Arc;
use axum::{
routing::{delete, get, post},
Router,
};
/// Shared application state, wrapped in an `Arc` and handed to every handler
/// via axum's `State` extractor.
pub struct AppState {
    /// Environment-derived configuration (admin user, data dir, Gitea URL, port).
    pub config: config::Config,
    /// Reused reqwest client for Gitea API calls (connection pooling).
    pub http_client: reqwest::Client,
}
#[tokio::main]
async fn main() {
    // Log filtering is driven by RUST_LOG (tracing env-filter syntax).
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();

    // Fail fast: missing required env vars abort startup with a clear panic.
    let config = config::Config::from_env();
    let port = config.port;
    let shared = Arc::new(AppState {
        config,
        http_client: reqwest::Client::new(),
    });

    // Routes: any authenticated user may upload; /admin/* requires the admin.
    // axum merges the GET and DELETE method routers for "/admin/files".
    let router = Router::new()
        .route("/files", post(handlers::upload_file))
        .route("/admin/files", get(handlers::list_all_files_meta))
        .route("/admin/files", delete(handlers::delete_files))
        .route("/admin/files/get", post(handlers::download_files))
        .with_state(shared);

    let addr = format!("0.0.0.0:{port}");
    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
    tracing::info!("Listening on {addr}");
    axum::serve(listener, router).await.unwrap();
}

74
backend/src/storage.rs Normal file
View File

@@ -0,0 +1,74 @@
use std::{io, path::{Path, PathBuf}};
use sha2::{Sha256, Digest};
use klog_types::FileInfo;
/// Root directory for user file storage: `<data_dir>/files`.
pub fn files_root(data_dir: &Path) -> PathBuf {
    let mut root = data_dir.to_path_buf();
    root.push("files");
    root
}
/// Rejects filenames that could escape the per-user storage directory:
/// empty names, the `.`/`..` entries, and anything containing a path
/// separator (either flavor) or a NUL byte.
pub fn validate_filename(name: &str) -> bool {
    match name {
        "" | "." | ".." => false,
        other => other.chars().all(|c| !matches!(c, '/' | '\\' | '\0')),
    }
}
/// Walks `<data_dir>/files/<username>/<filename>` and returns one `FileInfo`
/// (owner, name, sha256) per regular file found.
///
/// A missing root directory yields an empty list (nothing uploaded yet);
/// any other I/O error is propagated. Non-directory entries at the user
/// level and non-file entries inside a user directory are skipped.
pub async fn list_all_files(data_dir: &Path) -> io::Result<Vec<FileInfo>> {
    let root = files_root(data_dir);
    let mut result = Vec::new();
    let mut user_dirs = match tokio::fs::read_dir(&root).await {
        Ok(d) => d,
        // No files/ dir yet — treat as empty rather than erroring.
        Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(result),
        Err(e) => return Err(e),
    };
    while let Some(user_entry) = user_dirs.next_entry().await? {
        if !user_entry.file_type().await?.is_dir() {
            continue;
        }
        // Lossy conversion: non-UTF-8 directory names are mangled, not skipped.
        let username = user_entry.file_name().to_string_lossy().to_string();
        let user_dir = root.join(&username);
        let mut files = tokio::fs::read_dir(&user_dir).await?;
        while let Some(file_entry) = files.next_entry().await? {
            if !file_entry.file_type().await?.is_file() {
                continue;
            }
            let filename = file_entry.file_name().to_string_lossy().to_string();
            let path = user_dir.join(&filename);
            // Hashing reads each file fully into memory (see hash_file).
            let sha256 = hash_file(&path).await?;
            result.push(FileInfo { username: username.clone(), filename, sha256 });
        }
    }
    Ok(result)
}
/// Returns the SHA-256 digest of the file's contents as lowercase hex.
/// Reads the entire file into memory, so it is intended for modest file sizes.
pub async fn hash_file(path: &Path) -> io::Result<String> {
    let bytes = tokio::fs::read(path).await?;
    let mut ctx = Sha256::new();
    ctx.update(&bytes);
    let digest = ctx.finalize();
    Ok(hex::encode(digest))
}
/// Reads `<data_dir>/files/<username>/<filename>` into memory.
/// Callers are expected to have validated `filename` (see `validate_filename`).
pub async fn read_file(data_dir: &Path, username: &str, filename: &str) -> io::Result<Vec<u8>> {
    let path = files_root(data_dir).join(username).join(filename);
    tokio::fs::read(&path).await
}
/// Writes `data` to `<data_dir>/files/<username>/<filename>`, creating the
/// user directory (and parents) on first use. Overwrites any existing file.
pub async fn write_file(data_dir: &Path, username: &str, filename: &str, data: &[u8]) -> io::Result<()> {
    let user_dir = files_root(data_dir).join(username);
    tokio::fs::create_dir_all(&user_dir).await?;
    let target = user_dir.join(filename);
    tokio::fs::write(&target, data).await
}
/// Deletes `<data_dir>/files/<username>/<filename>`.
/// Returns `Ok(true)` if a file was removed, `Ok(false)` if it did not exist,
/// and propagates any other I/O error.
pub async fn delete_file(data_dir: &Path, username: &str, filename: &str) -> io::Result<bool> {
    let target = files_root(data_dir).join(username).join(filename);
    match tokio::fs::remove_file(&target).await {
        Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(false),
        Err(err) => Err(err),
        Ok(()) => Ok(true),
    }
}