feat: add klog

This commit is contained in:
2026-04-29 17:57:41 +09:00
commit f9f009fcd2
18 changed files with 3923 additions and 0 deletions

3
.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
target
.env
.direnv

2933
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

6
Cargo.toml Normal file
View File

@@ -0,0 +1,6 @@
[workspace]
resolver = "2"
# Member crates: shared wire types, the CLI, and the HTTP backend.
members = ["types", "cli", "backend"]
# NOTE(review): empty — each member currently pins its own dependency
# versions; shared versions could be hoisted here later.
[workspace.dependencies]

18
backend/Cargo.toml Normal file
View File

@@ -0,0 +1,18 @@
[package]
name = "klog-backend"
version = "0.1.0"
edition = "2024"
[dependencies]
# Shared request/response types used by both backend and CLI.
klog-types = { path = "../types" }
# HTTP server; "multipart" enables the upload endpoint's extractor.
axum = { version = "0.8.9", features = ["multipart"] }
tokio = { version = "1.52.1", features = ["full"] }
serde_json = "1.0.149"
# Outbound client for validating tokens against Gitea.
reqwest = { version = "0.13.3", features = ["form", "json"] }
# sha2 + hex: content hashing for file metadata listings.
sha2 = "0.11.0"
# Batch download endpoint streams files back as a ZIP archive.
zip = "8.6.0"
hex = "0.4.3"
tower-http = { version = "0.6.8", features = ["trace"] }
serde = { version = "1.0.228", features = ["derive"] }
tracing = "0.1.44"
tracing-subscriber = { version = "0.3.23", features = ["env-filter", "fmt", "json"] }

50
backend/Dockerfile Normal file
View File

@@ -0,0 +1,50 @@
# Two-stage build: compile in a rust image, ship a slim Debian runtime.
# NOTE(review): COPY paths below reference `crates/ktracing*` and `klog/...`,
# but this workspace's members are `types`, `cli`, `backend` — presumably the
# Docker build context is a larger monorepo root; confirm before relying on it.
FROM rust:1.87-slim AS builder
WORKDIR /build
# pkg-config/libssl needed to build reqwest's native-tls path.
RUN apt-get update && apt-get install -y pkg-config libssl-dev && rm -rf /var/lib/apt/lists/*
# Copy workspace manifests
COPY Cargo.toml Cargo.lock ./
# Copy only the crates needed for klog-backend
COPY crates/ktracing/Cargo.toml crates/ktracing/Cargo.toml
COPY crates/ktracing-subscriber/Cargo.toml crates/ktracing-subscriber/Cargo.toml
COPY klog/backend/Cargo.toml klog/backend/Cargo.toml
COPY klog/klog-cli/Cargo.toml klog/klog-cli/Cargo.toml
COPY klog/types/Cargo.toml klog/types/Cargo.toml
# Stub out every workspace member so Cargo can resolve the graph without full source
RUN find . -name "Cargo.toml" -not -path "./Cargo.toml" | while read f; do \
dir=$(dirname "$f"); \
mkdir -p "$dir/src"; \
echo 'fn main() {}' > "$dir/src/main.rs"; \
touch "$dir/src/lib.rs"; \
done
# Build deps only (cache layer)
# NOTE(review): `2>/dev/null || true` hides all errors from this warm-up
# build; a genuine dependency failure only surfaces at the second build below.
RUN cargo build --release -p klog-backend 2>/dev/null || true
# Now copy real source
COPY crates/ktracing/src crates/ktracing/src
COPY crates/ktracing-subscriber/src crates/ktracing-subscriber/src
COPY klog/backend/src klog/backend/src
COPY klog/types/src klog/types/src
# Touch to invalidate cached stubs
RUN find klog/backend/src klog/types/src crates/ktracing/src crates/ktracing-subscriber/src \
-name "*.rs" -exec touch {} +
RUN cargo build --release -p klog-backend
# ── Runtime image ─────────────────────────────────────────────────────────────
FROM debian:bookworm-slim
# ca-certificates required for outbound HTTPS to Gitea.
RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
COPY --from=builder /build/target/release/klog-backend /usr/local/bin/klog-backend
ENV PORT=3000
EXPOSE 3000
CMD ["/usr/local/bin/klog-backend"]

92
backend/src/auth.rs Normal file
View File

@@ -0,0 +1,92 @@
use axum::{
extract::FromRequestParts,
http::{request::Parts, HeaderMap, StatusCode},
};
use std::sync::Arc;
use crate::AppState;
/// Authenticated identity resolved from the request's Bearer token.
pub struct AuthUser {
// Gitea login name of the caller.
pub username: String,
// True iff the login equals the configured `ADMIN_USERNAME`.
pub is_admin: bool,
}
// Axum extractor: authenticates every request that declares an `AuthUser`
// parameter. Flow: Bearer token -> Gitea `/api/v1/user` -> admin check ->
// (non-admins only) allowlist check against `<data_dir>/users.json`.
// Rejections: 401 missing/invalid token, 403 not allowlisted, 500 for
// Gitea/IO/parse failures (each logged before being returned).
impl FromRequestParts<Arc<AppState>> for AuthUser {
type Rejection = (StatusCode, String);
async fn from_request_parts(
parts: &mut Parts,
state: &Arc<AppState>,
) -> Result<Self, Self::Rejection> {
// 401 when the Authorization header is absent or not Bearer.
let token = extract_bearer(&parts.headers)
.ok_or_else(|| (StatusCode::UNAUTHORIZED, "Missing Bearer token".to_string()))?;
// Validate the token by asking Gitea who it belongs to.
let resp = state
.http_client
.get(format!("{}/api/v1/user", state.config.gitea_url))
.bearer_auth(token)
.send()
.await
.map_err(|e| {
let msg = format!("Gitea /api/v1/user request failed: {e}");
tracing::error!("{msg}");
(StatusCode::INTERNAL_SERVER_ERROR, msg)
})?;
// Gitea's 401 is forwarded as our own 401 (bad/expired token).
if resp.status() == reqwest::StatusCode::UNAUTHORIZED {
tracing::warn!("Gitea rejected token (401)");
return Err((StatusCode::UNAUTHORIZED, "Invalid token".to_string()));
}
// Body is read as text first so parse failures can log the raw payload.
let raw = resp.text().await.map_err(|e| {
let msg = format!("Gitea /api/v1/user read body failed: {e}");
tracing::error!("{msg}");
(StatusCode::INTERNAL_SERVER_ERROR, msg)
})?;
tracing::debug!(body = %raw, "Gitea /api/v1/user response");
let body: serde_json::Value = serde_json::from_str(&raw).map_err(|e| {
let msg = format!("Gitea /api/v1/user parse failed: {e} — body: {raw}");
tracing::error!("{msg}");
(StatusCode::INTERNAL_SERVER_ERROR, msg)
})?;
// `login` is Gitea's canonical username field.
let username = body["login"]
.as_str()
.ok_or_else(|| {
let msg = format!("No `login` field in /api/v1/user response: {body}");
tracing::error!("{msg}");
(StatusCode::INTERNAL_SERVER_ERROR, msg)
})?
.to_string();
let is_admin = username == state.config.admin_username;
// Non-admins must additionally appear in the allowlist file, which is
// a plain JSON array of usernames. Re-read on every request, so edits
// take effect without a restart.
if !is_admin {
let users_path = state.config.data_dir.join("users.json");
let raw = tokio::fs::read_to_string(&users_path).await.map_err(|e| {
let msg = format!("Cannot read users.json: {e}");
tracing::error!("{msg}");
(StatusCode::INTERNAL_SERVER_ERROR, msg)
})?;
let users: Vec<String> = serde_json::from_str(&raw).map_err(|e| {
let msg = format!("Invalid users.json: {e}");
tracing::error!("{msg}");
(StatusCode::INTERNAL_SERVER_ERROR, msg)
})?;
if !users.contains(&username) {
tracing::warn!(username, "User not in allowlist");
return Err((StatusCode::FORBIDDEN, "User not in allowlist".to_string()));
}
}
tracing::debug!(username, is_admin, "Auth ok");
Ok(AuthUser { username, is_admin })
}
}
/// Pull the token out of an `Authorization: Bearer <token>` header.
///
/// Returns `None` when the header is absent, is not valid UTF-8, or does
/// not use the Bearer scheme. BUGFIX: the scheme comparison is now
/// case-insensitive (`bearer`, `BEARER`, …), as RFC 7235 requires for
/// authentication scheme names; the previous `strip_prefix("Bearer ")`
/// rejected valid lowercase-scheme clients.
fn extract_bearer(headers: &HeaderMap) -> Option<&str> {
    let value = headers.get("Authorization")?.to_str().ok()?;
    let (scheme, token) = value.split_once(' ')?;
    scheme.eq_ignore_ascii_case("Bearer").then_some(token)
}

25
backend/src/config.rs Normal file
View File

@@ -0,0 +1,25 @@
use std::path::PathBuf;
/// Runtime configuration for the backend, read once at startup.
pub struct Config {
    /// Username granted access to the admin-only endpoints.
    pub admin_username: String,
    /// Root directory for persisted data (uploaded files, users.json).
    pub data_dir: PathBuf,
    /// Base URL of the Gitea instance used for token validation.
    pub gitea_url: String,
    /// TCP port the HTTP server binds to.
    pub port: u16,
}
impl Config {
    /// Build a `Config` from environment variables.
    ///
    /// `ADMIN_USERNAME`, `DATA_DIR` and `GITEA_URL` are required; `PORT`
    /// defaults to 3000. Panics with a descriptive message when a required
    /// variable is missing or `PORT` is non-numeric — failing loudly at
    /// startup is intentional.
    pub fn from_env() -> Self {
        let admin_username = std::env::var("ADMIN_USERNAME").expect("ADMIN_USERNAME required");
        let data_dir = std::env::var("DATA_DIR").expect("DATA_DIR required");
        let gitea_url = std::env::var("GITEA_URL").expect("GITEA_URL required");
        let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
        Self {
            admin_username,
            data_dir: PathBuf::from(data_dir),
            // Drop a trailing slash so later `format!("{}/api/...")` joins
            // don't produce double slashes.
            gitea_url: gitea_url.trim_end_matches('/').to_string(),
            port: port.parse().expect("PORT must be a number"),
        }
    }
}

151
backend/src/handlers.rs Normal file
View File

@@ -0,0 +1,151 @@
use axum::{
body::Body,
extract::{Multipart, State},
http::{HeaderValue, StatusCode},
response::{IntoResponse, Response},
Json,
};
use std::{io::Write, sync::Arc};
use klog_types::{BatchFilesRequest, FilesMetaResponse};
use serde_json::json;
use crate::{auth::AuthUser, storage, AppState};
/// GET /admin/files — metadata (owner, name, sha256) for every stored
/// file. Admin-only.
pub async fn list_all_files_meta(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
) -> Result<Json<FilesMetaResponse>, AppError> {
    require_admin(&auth)?;
    let files = match storage::list_all_files(&state.config.data_dir).await {
        Ok(files) => files,
        Err(e) => return Err(AppError::Io(e)),
    };
    Ok(Json(FilesMetaResponse { files }))
}
/// POST /admin/files/get — return the requested files packed into one ZIP
/// archive, laid out as `<username>/<filename>`. Admin-only.
///
/// Invalid filenames and unreadable files are skipped silently rather
/// than failing the whole batch.
pub async fn download_files(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
    Json(req): Json<BatchFilesRequest>,
) -> Result<Response, AppError> {
    require_admin(&auth)?;
    let mut zip = zip::ZipWriter::new(std::io::Cursor::new(Vec::new()));
    let options = zip::write::SimpleFileOptions::default();
    for user in &req.users {
        for name in &user.files {
            if !storage::validate_filename(name) {
                continue;
            }
            let Ok(data) = storage::read_file(&state.config.data_dir, &user.username, name).await
            else {
                continue;
            };
            zip.start_file(format!("{}/{}", user.username, name), options)
                .map_err(|e| AppError::Internal(e.to_string()))?;
            zip.write_all(&data)
                .map_err(|e| AppError::Internal(e.to_string()))?;
        }
    }
    let bytes = zip
        .finish()
        .map_err(|e| AppError::Internal(e.to_string()))?
        .into_inner();
    let mut response = Response::new(Body::from(bytes));
    response
        .headers_mut()
        .insert("Content-Type", HeaderValue::from_static("application/zip"));
    Ok(response)
}
/// DELETE /admin/files — remove the listed files and report how many were
/// actually deleted. Admin-only. Invalid names and already-missing files
/// simply don't count toward the total.
pub async fn delete_files(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
    Json(req): Json<BatchFilesRequest>,
) -> Result<Json<serde_json::Value>, AppError> {
    require_admin(&auth)?;
    let mut deleted = 0u32;
    for user in &req.users {
        for name in &user.files {
            if !storage::validate_filename(name) {
                continue;
            }
            let removed = storage::delete_file(&state.config.data_dir, &user.username, name)
                .await
                .unwrap_or(false);
            if removed {
                deleted += 1;
            }
        }
    }
    Ok(Json(json!({ "deleted": deleted })))
}
/// POST /files — multipart upload. Any authenticated user may upload;
/// each part is stored under the uploader's own directory.
///
/// Returns `{"uploaded": [<filename>, ...]}` listing every stored part.
pub async fn upload_file(
    State(state): State<Arc<AppState>>,
    auth: AuthUser,
    mut multipart: Multipart,
) -> Result<Json<serde_json::Value>, AppError> {
    let mut uploaded: Vec<String> = Vec::new();
    while let Some(field) = multipart
        .next_field()
        .await
        .map_err(|e| AppError::BadRequest(e.to_string()))?
    {
        // Prefer the part's filename; fall back to the field name.
        let filename = field
            .file_name()
            .or_else(|| field.name())
            .ok_or_else(|| AppError::BadRequest("Missing filename".to_string()))?
            .to_string();
        if !storage::validate_filename(&filename) {
            // BUGFIX: the rejected name was never interpolated into the
            // message, leaving clients with an unusable 400 response.
            return Err(AppError::BadRequest(format!("Invalid filename: {filename}")));
        }
        let data = field.bytes().await.map_err(|e| AppError::BadRequest(e.to_string()))?;
        storage::write_file(&state.config.data_dir, &auth.username, &filename, &data)
            .await
            .map_err(AppError::Io)?;
        uploaded.push(filename);
    }
    Ok(Json(json!({ "uploaded": uploaded })))
}
/// Guard for admin-only endpoints: `Err(Forbidden)` unless the caller is
/// the configured admin user.
fn require_admin(auth: &AuthUser) -> Result<(), AppError> {
    if !auth.is_admin {
        return Err(AppError::Forbidden);
    }
    Ok(())
}
/// Unified error type returned by every handler in this module.
#[derive(Debug)]
pub enum AppError {
// 403 — authenticated, but not the admin user.
Forbidden,
// 400 — malformed client input (multipart error, invalid filename).
BadRequest(String),
// 500 — internal failure with a human-readable description (zip errors).
Internal(String),
// 500 — underlying filesystem failure.
Io(std::io::Error),
}
// Map AppError variants onto HTTP status + plain-text body, logging 5xx
// at error level and 4xx at warn level before responding.
impl IntoResponse for AppError {
fn into_response(self) -> Response {
let (status, msg) = match self {
AppError::Forbidden => (StatusCode::FORBIDDEN, "Forbidden".to_string()),
AppError::BadRequest(m) => (StatusCode::BAD_REQUEST, m),
AppError::Internal(m) => (StatusCode::INTERNAL_SERVER_ERROR, m),
AppError::Io(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
};
if status.is_server_error() {
tracing::error!(status = status.as_u16(), "{msg}");
} else {
tracing::warn!(status = status.as_u16(), "{msg}");
}
(status, msg).into_response()
}
}

42
backend/src/main.rs Normal file
View File

@@ -0,0 +1,42 @@
mod auth;
mod config;
mod handlers;
mod storage;
use std::sync::Arc;
use axum::{
routing::{delete, get, post},
Router,
};
/// Shared application state, handed to every handler via axum `State`.
pub struct AppState {
pub config: config::Config,
// One reqwest client reused for all Gitea calls (connection pooling).
pub http_client: reqwest::Client,
}
/// Backend entry point: read config, wire up routes, serve forever.
#[tokio::main]
async fn main() {
    // Log level/format controlled by RUST_LOG via the default env filter.
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();
    let config = config::Config::from_env();
    let port = config.port;
    let state = Arc::new(AppState {
        config,
        http_client: reqwest::Client::new(),
    });
    // GET and DELETE on /admin/files share one chained method router.
    let app = Router::new()
        .route(
            "/admin/files",
            get(handlers::list_all_files_meta).delete(handlers::delete_files),
        )
        .route("/admin/files/get", post(handlers::download_files))
        .route("/files", post(handlers::upload_file))
        .with_state(state);
    let addr = format!("0.0.0.0:{port}");
    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
    tracing::info!("Listening on {addr}");
    axum::serve(listener, app).await.unwrap();
}

74
backend/src/storage.rs Normal file
View File

@@ -0,0 +1,74 @@
use std::{io, path::{Path, PathBuf}};
use sha2::{Sha256, Digest};
use klog_types::FileInfo;
/// Directory under `data_dir` where all per-user files live.
pub fn files_root(data_dir: &Path) -> PathBuf {
    let mut root = data_dir.to_path_buf();
    root.push("files");
    root
}
/// Reject filenames that could escape the per-user directory: empty
/// names, `.`/`..`, and anything containing a path separator or NUL.
pub fn validate_filename(name: &str) -> bool {
    if name.is_empty() || name == "." || name == ".." {
        return false;
    }
    name.bytes().all(|b| b != b'/' && b != b'\\' && b != b'\0')
}
/// Walk `<data_dir>/files/<user>/<file>` and return metadata (owner,
/// name, sha256) for every regular file. A missing root directory yields
/// an empty list rather than an error; other I/O failures propagate.
pub async fn list_all_files(data_dir: &Path) -> io::Result<Vec<FileInfo>> {
let root = files_root(data_dir);
let mut result = Vec::new();
// Treat "no files dir yet" (fresh deployment) as zero files.
let mut user_dirs = match tokio::fs::read_dir(&root).await {
Ok(d) => d,
Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(result),
Err(e) => return Err(e),
};
while let Some(user_entry) = user_dirs.next_entry().await? {
// First level: one directory per user; skip stray files.
if !user_entry.file_type().await?.is_dir() {
continue;
}
let username = user_entry.file_name().to_string_lossy().to_string();
let user_dir = root.join(&username);
let mut files = tokio::fs::read_dir(&user_dir).await?;
while let Some(file_entry) = files.next_entry().await? {
// Second level: regular files only; subdirectories are ignored.
if !file_entry.file_type().await?.is_file() {
continue;
}
let filename = file_entry.file_name().to_string_lossy().to_string();
let path = user_dir.join(&filename);
// Hash on every listing — O(total bytes); fine at current scale.
let sha256 = hash_file(&path).await?;
result.push(FileInfo { username: username.clone(), filename, sha256 });
}
}
Ok(result)
}
/// SHA-256 of a file's contents, hex-encoded. Reads the whole file into
/// memory, matching how files are served elsewhere in this module.
pub async fn hash_file(path: &Path) -> io::Result<String> {
    let data = tokio::fs::read(path).await?;
    Ok(hex::encode(Sha256::digest(&data)))
}
/// Read `<files_root>/<username>/<filename>` fully into memory.
pub async fn read_file(data_dir: &Path, username: &str, filename: &str) -> io::Result<Vec<u8>> {
    let path = files_root(data_dir).join(username).join(filename);
    tokio::fs::read(path).await
}
/// Store a file under the user's directory, creating the directory first
/// if it does not exist yet.
pub async fn write_file(data_dir: &Path, username: &str, filename: &str, data: &[u8]) -> io::Result<()> {
    let user_dir = files_root(data_dir).join(username);
    tokio::fs::create_dir_all(&user_dir).await?;
    tokio::fs::write(user_dir.join(filename), data).await
}
/// Delete a user's file. `Ok(true)` when removed, `Ok(false)` when it
/// did not exist; any other I/O failure is propagated.
pub async fn delete_file(data_dir: &Path, username: &str, filename: &str) -> io::Result<bool> {
    let path = files_root(data_dir).join(username).join(filename);
    if let Err(e) = tokio::fs::remove_file(&path).await {
        return if e.kind() == io::ErrorKind::NotFound {
            Ok(false)
        } else {
            Err(e)
        };
    }
    Ok(true)
}

19
cli/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[package]
name = "klog"
version = "0.1.0"
edition = "2024"
[dependencies]
klog-types = { path = "../types" }
clap = { version = "4.6.1", features = ["derive", "env"] }
tokio = { version = "1.52.1", features = ["full"] }
reqwest = { version = "0.13.3", features = ["form", "json", "multipart"] }
serde_json = "1.0.149"
sha2 = "0.11.0"
hex = "0.4.3"
zip = "8.6.0"
walkdir = "2.5.0"
anyhow = "1.0.102"
rand = "0.10.1"
base64 = "0.22.1"
serde = { version = "1.0.228", features = ["derive"] }

79
cli/src/client.rs Normal file
View File

@@ -0,0 +1,79 @@
use anyhow::{Context, Result};
use klog_types::{BatchFilesRequest, FilesMetaResponse};
use crate::config::Config;
/// Thin HTTP client for the klog backend API.
pub struct KlogClient {
// Server URL with any trailing slash already stripped.
base_url: String,
// Bearer token forwarded on every request.
token: String,
client: reqwest::Client,
}
impl KlogClient {
/// Build a client from resolved CLI configuration (URL + token).
pub fn new(config: &Config) -> Self {
Self {
base_url: config.url.trim_end_matches('/').to_string(),
token: config.token.clone(),
client: reqwest::Client::new(),
}
}
/// GET /admin/files — list metadata for every file on the server
/// (admin-only endpoint; non-admin tokens get an HTTP error here).
pub async fn list_files(&self) -> Result<FilesMetaResponse> {
Ok(self
.client
.get(format!("{}/admin/files", self.base_url))
.bearer_auth(&self.token)
.send()
.await?
.error_for_status()?
.json::<FilesMetaResponse>()
.await?)
}
/// POST /admin/files/get — fetch the requested files as raw ZIP bytes.
pub async fn get_files(&self, req: &BatchFilesRequest) -> Result<Vec<u8>> {
Ok(self
.client
.post(format!("{}/admin/files/get", self.base_url))
.bearer_auth(&self.token)
.json(req)
.send()
.await?
.error_for_status()?
.bytes()
.await?
.to_vec())
}
/// POST /files — upload one local file as a multipart part named "file".
/// The whole file is read into memory before sending.
pub async fn upload_file(&self, path: &std::path::Path) -> Result<serde_json::Value> {
let filename = path
.file_name()
.context("Invalid path: no filename")?
.to_string_lossy()
.to_string();
let data = tokio::fs::read(path).await?;
let part = reqwest::multipart::Part::bytes(data).file_name(filename);
let form = reqwest::multipart::Form::new().part("file", part);
Ok(self
.client
.post(format!("{}/files", self.base_url))
.bearer_auth(&self.token)
.multipart(form)
.send()
.await?
.error_for_status()?
.json::<serde_json::Value>()
.await?)
}
/// DELETE /admin/files — batch-delete; returns the server's JSON
/// summary (e.g. `{"deleted": N}`).
pub async fn delete_files(&self, req: &BatchFilesRequest) -> Result<serde_json::Value> {
Ok(self
.client
.delete(format!("{}/admin/files", self.base_url))
.bearer_auth(&self.token)
.json(req)
.send()
.await?
.error_for_status()?
.json::<serde_json::Value>()
.await?)
}
}

21
cli/src/config.rs Normal file
View File

@@ -0,0 +1,21 @@
use anyhow::Result;
use crate::login;
/// Resolved CLI configuration: where the klog server is and how to
/// authenticate against it.
pub struct Config {
pub url: String,
pub token: String,
}
impl Config {
    /// Resolve URL and token. Environment variables (`KLOG_URL`,
    /// `KLOG_TOKEN`) take precedence; otherwise the values saved in
    /// ~/.config/klog.json by `klog login` / `klog set-url` are used.
    pub fn from_env() -> Result<Self> {
        let url = std::env::var("KLOG_URL").or_else(|_| login::load_url())?;
        let token = std::env::var("KLOG_TOKEN").or_else(|_| login::load_token())?;
        Ok(Self { url, token })
    }
}

180
cli/src/login.rs Normal file
View File

@@ -0,0 +1,180 @@
use anyhow::{Context, Result};
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
use rand::Rng;
use sha2::{Digest, Sha256};
use std::path::PathBuf;
use tokio::{
io::{AsyncReadExt, AsyncWriteExt},
net::TcpListener,
};
/// Default Gitea instance used by `klog login` when --gitea-url is omitted.
pub const DEFAULT_GITEA_URL: &str = "https://git.walruslab.org";
// Presumably the OAuth2 application id registered in Gitea for this CLI
// (PKCE public client — no secret is used anywhere in this flow).
const CLIENT_ID: &str = "b1b43f28-66b0-4577-b622-897f57decc26";
// Must match the redirect URI registered for the OAuth app; the CLI
// listens on this port locally to receive the callback.
const REDIRECT_URI: &str = "http://localhost:20130/callback";
/// Interactive OAuth2 + PKCE login: print an authorize URL, wait for the
/// browser redirect on localhost, exchange the code for a token, and
/// persist it to ~/.config/klog.json.
pub async fn login(gitea_url: &str) -> Result<()> {
// PKCE: challenge is derived from a one-shot random verifier; `state`
// ties the callback back to this invocation.
let verifier = code_verifier();
let challenge = code_challenge(&verifier);
let state = random_state();
let mut auth_url = reqwest::Url::parse(&format!("{gitea_url}/login/oauth/authorize"))
.context("Invalid Gitea URL")?;
auth_url
.query_pairs_mut()
.append_pair("client_id", CLIENT_ID)
.append_pair("redirect_uri", REDIRECT_URI)
.append_pair("response_type", "code")
.append_pair("state", &state)
.append_pair("code_challenge", &challenge)
.append_pair("code_challenge_method", "S256");
// The user opens the URL manually — no browser is spawned here.
println!("Open this URL in your browser to authenticate:\n");
println!(" {auth_url}\n");
println!("Waiting for callback on {REDIRECT_URI}...");
let code = wait_for_callback(&state).await?;
let token = exchange_code(gitea_url, &code, &verifier).await?;
save_token(&token)?;
println!("Login successful. Token saved to {}", config_path()?.display());
Ok(())
}
/// Minimal one-shot HTTP listener for the OAuth redirect: accept a single
/// connection on port 20130, parse `code` and `state` from the request
/// line, respond with a small success page, and return the code after
/// verifying the `state` matches.
async fn wait_for_callback(expected_state: &str) -> Result<String> {
let listener = TcpListener::bind("127.0.0.1:20130")
.await
.context("Cannot bind to port 20130 — is another process using it?")?;
let (mut stream, _) = listener.accept().await?;
// NOTE(review): a single 4096-byte read — assumes the whole request
// line arrives in one segment; fine for a local browser GET.
let mut buf = vec![0u8; 4096];
let n = stream.read(&mut buf).await?;
let request = String::from_utf8_lossy(&buf[..n]);
// First line: GET /callback?code=...&state=... HTTP/1.1
let first_line = request.lines().next().unwrap_or_default();
let path = first_line.split_whitespace().nth(1).unwrap_or_default();
let query = path.split_once('?').map(|(_, q)| q).unwrap_or_default();
let mut code = None;
let mut got_state = None;
for pair in query.split('&') {
if let Some((k, v)) = pair.split_once('=') {
match k {
"code" => code = Some(v.to_string()),
"state" => got_state = Some(v.to_string()),
_ => {}
}
}
}
// Best-effort response — the browser tab is cosmetic; errors ignored.
let _ = stream
.write_all(
b"HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n\r\n\
<h2>Login successful!</h2><p>You can close this tab.</p>",
)
.await;
let code = code.context("No code in OAuth callback")?;
let got_state = got_state.context("No state in OAuth callback")?;
// CSRF defence: only accept the callback launched by this invocation.
if got_state != expected_state {
anyhow::bail!("State mismatch — possible CSRF attack");
}
Ok(code)
}
/// Exchange the authorization code (plus PKCE verifier) for an access
/// token at Gitea's token endpoint. Returns just the `access_token`.
async fn exchange_code(gitea_url: &str, code: &str, verifier: &str) -> Result<String> {
// `Accept: application/json` asks Gitea for a JSON (not form-encoded)
// token response.
let resp: serde_json::Value = reqwest::Client::new()
.post(format!("{gitea_url}/login/oauth/access_token"))
.header("Accept", "application/json")
.form(&[
("client_id", CLIENT_ID),
("code", code),
("code_verifier", verifier),
("grant_type", "authorization_code"),
("redirect_uri", REDIRECT_URI),
])
.send()
.await?
.error_for_status()?
.json()
.await?;
resp["access_token"]
.as_str()
.with_context(|| format!("No access_token in response: {resp}"))
.map(str::to_string)
}
/// Path of the CLI's config file: `$HOME/.config/klog.json`.
pub fn config_path() -> Result<PathBuf> {
    // NOTE(review): relies on $HOME, so this is Unix-oriented; Windows
    // would need a different lookup.
    let home = std::env::var("HOME").context("HOME not set")?;
    let mut path = PathBuf::from(home);
    path.push(".config");
    path.push("klog.json");
    Ok(path)
}
/// Best-effort load of the config file as JSON; any failure (no HOME,
/// unreadable file, invalid JSON) yields `Value::Null`.
fn read_config_json() -> serde_json::Value {
    let Ok(path) = config_path() else {
        return serde_json::Value::default();
    };
    let Ok(text) = std::fs::read_to_string(&path) else {
        return serde_json::Value::default();
    };
    serde_json::from_str(&text).unwrap_or_default()
}
/// Pretty-print `json` into the config file, creating ~/.config first.
fn write_config_json(json: &serde_json::Value) -> Result<()> {
    let path = config_path()?;
    if let Some(dir) = path.parent() {
        std::fs::create_dir_all(dir)?;
    }
    let pretty = serde_json::to_string_pretty(json)?;
    std::fs::write(&path, pretty)?;
    Ok(())
}
/// Persist the OAuth token, leaving any other config keys intact.
pub fn save_token(token: &str) -> Result<()> {
    let mut cfg = read_config_json();
    cfg["token"] = token.into();
    write_config_json(&cfg)
}
/// Persist the klog server URL, leaving any other config keys intact.
pub fn save_url(url: &str) -> Result<()> {
    let mut cfg = read_config_json();
    cfg["url"] = url.into();
    write_config_json(&cfg)
}
pub fn load_token() -> Result<String> {
let json = read_config_json();
json["token"]
.as_str()
.with_context(|| format!(
"token missing from {}. Run `klog login` first.",
config_path().map(|p| p.display().to_string()).unwrap_or_default()
))
.map(str::to_string)
}
pub fn load_url() -> Result<String> {
let json = read_config_json();
json["url"]
.as_str()
.with_context(|| format!(
"url missing from {}. Run `klog set-url <url>` first.",
config_path().map(|p| p.display().to_string()).unwrap_or_default()
))
.map(str::to_string)
}
/// Random 32-byte PKCE code verifier, base64url-encoded (43 chars).
fn code_verifier() -> String {
    let mut bytes = [0u8; 32];
    // BUGFIX: `fill_bytes` belongs to `RngCore`, which this module does
    // not import (only `rand::Rng` is in scope); `Rng::fill` is the
    // in-scope equivalent for byte slices.
    rand::rng().fill(&mut bytes);
    URL_SAFE_NO_PAD.encode(bytes)
}
/// S256 PKCE challenge: base64url(SHA-256(verifier)), unpadded.
fn code_challenge(verifier: &str) -> String {
    let digest = Sha256::digest(verifier.as_bytes());
    URL_SAFE_NO_PAD.encode(digest)
}
/// Random 16-byte OAuth `state` value, base64url-encoded.
fn random_state() -> String {
    let mut bytes = [0u8; 16];
    // BUGFIX: use `Rng::fill` — `fill_bytes` comes from `RngCore`, which
    // is not imported in this module.
    rand::rng().fill(&mut bytes);
    URL_SAFE_NO_PAD.encode(bytes)
}

104
cli/src/main.rs Normal file
View File

@@ -0,0 +1,104 @@
mod client;
mod config;
mod login;
mod sync;
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use std::path::PathBuf;
use klog_types::{BatchFilesRequest, UserFiles};
// Top-level argument parser. Plain `//` comments are used deliberately:
// clap turns `///` doc comments into --help text, which would change the
// CLI's visible behavior.
#[derive(Parser)]
#[command(name = "klog", version, about = "klog file sync CLI")]
struct Cli {
#[command(subcommand)]
command: Commands,
}
// Subcommands. Existing `///` lines are clap help text — left byte-for-byte
// as-is; extra notes below use `//` so --help output is unchanged.
#[derive(Subcommand)]
enum Commands {
/// Authenticate via Gitea OAuth2 and save token to ~/.config/klog.json
Login {
/// Gitea instance URL
#[arg(long, default_value = login::DEFAULT_GITEA_URL)]
gitea_url: String,
},
/// Save the klog server URL to ~/.config/klog.json
SetUrl {
// Positional: base URL of the klog backend.
url: String,
},
/// Upload a file to klog
Upload {
/// Path to the file to upload
file: PathBuf,
},
/// Sync remote files to local log dir
Sync {
#[arg(short = 'l', long, default_value = "./klogs")]
log_dir: PathBuf,
},
/// List remote files
Ls {
/// Filter by username (default: all users)
username: Option<String>,
},
/// Remove a remote file (format: username/filename)
Rm {
// Positional: "username/filename" pair, split at the first '/'.
target: String,
},
}
/// CLI entry point: parse arguments and dispatch one subcommand.
/// Commands that talk to the server build a `KlogClient` from env vars
/// or the saved config.
#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();
    match cli.command {
        Commands::Login { gitea_url } => {
            login::login(&gitea_url).await?;
        }
        Commands::SetUrl { url } => {
            login::save_url(&url)?;
            println!("URL saved to {}", login::config_path()?.display());
        }
        Commands::Upload { file } => {
            let client = client::KlogClient::new(&config::Config::from_env()?);
            let result = client.upload_file(&file).await?;
            println!("Uploaded: {:?}", result["uploaded"]);
        }
        Commands::Sync { log_dir } => {
            let client = client::KlogClient::new(&config::Config::from_env()?);
            sync::sync(&client, &log_dir).await?;
        }
        Commands::Ls { username } => {
            let client = client::KlogClient::new(&config::Config::from_env()?);
            let meta = client.list_files().await?;
            let rows: Vec<_> = meta
                .files
                .iter()
                .filter(|f| username.as_ref().map_or(true, |u| &f.username == u))
                .collect();
            println!("{:<20} {:<40} SHA256", "USERNAME", "FILENAME");
            for f in rows {
                // BUGFIX: `&f.sha256[..8]` panics if the server ever returns
                // a digest shorter than 8 bytes or with a multi-byte char in
                // the prefix; fall back to the full string instead.
                let short = f.sha256.get(..8).unwrap_or(&f.sha256);
                println!("{:<20} {:<40} {}", f.username, f.filename, short);
            }
        }
        Commands::Rm { target } => {
            let client = client::KlogClient::new(&config::Config::from_env()?);
            let (username, filename) = target
                .split_once('/')
                .context("Expected format: username/filename")?;
            let req = BatchFilesRequest {
                users: vec![UserFiles {
                    username: username.to_string(),
                    files: vec![filename.to_string()],
                }],
            };
            let result = client.delete_files(&req).await?;
            println!("Deleted: {}", result["deleted"]);
        }
    }
    Ok(())
}

95
cli/src/sync.rs Normal file
View File

@@ -0,0 +1,95 @@
use anyhow::Result;
use sha2::{Digest, Sha256};
use std::{collections::HashMap, io, path::Path};
use klog_types::{BatchFilesRequest, UserFiles};
use crate::client::KlogClient;
/// One-shot pull sync: compare remote file hashes against local ones and
/// download (as a single ZIP batch) everything that is missing or stale.
/// Local-only files are left untouched — this never deletes.
pub async fn sync(client: &KlogClient, log_dir: &Path) -> Result<()> {
let meta = client.list_files().await?;
let local = scan_local(log_dir)?;
// username -> filenames that need (re)downloading.
let mut to_fetch: HashMap<String, Vec<String>> = HashMap::new();
for file in &meta.files {
let local_hash = local.get(&(file.username.clone(), file.filename.clone()));
// Fetch when the file is absent locally or its hash differs.
if local_hash.map(|h| h.as_str()) != Some(file.sha256.as_str()) {
to_fetch
.entry(file.username.clone())
.or_default()
.push(file.filename.clone());
}
}
if to_fetch.is_empty() {
println!("Already up to date.");
return Ok(());
}
let total: usize = to_fetch.values().map(|v| v.len()).sum();
println!("Fetching {total} file(s)...");
let req = BatchFilesRequest {
users: to_fetch
.into_iter()
.map(|(username, files)| UserFiles { username, files })
.collect(),
};
// Server returns one ZIP containing every requested file.
let zip_bytes = client.get_files(&req).await?;
extract_zip(&zip_bytes, log_dir)?;
println!("Sync complete.");
Ok(())
}
/// Hash every `<user>/<file>` under `log_dir`, keyed by
/// `(username, filename)`. A missing log dir is treated as empty.
fn scan_local(log_dir: &Path) -> Result<HashMap<(String, String), String>> {
    let mut hashes = HashMap::new();
    if !log_dir.exists() {
        return Ok(hashes);
    }
    for user_entry in std::fs::read_dir(log_dir)? {
        let user_entry = user_entry?;
        // First level: one directory per user; skip stray files.
        if !user_entry.file_type()?.is_dir() {
            continue;
        }
        let username = user_entry.file_name().to_string_lossy().to_string();
        for file_entry in std::fs::read_dir(log_dir.join(&username))? {
            let file_entry = file_entry?;
            // Second level: regular files only.
            if !file_entry.file_type()?.is_file() {
                continue;
            }
            let filename = file_entry.file_name().to_string_lossy().to_string();
            let digest = Sha256::digest(std::fs::read(file_entry.path())?);
            hashes.insert((username.clone(), filename), hex::encode(digest));
        }
    }
    Ok(hashes)
}
/// Unpack the server's ZIP response into `log_dir`.
///
/// SECURITY FIX: entry names arrive over the network, so each one is
/// resolved with `enclosed_name()` — entries whose paths would escape
/// `log_dir` (absolute paths or `../` traversal, the classic "zip slip")
/// are rejected instead of being written wherever they point.
fn extract_zip(zip_bytes: &[u8], log_dir: &Path) -> io::Result<()> {
    let cursor = std::io::Cursor::new(zip_bytes);
    let mut archive = zip::ZipArchive::new(cursor)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    for i in 0..archive.len() {
        let mut file = archive
            .by_index(i)
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        let Some(rel_path) = file.enclosed_name() else {
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                format!("unsafe path in archive: {}", file.name()),
            ));
        };
        let out_path = log_dir.join(rel_path);
        // Directory entries get created, not opened as files.
        if file.is_dir() {
            std::fs::create_dir_all(&out_path)?;
            continue;
        }
        if let Some(parent) = out_path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let mut out = std::fs::File::create(&out_path)?;
        std::io::copy(&mut file, &mut out)?;
    }
    Ok(())
}

7
types/Cargo.toml Normal file
View File

@@ -0,0 +1,7 @@
[package]
name = "klog-types"
version = "0.1.0"
edition = "2024"
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }

24
types/src/lib.rs Normal file
View File

@@ -0,0 +1,24 @@
use serde::{Deserialize, Serialize};
/// Metadata for one stored file: owner, name, and content hash.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileInfo {
pub username: String,
pub filename: String,
// Hex-encoded SHA-256 of the file contents.
pub sha256: String,
}
/// Response body of GET /admin/files.
#[derive(Debug, Serialize, Deserialize)]
pub struct FilesMetaResponse {
pub files: Vec<FileInfo>,
}
/// One user's selection of files within a batch request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserFiles {
pub username: String,
pub files: Vec<String>,
}
/// Request body for batch download (POST /admin/files/get) and batch
/// delete (DELETE /admin/files).
#[derive(Debug, Serialize, Deserialize)]
pub struct BatchFilesRequest {
pub users: Vec<UserFiles>,
}