#![allow(clippy::too_many_arguments)]

use crate::auth::{www_authenticate, AccessPaths, AccessPerm};
use crate::streamer::Streamer;
use crate::utils::{
    decode_uri, encode_uri, get_file_mtime_and_mode, get_file_name, glob, try_get_file_name,
};
use crate::Args;

use anyhow::{anyhow, Result};
use walkdir::WalkDir;
use xml::escape::escape_str_pcdata;

use async_zip::tokio::write::ZipFileWriter;
use async_zip::{Compression, ZipDateTime, ZipEntryBuilder};
use chrono::{LocalResult, TimeZone, Utc};
use futures::TryStreamExt;
use headers::{
    AcceptRanges, AccessControlAllowCredentials, AccessControlAllowOrigin, CacheControl,
    ContentLength, ContentType, ETag, HeaderMap, HeaderMapExt, IfModifiedSince, IfNoneMatch,
    IfRange, LastModified, Range,
};
use hyper::header::{
    HeaderValue, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE, CONTENT_TYPE,
    RANGE,
};
use hyper::{Body, Method, StatusCode, Uri};
use serde::Serialize;
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::fs::Metadata;
use std::io::SeekFrom;
use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::sync::atomic::{self, AtomicBool};
use std::sync::Arc;
use std::time::SystemTime;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWrite};
use tokio::{fs, io};
use tokio_util::compat::FuturesAsyncWriteCompatExt;
use tokio_util::io::StreamReader;
use uuid::Uuid;

pub type Request = hyper::Request<Body>;
pub type Response = hyper::Response<Body>;

const INDEX_HTML: &str = include_str!("../assets/index.html");
const INDEX_CSS: &str = include_str!("../assets/index.css");
const INDEX_JS: &str = include_str!("../assets/index.js");
const FAVICON_ICO: &[u8] = include_bytes!("../assets/favicon.ico");
const INDEX_NAME: &str = "index.html";
const BUF_SIZE: usize = 65536;
const TEXT_MAX_SIZE: u64 = 4194304; // 4M

pub struct Server {
    args: Arc<Args>,
    assets_prefix: String,
    html: Cow<'static, str>,
    single_file_req_paths: Vec<String>,
    running: Arc<AtomicBool>,
}

impl Server {
    pub fn init(args: Arc<Args>, running: Arc<AtomicBool>) -> Result<Self> {
        let assets_prefix = format!("{}__dufs_v{}_", args.uri_prefix, env!("CARGO_PKG_VERSION"));
        let single_file_req_paths = if args.path_is_file {
            vec![
                args.uri_prefix.to_string(),
                args.uri_prefix[0..args.uri_prefix.len() - 1].to_string(),
                encode_uri(&format!(
                    "{}{}",
                    &args.uri_prefix,
                    get_file_name(&args.serve_path)
                )),
            ]
        } else {
            vec![]
        };
        let html = match args.assets.as_ref() {
            Some(path) => Cow::Owned(std::fs::read_to_string(path.join("index.html"))?),
            None => Cow::Borrowed(INDEX_HTML),
        };
        Ok(Self {
            args,
            running,
            single_file_req_paths,
            assets_prefix,
            html,
        })
    }

    pub async fn call(
        self: Arc<Self>,
        req: Request,
        addr: Option<SocketAddr>,
    ) -> Result<Response, hyper::Error> {
        let uri = req.uri().clone();
        let assets_prefix = &self.assets_prefix;
        let enable_cors = self.args.enable_cors;
        let mut http_log_data = self.args.http_logger.data(&req);
        if let Some(addr) = addr {
            http_log_data.insert("remote_addr".to_string(), addr.ip().to_string());
        }

        let mut res = match self.clone().handle(req).await {
            Ok(res) => {
                http_log_data.insert("status".to_string(), res.status().as_u16().to_string());
                if !uri.path().starts_with(assets_prefix) {
                    self.args.http_logger.log(&http_log_data, None);
                }
                res
            }
            Err(err) => {
                let mut res = Response::default();
                let status = StatusCode::INTERNAL_SERVER_ERROR;
                *res.status_mut() = status;
                http_log_data.insert("status".to_string(), status.as_u16().to_string());
                self.args
                    .http_logger
                    .log(&http_log_data, Some(err.to_string()));
                res
            }
        };

        if enable_cors {
            add_cors(&mut res);
        }
        Ok(res)
    }

    pub async fn handle(self: Arc<Self>, req: Request) ->
Result { let mut res = Response::default(); let req_path = req.uri().path(); let headers = req.headers(); let method = req.method().clone(); if method == Method::GET && self.handle_assets(req_path, headers, &mut res).await? { return Ok(res); } let authorization = headers.get(AUTHORIZATION); let relative_path = match self.resolve_path(req_path) { Some(v) => v, None => { status_forbid(&mut res); return Ok(res); } }; let guard = self.args.auth.guard(&relative_path, &method, authorization); let (user, access_paths) = match guard { (None, None) => { self.auth_reject(&mut res)?; return Ok(res); } (Some(_), None) => { status_forbid(&mut res); return Ok(res); } (x, Some(y)) => (x, y), }; let query = req.uri().query().unwrap_or_default(); let query_params: HashMap = form_urlencoded::parse(query.as_bytes()) .map(|(k, v)| (k.to_string(), v.to_string())) .collect(); if method.as_str() == "WRITEABLE" { return Ok(res); } let head_only = method == Method::HEAD; if self.args.path_is_file { if self .single_file_req_paths .iter() .any(|v| v.as_str() == req_path) { self.handle_send_file(&self.args.serve_path, headers, head_only, &mut res) .await?; } else { status_not_found(&mut res); } return Ok(res); } let path = match self.join_path(&relative_path) { Some(v) => v, None => { status_forbid(&mut res); return Ok(res); } }; let path = path.as_path(); let (is_miss, is_dir, is_file, size) = match fs::metadata(path).await.ok() { Some(meta) => (false, meta.is_dir(), meta.is_file(), meta.len()), None => (true, false, false, 0), }; let allow_upload = self.args.allow_upload; let allow_delete = self.args.allow_delete; let allow_search = self.args.allow_search; let allow_archive = self.args.allow_archive; let render_index = self.args.render_index; let render_spa = self.args.render_spa; let render_try_index = self.args.render_try_index; if !self.args.allow_symlink && !is_miss && !self.is_root_contained(path).await { status_not_found(&mut res); return Ok(res); } match method { Method::GET | Method::HEAD => { if is_dir { if render_try_index { if allow_archive && query_params.contains_key("zip") { if !allow_archive { status_not_found(&mut res); return Ok(res); } self.handle_zip_dir(path, head_only, access_paths, &mut res) .await?; } else if allow_search && query_params.contains_key("q") { self.handle_search_dir( path, &query_params, head_only, user, access_paths, &mut res, ) .await?; } else { self.handle_render_index( path, &query_params, headers, head_only, user, access_paths, &mut res, ) .await?; } } else if render_index || render_spa { self.handle_render_index( path, &query_params, headers, head_only, user, access_paths, &mut res, ) .await?; } else if query_params.contains_key("zip") { if !allow_archive { status_not_found(&mut res); return Ok(res); } self.handle_zip_dir(path, head_only, access_paths, &mut res) .await?; } else if allow_search && query_params.contains_key("q") { self.handle_search_dir( path, &query_params, head_only, user, access_paths, &mut res, ) .await?; } else { self.handle_ls_dir( path, true, &query_params, head_only, user, access_paths, &mut res, ) .await?; } } else if is_file { if query_params.contains_key("edit") { self.handle_deal_file(path, DataKind::Edit, head_only, user, &mut res) .await?; } else if query_params.contains_key("view") { self.handle_deal_file(path, DataKind::View, head_only, user, &mut res) .await?; } else { self.handle_send_file(path, headers, head_only, &mut res) .await?; } } else if render_spa { self.handle_render_spa(path, headers, head_only, &mut res) .await?; } else if 
allow_upload && req_path.ends_with('/') { self.handle_ls_dir( path, false, &query_params, head_only, user, access_paths, &mut res, ) .await?; } else { status_not_found(&mut res); } } Method::OPTIONS => { set_webdav_headers(&mut res); } Method::PUT => { if !allow_upload || (!allow_delete && is_file && size > 0) { status_forbid(&mut res); } else { self.handle_upload(path, req, &mut res).await?; } } Method::DELETE => { if !allow_delete { status_forbid(&mut res); } else if !is_miss { self.handle_delete(path, is_dir, &mut res).await? } else { status_not_found(&mut res); } } method => match method.as_str() { "PROPFIND" => { if is_dir { let access_paths = if access_paths.perm().indexonly() { // see https://github.com/sigoden/dufs/issues/229 AccessPaths::new(AccessPerm::ReadOnly) } else { access_paths }; self.handle_propfind_dir(path, headers, access_paths, &mut res) .await?; } else if is_file { self.handle_propfind_file(path, &mut res).await?; } else { status_not_found(&mut res); } } "PROPPATCH" => { if is_file { self.handle_proppatch(req_path, &mut res).await?; } else { status_not_found(&mut res); } } "MKCOL" => { if !allow_upload { status_forbid(&mut res); } else if !is_miss { *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED; *res.body_mut() = Body::from("Already exists"); } else { self.handle_mkcol(path, &mut res).await?; } } "COPY" => { if !allow_upload { status_forbid(&mut res); } else if is_miss { status_not_found(&mut res); } else { self.handle_copy(path, &req, &mut res).await? } } "MOVE" => { if !allow_upload || !allow_delete { status_forbid(&mut res); } else if is_miss { status_not_found(&mut res); } else { self.handle_move(path, &req, &mut res).await? } } "LOCK" => { // Fake lock if is_file { let has_auth = authorization.is_some(); self.handle_lock(req_path, has_auth, &mut res).await?; } else { status_not_found(&mut res); } } "UNLOCK" => { // Fake unlock if is_miss { status_not_found(&mut res); } } _ => { *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED; } }, } Ok(res) } async fn handle_upload(&self, path: &Path, mut req: Request, res: &mut Response) -> Result<()> { ensure_path_parent(path).await?; let mut file = match fs::File::create(&path).await { Ok(v) => v, Err(_) => { status_forbid(res); return Ok(()); } }; let body_with_io_error = req .body_mut() .map_err(|err| io::Error::new(io::ErrorKind::Other, err)); let body_reader = StreamReader::new(body_with_io_error); futures::pin_mut!(body_reader); let ret = io::copy(&mut body_reader, &mut file).await; if ret.is_err() { tokio::fs::remove_file(&path).await?; ret?; } *res.status_mut() = StatusCode::CREATED; Ok(()) } async fn handle_delete(&self, path: &Path, is_dir: bool, res: &mut Response) -> Result<()> { match is_dir { true => fs::remove_dir_all(path).await?, false => fs::remove_file(path).await?, } status_no_content(res); Ok(()) } async fn handle_ls_dir( &self, path: &Path, exist: bool, query_params: &HashMap, head_only: bool, user: Option, access_paths: AccessPaths, res: &mut Response, ) -> Result<()> { let mut paths = vec![]; if exist { paths = match self.list_dir(path, path, access_paths.clone()).await { Ok(paths) => paths, Err(_) => { status_forbid(res); return Ok(()); } } }; self.send_index( path, paths, exist, query_params, head_only, user, access_paths, res, ) } async fn handle_search_dir( &self, path: &Path, query_params: &HashMap, head_only: bool, user: Option, access_paths: AccessPaths, res: &mut Response, ) -> Result<()> { let mut paths: Vec = vec![]; let search = query_params .get("q") .ok_or_else(|| anyhow!("invalid 
q"))? .to_lowercase(); if search.is_empty() { return self .handle_ls_dir(path, true, query_params, head_only, user, access_paths, res) .await; } else { let path_buf = path.to_path_buf(); let hidden = Arc::new(self.args.hidden.to_vec()); let hidden = hidden.clone(); let running = self.running.clone(); let access_paths = access_paths.clone(); let search_paths = tokio::task::spawn_blocking(move || { let mut paths: Vec = vec![]; for dir in access_paths.leaf_paths(&path_buf) { let mut it = WalkDir::new(&dir).into_iter(); it.next(); while let Some(Ok(entry)) = it.next() { if !running.load(atomic::Ordering::SeqCst) { break; } let entry_path = entry.path(); let base_name = get_file_name(entry_path); let file_type = entry.file_type(); let mut is_dir_type: bool = file_type.is_dir(); if file_type.is_symlink() { match std::fs::symlink_metadata(entry_path) { Ok(meta) => { is_dir_type = meta.is_dir(); } Err(_) => { continue; } } } if is_hidden(&hidden, base_name, is_dir_type) { if file_type.is_dir() { it.skip_current_dir(); } continue; } if !base_name.to_lowercase().contains(&search) { continue; } paths.push(entry_path.to_path_buf()); } } paths }) .await?; for search_path in search_paths.into_iter() { if let Ok(Some(item)) = self.to_pathitem(search_path, path.to_path_buf()).await { paths.push(item); } } } self.send_index( path, paths, true, query_params, head_only, user, access_paths, res, ) } async fn handle_zip_dir( &self, path: &Path, head_only: bool, access_paths: AccessPaths, res: &mut Response, ) -> Result<()> { let (mut writer, reader) = tokio::io::duplex(BUF_SIZE); let filename = try_get_file_name(path)?; set_content_diposition(res, false, &format!("{}.zip", filename))?; res.headers_mut() .insert("content-type", HeaderValue::from_static("application/zip")); if head_only { return Ok(()); } let path = path.to_owned(); let hidden = self.args.hidden.clone(); let running = self.running.clone(); tokio::spawn(async move { if let Err(e) = zip_dir(&mut writer, &path, access_paths, &hidden, running).await { error!("Failed to zip {}, {}", path.display(), e); } }); let reader = Streamer::new(reader, BUF_SIZE); *res.body_mut() = Body::wrap_stream(reader.into_stream()); Ok(()) } async fn handle_render_index( &self, path: &Path, query_params: &HashMap, headers: &HeaderMap, head_only: bool, user: Option, access_paths: AccessPaths, res: &mut Response, ) -> Result<()> { let index_path = path.join(INDEX_NAME); if fs::metadata(&index_path) .await .ok() .map(|v| v.is_file()) .unwrap_or_default() { self.handle_send_file(&index_path, headers, head_only, res) .await?; } else if self.args.render_try_index { self.handle_ls_dir(path, true, query_params, head_only, user, access_paths, res) .await?; } else { status_not_found(res) } Ok(()) } async fn handle_render_spa( &self, path: &Path, headers: &HeaderMap, head_only: bool, res: &mut Response, ) -> Result<()> { if path.extension().is_none() { let path = self.args.serve_path.join(INDEX_NAME); self.handle_send_file(&path, headers, head_only, res) .await?; } else { status_not_found(res) } Ok(()) } async fn handle_assets( &self, req_path: &str, headers: &HeaderMap, res: &mut Response, ) -> Result { if let Some(name) = req_path.strip_prefix(&self.assets_prefix) { match self.args.assets.as_ref() { Some(assets_path) => { let path = assets_path.join(name); self.handle_send_file(&path, headers, false, res).await?; } None => match name { "index.js" => { *res.body_mut() = Body::from(INDEX_JS); res.headers_mut().insert( "content-type", HeaderValue::from_static("application/javascript; 
charset=UTF-8"), ); } "index.css" => { *res.body_mut() = Body::from(INDEX_CSS); res.headers_mut().insert( "content-type", HeaderValue::from_static("text/css; charset=UTF-8"), ); } "favicon.ico" => { *res.body_mut() = Body::from(FAVICON_ICO); res.headers_mut() .insert("content-type", HeaderValue::from_static("image/x-icon")); } _ => { status_not_found(res); } }, } res.headers_mut().insert( "cache-control", HeaderValue::from_static("public, max-age=31536000, immutable"), ); res.headers_mut().insert( "x-content-type-options", HeaderValue::from_static("nosniff"), ); Ok(true) } else { Ok(false) } } async fn handle_send_file( &self, path: &Path, headers: &HeaderMap, head_only: bool, res: &mut Response, ) -> Result<()> { let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),); let (mut file, meta) = (file?, meta?); let mut use_range = true; if let Some((etag, last_modified)) = extract_cache_headers(&meta) { let cached = { if let Some(if_none_match) = headers.typed_get::() { !if_none_match.precondition_passes(&etag) } else if let Some(if_modified_since) = headers.typed_get::() { !if_modified_since.is_modified(last_modified.into()) } else { false } }; if cached { *res.status_mut() = StatusCode::NOT_MODIFIED; return Ok(()); } res.headers_mut().typed_insert(last_modified); res.headers_mut().typed_insert(etag.clone()); if headers.typed_get::().is_some() { use_range = headers .typed_get::() .map(|if_range| !if_range.is_modified(Some(&etag), Some(&last_modified))) // Always be fresh if there is no validators .unwrap_or(true); } else { use_range = false; } } let range = if use_range { parse_range(headers) } else { None }; res.headers_mut().insert( CONTENT_TYPE, HeaderValue::from_str(&get_content_type(path).await?)?, ); let filename = try_get_file_name(path)?; set_content_diposition(res, true, filename)?; res.headers_mut().typed_insert(AcceptRanges::bytes()); let size = meta.len(); if let Some(range) = range { if range .end .map_or_else(|| range.start < size, |v| v >= range.start) && file.seek(SeekFrom::Start(range.start)).await.is_ok() { let end = range.end.unwrap_or(size - 1).min(size - 1); let part_size = end - range.start + 1; let reader = Streamer::new(file, BUF_SIZE); *res.status_mut() = StatusCode::PARTIAL_CONTENT; let content_range = format!("bytes {}-{}/{}", range.start, end, size); res.headers_mut() .insert(CONTENT_RANGE, content_range.parse()?); res.headers_mut() .insert(CONTENT_LENGTH, format!("{part_size}").parse()?); if head_only { return Ok(()); } *res.body_mut() = Body::wrap_stream(reader.into_stream_sized(part_size)); } else { *res.status_mut() = StatusCode::RANGE_NOT_SATISFIABLE; res.headers_mut() .insert(CONTENT_RANGE, format!("bytes */{size}").parse()?); } } else { res.headers_mut() .insert(CONTENT_LENGTH, format!("{size}").parse()?); if head_only { return Ok(()); } let reader = Streamer::new(file, BUF_SIZE); *res.body_mut() = Body::wrap_stream(reader.into_stream()); } Ok(()) } async fn handle_deal_file( &self, path: &Path, kind: DataKind, head_only: bool, user: Option, res: &mut Response, ) -> Result<()> { let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),); let (file, meta) = (file?, meta?); let href = format!( "/{}", normalize_path(path.strip_prefix(&self.args.serve_path)?) 
); let mut buffer: Vec = vec![]; file.take(1024).read_to_end(&mut buffer).await?; let editable = meta.len() <= TEXT_MAX_SIZE && content_inspector::inspect(&buffer).is_text(); let data = EditData { href, kind, uri_prefix: self.args.uri_prefix.clone(), allow_upload: self.args.allow_upload, allow_delete: self.args.allow_delete, auth: self.args.auth.exist(), user, editable, }; res.headers_mut() .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8)); let output = self .html .replace("__ASSETS_PREFIX__", &self.assets_prefix) .replace("__INDEX_DATA__", &serde_json::to_string(&data)?); res.headers_mut() .typed_insert(ContentLength(output.as_bytes().len() as u64)); if head_only { return Ok(()); } *res.body_mut() = output.into(); Ok(()) } async fn handle_propfind_dir( &self, path: &Path, headers: &HeaderMap, access_paths: AccessPaths, res: &mut Response, ) -> Result<()> { let depth: u32 = match headers.get("depth") { Some(v) => match v.to_str().ok().and_then(|v| v.parse().ok()) { Some(v) => v, None => { *res.status_mut() = StatusCode::BAD_REQUEST; return Ok(()); } }, None => 1, }; let mut paths = match self.to_pathitem(path, &self.args.serve_path).await? { Some(v) => vec![v], None => vec![], }; if depth != 0 { match self .list_dir(path, &self.args.serve_path, access_paths) .await { Ok(child) => paths.extend(child), Err(_) => { status_forbid(res); return Ok(()); } } } let output = paths .iter() .map(|v| v.to_dav_xml(self.args.uri_prefix.as_str())) .fold(String::new(), |mut acc, v| { acc.push_str(&v); acc }); res_multistatus(res, &output); Ok(()) } async fn handle_propfind_file(&self, path: &Path, res: &mut Response) -> Result<()> { if let Some(pathitem) = self.to_pathitem(path, &self.args.serve_path).await? { res_multistatus(res, &pathitem.to_dav_xml(self.args.uri_prefix.as_str())); } else { status_not_found(res); } Ok(()) } async fn handle_mkcol(&self, path: &Path, res: &mut Response) -> Result<()> { fs::create_dir_all(path).await?; *res.status_mut() = StatusCode::CREATED; Ok(()) } async fn handle_copy(&self, path: &Path, req: &Request, res: &mut Response) -> Result<()> { let dest = match self.extract_dest(req, res) { Some(dest) => dest, None => { return Ok(()); } }; let meta = fs::symlink_metadata(path).await?; if meta.is_dir() { status_forbid(res); return Ok(()); } ensure_path_parent(&dest).await?; fs::copy(path, &dest).await?; status_no_content(res); Ok(()) } async fn handle_move(&self, path: &Path, req: &Request, res: &mut Response) -> Result<()> { let dest = match self.extract_dest(req, res) { Some(dest) => dest, None => { return Ok(()); } }; ensure_path_parent(&dest).await?; fs::rename(path, &dest).await?; status_no_content(res); Ok(()) } async fn handle_lock(&self, req_path: &str, auth: bool, res: &mut Response) -> Result<()> { let token = if auth { format!("opaquelocktoken:{}", Uuid::new_v4()) } else { Utc::now().timestamp().to_string() }; res.headers_mut().insert( "content-type", HeaderValue::from_static("application/xml; charset=utf-8"), ); res.headers_mut() .insert("lock-token", format!("<{token}>").parse()?); *res.body_mut() = Body::from(format!( r#" {token} {req_path} "# )); Ok(()) } async fn handle_proppatch(&self, req_path: &str, res: &mut Response) -> Result<()> { let output = format!( r#" {req_path} HTTP/1.1 403 Forbidden "# ); res_multistatus(res, &output); Ok(()) } #[allow(clippy::too_many_arguments)] fn send_index( &self, path: &Path, mut paths: Vec, exist: bool, query_params: &HashMap, head_only: bool, user: Option, access_paths: AccessPaths, res: &mut Response, ) 
-> Result<()> { if let Some(sort) = query_params.get("sort") { if sort == "name" { paths.sort_by(|v1, v2| v1.sort_by_name(v2)) } else if sort == "mtime" { paths.sort_by(|v1, v2| v1.sort_by_mtime(v2)) } else if sort == "size" { paths.sort_by(|v1, v2| v1.sort_by_size(v2)) } if query_params .get("order") .map(|v| v == "desc") .unwrap_or_default() { paths.reverse() } } else { paths.sort_by(|v1, v2| v1.sort_by_name(v2)) } if query_params.contains_key("simple") { let output = paths .into_iter() .map(|v| { if v.is_dir() { format!("{}/\n", v.name) } else { format!("{}\n", v.name) } }) .collect::>() .join(""); res.headers_mut() .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8)); res.headers_mut() .typed_insert(ContentLength(output.as_bytes().len() as u64)); *res.body_mut() = output.into(); if head_only { return Ok(()); } return Ok(()); } let href = format!( "/{}", normalize_path(path.strip_prefix(&self.args.serve_path)?) ); let readwrite = access_paths.perm().readwrite(); let data = IndexData { kind: DataKind::Index, href, uri_prefix: self.args.uri_prefix.clone(), allow_upload: self.args.allow_upload && readwrite, allow_delete: self.args.allow_delete && readwrite, allow_search: self.args.allow_search, allow_archive: self.args.allow_archive, dir_exists: exist, auth: self.args.auth.exist(), user, paths, }; let output = if query_params.contains_key("json") { res.headers_mut() .typed_insert(ContentType::from(mime_guess::mime::APPLICATION_JSON)); serde_json::to_string_pretty(&data)? } else { res.headers_mut() .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8)); self.html .replace("__ASSETS_PREFIX__", &self.assets_prefix) .replace("__INDEX_DATA__", &serde_json::to_string(&data)?) }; res.headers_mut() .typed_insert(ContentLength(output.as_bytes().len() as u64)); res.headers_mut() .typed_insert(CacheControl::new().with_no_cache()); res.headers_mut().insert( "x-content-type-options", HeaderValue::from_static("nosniff"), ); if head_only { return Ok(()); } *res.body_mut() = output.into(); Ok(()) } fn auth_reject(&self, res: &mut Response) -> Result<()> { set_webdav_headers(res); www_authenticate(res, &self.args)?; *res.status_mut() = StatusCode::UNAUTHORIZED; Ok(()) } async fn is_root_contained(&self, path: &Path) -> bool { fs::canonicalize(path) .await .ok() .map(|v| v.starts_with(&self.args.serve_path)) .unwrap_or_default() } fn extract_dest(&self, req: &Request, res: &mut Response) -> Option { let headers = req.headers(); let dest_path = match self.extract_destination_header(headers) { Some(dest) => dest, None => { *res.status_mut() = StatusCode::BAD_REQUEST; return None; } }; let relative_path = match self.resolve_path(&dest_path) { Some(v) => v, None => { *res.status_mut() = StatusCode::BAD_REQUEST; return None; } }; let authorization = headers.get(AUTHORIZATION); let guard = self .args .auth .guard(&relative_path, req.method(), authorization); match guard { (_, Some(_)) => {} _ => { status_forbid(res); return None; } }; let dest = match self.join_path(&relative_path) { Some(dest) => dest, None => { *res.status_mut() = StatusCode::BAD_REQUEST; return None; } }; Some(dest) } fn extract_destination_header(&self, headers: &HeaderMap) -> Option { let dest = headers.get("Destination")?.to_str().ok()?; let uri: Uri = dest.parse().ok()?; Some(uri.path().to_string()) } fn resolve_path(&self, path: &str) -> Option { let path = path.trim_matches('/'); let path = decode_uri(path)?; let prefix = self.args.path_prefix.as_str(); if prefix == "/" { return Some(path.to_string()); } 
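        // Otherwise strip the configured `path_prefix` before resolving against the
        // served root (e.g. a prefix of `/dufs` maps the request path `/dufs/dir/file`
        // to `dir/file`; the prefix value here is only an illustrative assumption).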
        path.strip_prefix(prefix.trim_start_matches('/'))
            .map(|v| v.trim_matches('/').to_string())
    }

    fn join_path(&self, path: &str) -> Option<PathBuf> {
        if path.is_empty() {
            return Some(self.args.serve_path.clone());
        }
        let path = if cfg!(windows) {
            path.replace('/', "\\")
        } else {
            path.to_string()
        };
        Some(self.args.serve_path.join(path))
    }

    async fn list_dir(
        &self,
        entry_path: &Path,
        base_path: &Path,
        access_paths: AccessPaths,
    ) -> Result<Vec<PathItem>> {
        let mut paths: Vec<PathItem> = vec![];
        if access_paths.perm().indexonly() {
            for name in access_paths.child_paths() {
                let entry_path = entry_path.join(name);
                self.add_pathitem(&mut paths, base_path, &entry_path).await;
            }
        } else {
            let mut rd = fs::read_dir(entry_path).await?;
            while let Ok(Some(entry)) = rd.next_entry().await {
                let entry_path = entry.path();
                self.add_pathitem(&mut paths, base_path, &entry_path).await;
            }
        }
        Ok(paths)
    }

    async fn add_pathitem(&self, paths: &mut Vec<PathItem>, base_path: &Path, entry_path: &Path) {
        let base_name = get_file_name(entry_path);
        if let Ok(Some(item)) = self.to_pathitem(entry_path, base_path).await {
            if is_hidden(&self.args.hidden, base_name, item.is_dir()) {
                return;
            }
            paths.push(item);
        }
    }

    async fn to_pathitem<P: AsRef<Path>>(&self, path: P, base_path: P) -> Result<Option<PathItem>> {
        let path = path.as_ref();
        let (meta, meta2) = tokio::join!(fs::metadata(&path), fs::symlink_metadata(&path));
        let (meta, meta2) = (meta?, meta2?);
        let is_symlink = meta2.is_symlink();
        if !self.args.allow_symlink && is_symlink && !self.is_root_contained(path).await {
            return Ok(None);
        }
        let is_dir = meta.is_dir();
        let path_type = match (is_symlink, is_dir) {
            (true, true) => PathType::SymlinkDir,
            (false, true) => PathType::Dir,
            (true, false) => PathType::SymlinkFile,
            (false, false) => PathType::File,
        };
        let mtime = to_timestamp(&meta.modified()?);
        let size = match path_type {
            PathType::Dir | PathType::SymlinkDir => None,
            PathType::File | PathType::SymlinkFile => Some(meta.len()),
        };
        let rel_path = path.strip_prefix(base_path)?;
        let name = normalize_path(rel_path);
        Ok(Some(PathItem {
            path_type,
            name,
            mtime,
            size,
        }))
    }
}

#[derive(Debug, Serialize, PartialEq)]
enum DataKind {
    Index,
    Edit,
    View,
}

#[derive(Debug, Serialize)]
struct IndexData {
    href: String,
    kind: DataKind,
    uri_prefix: String,
    allow_upload: bool,
    allow_delete: bool,
    allow_search: bool,
    allow_archive: bool,
    dir_exists: bool,
    auth: bool,
    user: Option<String>,
    paths: Vec<PathItem>,
}

#[derive(Debug, Serialize)]
struct EditData {
    href: String,
    kind: DataKind,
    uri_prefix: String,
    allow_upload: bool,
    allow_delete: bool,
    auth: bool,
    user: Option<String>,
    editable: bool,
}

#[derive(Debug, Serialize, Eq, PartialEq, Ord, PartialOrd)]
struct PathItem {
    path_type: PathType,
    name: String,
    mtime: u64,
    size: Option<u64>,
}

impl PathItem {
    pub fn is_dir(&self) -> bool {
        self.path_type == PathType::Dir || self.path_type == PathType::SymlinkDir
    }

    pub fn to_dav_xml(&self, prefix: &str) -> String {
        let mtime = match Utc.timestamp_millis_opt(self.mtime as i64) {
            LocalResult::Single(v) => v.to_rfc2822(),
            _ => String::new(),
        };
        let mut href = encode_uri(&format!("{}{}", prefix, &self.name));
        if self.is_dir() && !href.ends_with('/') {
            href.push('/');
        }
        let displayname = escape_str_pcdata(self.base_name());
        match self.path_type {
            PathType::Dir | PathType::SymlinkDir => format!(
                r#"<D:response>
<D:href>{href}</D:href>
<D:propstat>
<D:prop>
<D:displayname>{displayname}</D:displayname>
<D:getlastmodified>{mtime}</D:getlastmodified>
<D:resourcetype><D:collection/></D:resourcetype>
</D:prop>
<D:status>HTTP/1.1 200 OK</D:status>
</D:propstat>
</D:response>"#
            ),
            PathType::File | PathType::SymlinkFile => format!(
                r#"<D:response>
<D:href>{}</D:href>
<D:propstat>
<D:prop>
<D:displayname>{}</D:displayname>
<D:getcontentlength>{}</D:getcontentlength>
<D:getlastmodified>{}</D:getlastmodified>
<D:resourcetype></D:resourcetype>
</D:prop>
<D:status>HTTP/1.1 200 OK</D:status>
</D:propstat>
</D:response>"#,
                href,
                displayname,
                self.size.unwrap_or_default(),
                mtime
            ),
        }
    }

    pub fn base_name(&self) -> &str {
        self.name.split('/').last().unwrap_or_default()
    }
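
    // Sort helpers used by the index listing: directories always group before
    // files (see the `Ord` impl on `PathType` below), and within each group
    // entries are compared by name, mtime, or size as selected by the `sort`
    // query parameter.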
    pub fn sort_by_name(&self, other: &Self) -> Ordering {
        match self.path_type.cmp(&other.path_type) {
            Ordering::Equal => {
                alphanumeric_sort::compare_str(self.name.to_lowercase(), other.name.to_lowercase())
            }
            v => v,
        }
    }

    pub fn sort_by_mtime(&self, other: &Self) -> Ordering {
        match self.path_type.cmp(&other.path_type) {
            Ordering::Equal => self.mtime.cmp(&other.mtime),
            v => v,
        }
    }

    pub fn sort_by_size(&self, other: &Self) -> Ordering {
        match self.path_type.cmp(&other.path_type) {
            Ordering::Equal => {
                if self.is_dir() {
                    alphanumeric_sort::compare_str(
                        self.name.to_lowercase(),
                        other.name.to_lowercase(),
                    )
                } else {
                    self.size.unwrap_or(0).cmp(&other.size.unwrap_or(0))
                }
            }
            v => v,
        }
    }
}

#[derive(Debug, Serialize, Eq, PartialEq)]
enum PathType {
    Dir,
    SymlinkDir,
    File,
    SymlinkFile,
}

impl Ord for PathType {
    fn cmp(&self, other: &Self) -> Ordering {
        let to_value = |t: &Self| -> u8 {
            if matches!(t, Self::Dir | Self::SymlinkDir) {
                0
            } else {
                1
            }
        };
        to_value(self).cmp(&to_value(other))
    }
}

impl PartialOrd for PathType {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn to_timestamp(time: &SystemTime) -> u64 {
    time.duration_since(SystemTime::UNIX_EPOCH)
        .unwrap_or_default()
        .as_millis() as u64
}

fn normalize_path<P: AsRef<Path>>(path: P) -> String {
    let path = path.as_ref().to_str().unwrap_or_default();
    if cfg!(windows) {
        path.replace('\\', "/")
    } else {
        path.to_string()
    }
}

async fn ensure_path_parent(path: &Path) -> Result<()> {
    if let Some(parent) = path.parent() {
        if fs::symlink_metadata(parent).await.is_err() {
            fs::create_dir_all(&parent).await?;
        }
    }
    Ok(())
}

fn add_cors(res: &mut Response) {
    res.headers_mut()
        .typed_insert(AccessControlAllowOrigin::ANY);
    res.headers_mut()
        .typed_insert(AccessControlAllowCredentials);
    res.headers_mut().insert(
        "Access-Control-Allow-Methods",
        HeaderValue::from_static("*"),
    );
    res.headers_mut().insert(
        "Access-Control-Allow-Headers",
        HeaderValue::from_static("Authorization,*"),
    );
    res.headers_mut().insert(
        "Access-Control-Expose-Headers",
        HeaderValue::from_static("Authorization,*"),
    );
}

fn res_multistatus(res: &mut Response, content: &str) {
    *res.status_mut() = StatusCode::MULTI_STATUS;
    res.headers_mut().insert(
        "content-type",
        HeaderValue::from_static("application/xml; charset=utf-8"),
    );
    *res.body_mut() = Body::from(format!(
        r#"<?xml version="1.0" encoding="utf-8" ?>
<D:multistatus xmlns:D="DAV:">
{content}
</D:multistatus>"#,
    ));
}

async fn zip_dir<W: AsyncWrite + Unpin>(
    writer: &mut W,
    dir: &Path,
    access_paths: AccessPaths,
    hidden: &[String],
    running: Arc<AtomicBool>,
) -> Result<()> {
    let mut writer = ZipFileWriter::with_tokio(writer);
    let hidden = Arc::new(hidden.to_vec());
    let hidden = hidden.clone();
    let dir_clone = dir.to_path_buf();
    let zip_paths = tokio::task::spawn_blocking(move || {
        let mut paths: Vec<PathBuf> = vec![];
        for dir in access_paths.leaf_paths(&dir_clone) {
            let mut it = WalkDir::new(&dir).into_iter();
            it.next();
            while let Some(Ok(entry)) = it.next() {
                if !running.load(atomic::Ordering::SeqCst) {
                    break;
                }
                let entry_path = entry.path();
                let base_name = get_file_name(entry_path);
                let file_type = entry.file_type();
                let mut is_dir_type: bool = file_type.is_dir();
                if file_type.is_symlink() {
                    match std::fs::symlink_metadata(entry_path) {
                        Ok(meta) => {
                            is_dir_type = meta.is_dir();
                        }
                        Err(_) => {
                            continue;
                        }
                    }
                }
                if is_hidden(&hidden, base_name, is_dir_type) {
                    if file_type.is_dir() {
                        it.skip_current_dir();
                    }
                    continue;
                }
                if entry.path().symlink_metadata().is_err() {
                    continue;
                }
                if !file_type.is_file() {
                    continue;
                }
                paths.push(entry_path.to_path_buf());
            }
        }
        paths
    })
    .await?;
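
    // Stream each collected file into the archive, preserving its modification
    // time and (on Unix) permission bits via the zip entry metadata.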
    for zip_path in zip_paths.into_iter() {
        let filename = match zip_path.strip_prefix(dir).ok().and_then(|v| v.to_str()) {
            Some(v) => v,
            None => continue,
        };
        let (datetime, mode) = get_file_mtime_and_mode(&zip_path).await?;
        let builder = ZipEntryBuilder::new(filename.into(), Compression::Deflate)
            .unix_permissions(mode)
            .last_modification_date(ZipDateTime::from_chrono(&datetime));
        let mut file = File::open(&zip_path).await?;
        let mut file_writer = writer.write_entry_stream(builder).await?.compat_write();
        io::copy(&mut file, &mut file_writer).await?;
        file_writer.into_inner().close().await?;
    }
    writer.close().await?;
    Ok(())
}

fn extract_cache_headers(meta: &Metadata) -> Option<(ETag, LastModified)> {
    let mtime = meta.modified().ok()?;
    let timestamp = to_timestamp(&mtime);
    let size = meta.len();
    let etag = format!(r#""{timestamp}-{size}""#).parse::<ETag>().ok()?;
    let last_modified = LastModified::from(mtime);
    Some((etag, last_modified))
}

#[derive(Debug)]
struct RangeValue {
    start: u64,
    end: Option<u64>,
}

fn parse_range(headers: &HeaderMap) -> Option<RangeValue> {
    let range_hdr = headers.get(RANGE)?;
    let hdr = range_hdr.to_str().ok()?;
    let mut sp = hdr.splitn(2, '=');
    let units = sp.next()?;
    if units == "bytes" {
        let range = sp.next()?;
        let mut sp_range = range.splitn(2, '-');
        let start: u64 = sp_range.next()?.parse().ok()?;
        let end: Option<u64> = if let Some(end) = sp_range.next() {
            if end.is_empty() {
                None
            } else {
                Some(end.parse().ok()?)
            }
        } else {
            None
        };
        Some(RangeValue { start, end })
    } else {
        None
    }
}

fn status_forbid(res: &mut Response) {
    *res.status_mut() = StatusCode::FORBIDDEN;
    *res.body_mut() = Body::from("Forbidden");
}

fn status_not_found(res: &mut Response) {
    *res.status_mut() = StatusCode::NOT_FOUND;
    *res.body_mut() = Body::from("Not Found");
}

fn status_no_content(res: &mut Response) {
    *res.status_mut() = StatusCode::NO_CONTENT;
}

fn set_content_diposition(res: &mut Response, inline: bool, filename: &str) -> Result<()> {
    let kind = if inline { "inline" } else { "attachment" };
    let value = if filename.is_ascii() {
        HeaderValue::from_str(&format!("{kind}; filename=\"{}\"", filename,))?
    } else {
        HeaderValue::from_str(&format!(
            "{kind}; filename=\"{}\"; filename*=UTF-8''{}",
            filename,
            encode_uri(filename),
        ))?
    };
    res.headers_mut().insert(CONTENT_DISPOSITION, value);
    Ok(())
}

fn is_hidden(hidden: &[String], file_name: &str, is_dir_type: bool) -> bool {
    hidden.iter().any(|v| {
        if is_dir_type {
            if let Some(x) = v.strip_suffix('/') {
                return glob(x, file_name);
            }
        }
        glob(v, file_name)
    })
}

fn set_webdav_headers(res: &mut Response) {
    res.headers_mut().insert(
        "Allow",
        HeaderValue::from_static("GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,COPY,MOVE"),
    );
    res.headers_mut()
        .insert("DAV", HeaderValue::from_static("1,2"));
}

async fn get_content_type(path: &Path) -> Result<String> {
    let mut buffer: Vec<u8> = vec![];
    fs::File::open(path)
        .await?
        .take(1024)
        .read_to_end(&mut buffer)
        .await?;
    let mime = mime_guess::from_path(path).first();
    let is_text = content_inspector::inspect(&buffer).is_text();
    let content_type = if is_text {
        let mut detector = chardetng::EncodingDetector::new();
        detector.feed(&buffer, buffer.len() < 1024);
        let (enc, confident) = detector.guess_assess(None, true);
        let charset = if confident {
            format!("; charset={}", enc.name())
        } else {
            "".into()
        };
        match mime {
            Some(m) => format!("{m}{charset}"),
            None => format!("text/plain{charset}"),
        }
    } else {
        match mime {
            Some(m) => m.to_string(),
            None => "application/octet-stream".into(),
        }
    };
    Ok(content_type)
}
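
// A minimal sketch of how `parse_range` above interprets common `Range` header
// shapes. The module and test names are illustrative additions, not an existing
// test suite from this file.
#[cfg(test)]
mod parse_range_tests {
    use super::*;

    #[test]
    fn closed_byte_range() {
        let mut headers = HeaderMap::new();
        headers.insert(RANGE, HeaderValue::from_static("bytes=0-499"));
        let range = parse_range(&headers).expect("closed range should parse");
        assert_eq!(range.start, 0);
        assert_eq!(range.end, Some(499));
    }

    #[test]
    fn open_ended_byte_range() {
        // "bytes=500-" means "from offset 500 to the end"; `handle_send_file`
        // later clamps the missing end to `size - 1`.
        let mut headers = HeaderMap::new();
        headers.insert(RANGE, HeaderValue::from_static("bytes=500-"));
        let range = parse_range(&headers).expect("open-ended range should parse");
        assert_eq!(range.start, 500);
        assert_eq!(range.end, None);
    }

    #[test]
    fn non_byte_units_are_ignored() {
        let mut headers = HeaderMap::new();
        headers.insert(RANGE, HeaderValue::from_static("items=0-10"));
        assert!(parse_range(&headers).is_none());
    }
}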