summaryrefslogtreecommitdiff
path: root/src/files.rs
diff options
context:
space:
mode:
authormetamuffin <metamuffin@disroot.org>2023-12-07 14:35:48 +0100
committermetamuffin <metamuffin@disroot.org>2023-12-07 14:35:48 +0100
commit6566cbb3f25aa8b1247c259b5e546910b6044f93 (patch)
treee94dd775fc1fd90b4ea7b272d871e71118f102f6 /src/files.rs
parentab0d780062bff88d4fbcdd2c91ad5352c0d6279f (diff)
downloadgnix-6566cbb3f25aa8b1247c259b5e546910b6044f93.tar
gnix-6566cbb3f25aa8b1247c259b5e546910b6044f93.tar.bz2
gnix-6566cbb3f25aa8b1247c259b5e546910b6044f93.tar.zst
move some files around and add horrible access log
Diffstat (limited to 'src/files.rs')
-rw-r--r--src/files.rs278
1 file changed, 0 insertions, 278 deletions
diff --git a/src/files.rs b/src/files.rs
deleted file mode 100644
index 733d045..0000000
--- a/src/files.rs
+++ /dev/null
@@ -1,278 +0,0 @@
-use crate::{config::FileserverConfig, ServiceError};
-use bytes::{Bytes, BytesMut};
-use futures_util::{future, future::Either, ready, stream, FutureExt, Stream, StreamExt};
-use headers::{AcceptRanges, ContentLength, ContentRange, ContentType, HeaderMapExt};
-use http_body_util::{combinators::BoxBody, BodyExt, StreamBody};
-use humansize::FormatSizeOptions;
-use hyper::{
- body::{Frame, Incoming},
- header::{CONTENT_TYPE, LOCATION},
- http::HeaderValue,
- Request, Response, StatusCode,
-};
-use log::debug;
-use markup::Render;
-use percent_encoding::percent_decode_str;
-use std::{fs::Metadata, io, ops::Range, path::Path, pin::Pin, task::Poll};
-use tokio::{
- fs::{read_to_string, File},
- io::AsyncSeekExt,
-};
-use tokio_util::io::poll_read_buf;
-
/// Serve a static file or directory listing rooted at `config.root`.
///
/// The percent-decoded request path is resolved segment by segment against
/// the configured root; any `..` that would climb above the root is rejected.
/// Directories are answered with their `index.html` if present, otherwise
/// (when `config.index` is enabled) with a generated listing; plain files are
/// streamed with support for HTTP `Range` requests.
pub async fn serve_files(
    req: &Request<Incoming>,
    config: &FileserverConfig,
) -> Result<hyper::Response<BoxBody<Bytes, ServiceError>>, ServiceError> {
    let rpath = req.uri().path();

    // Build the filesystem path one segment at a time. `user_path_depth`
    // counts how many segments we currently are below the root, so `..`
    // can never escape `config.root`.
    let mut path = config.root.clone();
    let mut user_path_depth = 0;
    for seg in rpath.split("/") {
        let seg = percent_decode_str(seg).decode_utf8()?;

        // Empty segments (from `//` or leading `/`) and `.` are no-ops.
        if seg == "" || seg == "." {
            continue;
        }

        if seg == ".." {
            // Refuse to pop above the configured root.
            if user_path_depth <= 0 {
                return Err(ServiceError::BadPath);
            }
            path.pop();
            user_path_depth -= 1;
        } else {
            path.push(seg.as_ref());
            user_path_depth += 1;
        }
    }
    if !path.exists() {
        return Err(ServiceError::NotFound);
    }

    let metadata = path.metadata()?;

    if metadata.file_type().is_dir() {
        debug!("sending index for {path:?}");
        // A real index.html always wins over a generated listing.
        if let Ok(indexhtml) = read_to_string(path.join("index.html")).await {
            return Ok(html_string_response(indexhtml));
        }

        if config.index {
            // Redirect `/dir` to `/dir/` so relative links in the generated
            // listing resolve against the directory, not its parent.
            if !rpath.ends_with("/") {
                let mut r = Response::new(String::new());
                *r.status_mut() = StatusCode::FOUND;
                r.headers_mut().insert(
                    LOCATION,
                    HeaderValue::from_str(&format!("{}/", rpath))
                        .map_err(|_| ServiceError::Other)?,
                );
                return Ok(r.map(|b| b.map_err(|e| match e {}).boxed()));
            }

            return index(&path, rpath.to_string())
                .await
                .map(html_string_response);
        } else {
            return Err(ServiceError::NotFound);
        }
    }

    // Resolve an optional `Range` header into a concrete byte range
    // (the whole file when the header is absent).
    let range = req.headers().typed_get::<headers::Range>();
    let range = bytes_range(range, metadata.len())?;

    debug!("sending file {path:?}");
    let file = File::open(path.clone()).await?;

    // Stream the selected byte range in 4 KiB chunks, converting each chunk
    // into a body frame and read failures into `ServiceError::Io`.
    let mut r = Response::new(BoxBody::new(StreamBody::new(
        StreamBody::new(file_stream(file, 4096, range.clone()))
            .map(|e| e.map(|e| Frame::data(e)).map_err(ServiceError::Io)),
    )));

    // A partial range gets 206 + Content-Range; a full-file range stays 200.
    if range.end - range.start != metadata.len() {
        *r.status_mut() = StatusCode::PARTIAL_CONTENT;
        r.headers_mut().typed_insert(
            ContentRange::bytes(range.clone(), metadata.len()).expect("valid ContentRange"),
        );
    }

    let mime = mime_guess::from_path(path).first_or_octet_stream();

    r.headers_mut()
        .typed_insert(ContentLength(range.end - range.start));
    r.headers_mut().typed_insert(ContentType::from(mime));
    r.headers_mut().typed_insert(AcceptRanges::bytes());

    Ok(r)
}
-
// Adapted from warp (https://github.com/seanmonstar/warp/blob/master/src/filters/fs.rs). Thanks!
/// Stream the bytes of `file` within `range`, in chunks of up to `buf_size`.
///
/// Seeks to `range.start` first (when non-zero), then reads until
/// `range.end - range.start` bytes have been emitted or EOF is hit early.
/// Read/seek errors are yielded as stream items rather than panicking.
fn file_stream(
    mut file: File,
    buf_size: usize,
    range: Range<u64>,
) -> impl Stream<Item = Result<Bytes, io::Error>> + Send {
    use std::io::SeekFrom;

    // The seek happens lazily, as the first step of the stream itself.
    let seek = async move {
        if range.start != 0 {
            file.seek(SeekFrom::Start(range.start)).await?;
        }
        Ok(file)
    };

    seek.into_stream()
        .map(move |result| {
            let mut buf = BytesMut::new();
            // Bytes still owed to the consumer before the range is exhausted.
            let mut len = range.end - range.start;
            let mut f = match result {
                Ok(f) => f,
                // Seek failed: the stream consists of that single error.
                Err(f) => return Either::Left(stream::once(future::err(f))),
            };

            Either::Right(stream::poll_fn(move |cx| {
                if len == 0 {
                    return Poll::Ready(None);
                }
                reserve_at_least(&mut buf, buf_size);

                let n = match ready!(poll_read_buf(Pin::new(&mut f), cx, &mut buf)) {
                    Ok(n) => n as u64,
                    Err(err) => {
                        debug!("file read error: {}", err);
                        return Poll::Ready(Some(Err(err)));
                    }
                };

                // EOF before the requested length: end the stream gracefully.
                if n == 0 {
                    debug!("file read found EOF before expected length");
                    return Poll::Ready(None);
                }

                // Trim the final chunk so we never emit bytes past `range.end`.
                let mut chunk = buf.split().freeze();
                if n > len {
                    chunk = chunk.split_to(len as usize);
                    len = 0;
                } else {
                    len -= n;
                }

                Poll::Ready(Some(Ok(chunk)))
            }))
        })
        .flatten()
}
-
-// Also adapted from warp
-fn bytes_range(range: Option<headers::Range>, max_len: u64) -> Result<Range<u64>, ServiceError> {
- use std::ops::Bound;
-
- let range = if let Some(range) = range {
- range
- } else {
- return Ok(0..max_len);
- };
-
- let ret = range
- .iter()
- .map(|(start, end)| {
- let start = match start {
- Bound::Unbounded => 0,
- Bound::Included(s) => s,
- Bound::Excluded(s) => s + 1,
- };
-
- let end = match end {
- Bound::Unbounded => max_len,
- Bound::Included(s) => {
- // For the special case where s == the file size
- if s == max_len {
- s
- } else {
- s + 1
- }
- }
- Bound::Excluded(s) => s,
- };
-
- if start < end && end <= max_len {
- Ok(start..end)
- } else {
- Err(ServiceError::BadRange)
- }
- })
- .next()
- .unwrap_or(Ok(0..max_len));
- ret
-}
-
-fn reserve_at_least(buf: &mut BytesMut, cap: usize) {
- if buf.capacity() - buf.len() < cap {
- buf.reserve(cap);
- }
-}
-
-async fn index(path: &Path, rpath: String) -> Result<String, ServiceError> {
- let files = path
- .read_dir()?
- .map(|e| e.and_then(|e| Ok((e.file_name().into_string().unwrap(), e.metadata()?))))
- .filter(|e| e.as_ref().map(|(e, _)| !e.starts_with(".")).unwrap_or(true))
- .collect::<Result<Vec<_>, _>>()?;
- let banner = read_to_string(path.join("index.banner.html")).await.ok();
- let mut s = String::new();
- IndexTemplate {
- files,
- banner,
- path: rpath,
- }
- .render(&mut s)
- .unwrap();
- Ok(s)
-}
-
-fn html_string_response(s: String) -> hyper::Response<BoxBody<Bytes, ServiceError>> {
- let mut r = Response::new(s);
- r.headers_mut()
- .insert(CONTENT_TYPE, HeaderValue::from_static("text/html"));
- r.map(|b| b.map_err(|e| match e {}).boxed())
-}
-
// HTML template for the generated directory listing (rendered by `index`).
// `path` is the request path shown in the title/heading, `banner` is optional
// raw HTML injected in place of the default heading, and `files` are the
// (name, metadata) pairs to list. Directories get a trailing `/` and the
// label "directory"; files show a human-readable size.
markup::define! {
    IndexTemplate(path: String, banner: Option<String>, files: Vec<(String, Metadata)>) {
        @markup::doctype()
        html {
            head {
                meta[charset="UTF-8"];
                title { "Index of " @path }
            }
            body {
                // NOTE: the banner is emitted unescaped on purpose — it is
                // trusted content read from the served directory itself.
                @if let Some(banner) = banner {
                    @markup::raw(banner)
                } else {
                    h1 { "Index of " @path }
                }
                hr;
                table {
                    // Parent link, except at the fileserver root.
                    @if path != "/" {
                        tr { td { b { a[href=".."] { "../" } } } }
                    }
                    @for (name, meta) in files { tr {
                        td { a[href=name] {
                            @name
                            @if meta.file_type().is_dir() { "/" }
                        } }
                        td {
                            @if meta.file_type().is_dir() {
                                i { "directory" }
                            } else {
                                @humansize::format_size(meta.len(), FormatSizeOptions::default())
                            }
                        }
                    } }
                }
                hr;
                footer { sub { "served by " a[href="https://codeberg.org/metamuffin/gnix"] { "gnix" } } }
            }
        }
    }
}