Decompress objects from git server, refactor BoxByteStream in the process
parent b88f555390, commit 1667bac39c

Cargo.lock (generated): 1 line changed
@@ -523,6 +523,7 @@ name = "domani"
 version = "0.1.0"
 dependencies = [
  "acme2",
+ "async-compression",
  "bytes",
  "clap",
  "env_logger",
Cargo.toml

@@ -49,6 +49,7 @@ hyper-trust-dns = "0.5.0"
 gix-hash = "0.14.1"
 reqwest = { version = "0.11.23", features = ["gzip", "deflate", "stream"] }
 gix-object = "0.41.0"
+async-compression = { version = "0.4.6", features = ["tokio", "deflate", "zlib"] }

 [patch.crates-io]

@@ -322,9 +322,9 @@ impl super::Store for FSStore {
                 // that is cloned.
                 let data = bytes::Bytes::copy_from_slice(file_object.data.as_slice());

-                Ok(util::into_box_byte_stream(stream::once(
-                    async move { Ok(data) },
-                )))
+                Ok(util::BoxByteStream::from_stream(stream::once(async move {
+                    Ok(data)
+                })))
             }
             Kind::Commit | Kind::Tag => Err(unexpected::Error::from(
                 format!("found object of kind {} in tree", file_object.kind).as_str(),
@@ -41,12 +41,11 @@ impl Proxy {
             .or_unexpected_while("parsing url as reqwest url")?
         };

-        let base_path = match url.to_file_path() {
-            Ok(path) => path,
-            Err(()) => return Err(unexpected::Error::from("extracting path from url")),
-        };
-
-        let new_path = base_path.join(sub_path);
+        let new_path = url
+            .path()
+            .parse::<std::path::PathBuf>()
+            .or_unexpected_while("parsing url path")?
+            .join(sub_path);

         url.set_path(
             new_path
@@ -64,7 +63,7 @@ impl Proxy {
         let (url, branch_name) = Self::deconstruct_descr(descr);

         let refs_url =
-            Self::construct_url(url, "/info/refs").or_unexpected_while("constructing refs url")?;
+            Self::construct_url(url, "info/refs").or_unexpected_while("constructing refs url")?;

         // when fetching refs we assume that any issue indicates that the origin itself
         // (and therefore the URL) has some kind of issue.
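Note (not part of the patch): the path handling above changes because Url::to_file_path() only succeeds for file://-style URLs, so for an ordinary remote origin the old code would end up in the Err(()) branch. The new code parses the URL's existing path component into a PathBuf and joins the sub-path onto it; the sub-paths also lose their leading slash ("info/refs" rather than "/info/refs") because PathBuf::join replaces the whole base when handed an absolute path. A minimal sketch of the difference, assuming a plain https origin URL and the url crate (the type reqwest re-exports):

// Sketch only: "https://example.com/some/repo.git" is an assumed example origin.
use url::Url;

fn main() {
    let mut url = Url::parse("https://example.com/some/repo.git").unwrap();

    // Old approach: to_file_path() returns Err for non-file:// URLs.
    assert!(url.to_file_path().is_err());

    // New approach: treat the URL path as a PathBuf and join the sub-path.
    // The sub-path must not start with '/', or PathBuf::join discards the base.
    let new_path = url
        .path()
        .parse::<std::path::PathBuf>()
        .unwrap()
        .join("info/refs");

    url.set_path(new_path.to_str().unwrap());
    assert_eq!(url.as_str(), "https://example.com/some/repo.git/info/refs");
}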
@@ -102,15 +101,13 @@ impl Proxy {
         &self,
         descr: &origin::Descr,
         oid: &gix_hash::ObjectId,
-    ) -> unexpected::Result<Option<reqwest::Response>> {
+    ) -> unexpected::Result<Option<util::BoxByteStream>> {
         let hex = oid.to_string();
         let (url, _) = Self::deconstruct_descr(descr);

-        let object_url = Self::construct_url(
-            url,
-            format!("/objects/{}/{}", &hex[..2], &hex[2..]).as_str(),
-        )
-        .or_unexpected_while("constructing refs url")?;
+        let object_url =
+            Self::construct_url(url, format!("objects/{}/{}", &hex[..2], &hex[2..]).as_str())
+                .or_unexpected_while("constructing refs url")?;

         Ok(self
             .client
@@ -119,7 +116,19 @@ impl Proxy {
             .await
             .or_unexpected_while("performing request")?
             .error_for_status()
-            .ok())
+            .ok()
+            .map(|res| {
+                use async_compression::tokio::bufread::ZlibDecoder;
+                use futures::stream::TryStreamExt;
+                use std::io;
+
+                let r = tokio_util::io::StreamReader::new(
+                    res.bytes_stream()
+                        .map_err(|e| io::Error::new(io::ErrorKind::Other, e)),
+                );
+
+                util::BoxByteStream::from_async_read(ZlibDecoder::new(r))
+            }))
     }

     async fn get_commit_tree(
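Note (not part of the patch): this hunk is the decompression the commit title refers to. Over git's "dumb" HTTP protocol a loose object is served as the raw file stored under objects/<first two hex characters of the id>/<remaining hex>, and that file is zlib-deflated, so the response body has to be inflated before it can be parsed. The chain above wraps the reqwest byte stream in a StreamReader (giving an AsyncBufRead), feeds it through async-compression's ZlibDecoder, and boxes the result back up as a util::BoxByteStream. A self-contained sketch of the same adapter chain, with a hypothetical origin URL and example object id:

// Sketch only; example.com and the object id are placeholders.
use async_compression::tokio::bufread::ZlibDecoder;
use futures::stream::TryStreamExt;
use std::io;
use tokio::io::AsyncReadExt;

#[tokio::main]
async fn main() -> io::Result<()> {
    // Loose objects live at objects/<first two hex chars>/<remaining hex>.
    let hex = "0123456789abcdef0123456789abcdef01234567";
    let object_url = format!(
        "https://example.com/some/repo.git/objects/{}/{}",
        &hex[..2],
        &hex[2..]
    );

    let res = reqwest::get(&object_url)
        .await
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    // reqwest yields a Stream of Bytes; StreamReader adapts it into an
    // AsyncBufRead, which is what the bufread ZlibDecoder expects.
    let compressed = tokio_util::io::StreamReader::new(
        res.bytes_stream()
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e)),
    );

    // Loose objects are stored zlib-deflated; decode while reading.
    // Box::pin so AsyncReadExt::read_to_end works without an Unpin bound.
    let mut decoder = Box::pin(ZlibDecoder::new(compressed));
    let mut decompressed = Vec::new();
    decoder.read_to_end(&mut decompressed).await?;

    // Note: the result still begins with the "<type> <size>\0" object header.
    Ok(())
}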
@@ -131,7 +140,7 @@ impl Proxy {
             .get_object(descr, commit_hash)
             .await?
             .ok_or(origin::SyncError::Unavailable)?
-            .bytes()
+            .read_to_end()
             .await
             .or(Err(origin::SyncError::Unavailable))?;

@@ -153,7 +162,7 @@ impl Proxy {
             .get_object(descr, tree_hash)
             .await?
             .ok_or(origin::GetFileError::Unavailable)?
-            .bytes()
+            .read_to_end()
             .await
             .or(Err(origin::GetFileError::Unavailable))?;

@@ -288,17 +297,10 @@ impl origin::Store for Proxy {
                 .into());
             }

-            let res = self
+            Ok(self
                 .get_object(&descr, &entry.oid)
                 .await?
-                .map_unexpected_while(|| format!("object for entry {:?} not found", entry))?
-                .bytes_stream();
-
-            use futures::StreamExt;
-            Ok(util::into_box_byte_stream(res.map(|r| {
-                use std::io::{Error, ErrorKind};
-                r.map_err(|e| Error::new(ErrorKind::ConnectionAborted, e))
-            })))
+                .map_unexpected_while(|| format!("object for entry {:?} not found", entry))?)

             // TODO this is still not correct, as it will include the git object header
         })
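Note (not part of the patch): the TODO above points at a remaining gap. Even after zlib inflation a loose object starts with the header "<type> <size>\0" (for example "blob 1234\0") before the actual content, so the stream returned here still carries those header bytes. A hypothetical helper (not in this commit) showing one way a follow-up could strip it:

// Hypothetical helper: consume the "<type> <size>\0" prefix of a decompressed
// loose object and return the reader positioned at the start of the content.
use tokio::io::{AsyncBufRead, AsyncBufReadExt};

async fn skip_object_header<R: AsyncBufRead + Unpin>(mut r: R) -> std::io::Result<R> {
    let mut header = Vec::new();
    // read_until includes the delimiter, so this discards everything up to
    // and including the NUL byte that terminates the header.
    r.read_until(0, &mut header).await?;
    Ok(r)
}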
@@ -62,7 +62,7 @@ impl Service {
         w.write_all("\r\n".as_bytes()).await.or_unexpected()?;

         if let Some(body) = body {
-            let mut body = tokio_util::io::StreamReader::new(body);
+            let mut body = body.into_async_read();
             copy(&mut body, &mut w).await.or_unexpected()?;
         }

@@ -219,7 +219,7 @@ impl Service {

         use domain::manager::GetFileError;
         match self.domain_manager.get_file(&settings, &path).await {
-            Ok(f) => self.serve(200, &path, Body::wrap_stream(f)),
+            Ok(f) => self.serve(200, &path, Body::wrap_stream(f.into_stream())),
             Err(GetFileError::FileNotFound) => self.render_error_page(404, "File not found"),
             Err(GetFileError::Unavailable) => self.render_error_page(502, "Content unavailable"),
             Err(GetFileError::DescrNotSynced) => self.internal_error(
src/util.rs: 39 lines changed
@@ -1,3 +1,4 @@
+use futures::stream::BoxStream;
 use std::{fs, io, path, pin};

 pub fn open_file(path: &path::Path) -> io::Result<Option<fs::File>> {
@@ -31,13 +32,39 @@ pub fn parse_file<T: std::str::FromStr>(
     }
 }

-pub type BoxByteStream = futures::stream::BoxStream<'static, io::Result<bytes::Bytes>>;
+pub struct BoxByteStream(BoxStream<'static, io::Result<bytes::Bytes>>);

-pub fn into_box_byte_stream<T>(v: T) -> BoxByteStream
-where
-    T: futures::stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send + 'static,
-{
-    Box::into_pin(Box::new(v))
+impl BoxByteStream {
+    pub fn from_stream<S>(s: S) -> Self
+    where
+        S: futures::stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send + 'static,
+    {
+        Self(Box::into_pin(Box::new(s)))
+    }
+
+    pub fn from_async_read<R>(r: R) -> Self
+    where
+        R: tokio::io::AsyncRead + Send + 'static,
+    {
+        Self::from_stream(tokio_util::io::ReaderStream::new(r))
+    }
+
+    pub fn into_stream(
+        self,
+    ) -> impl futures::stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send + 'static {
+        self.0
+    }
+
+    pub fn into_async_read(self) -> impl tokio::io::AsyncRead + Send + 'static {
+        tokio_util::io::StreamReader::new(self.into_stream())
+    }
+
+    pub async fn read_to_end(self) -> io::Result<Vec<u8>> {
+        use tokio::io::AsyncReadExt;
+        let mut buf = Vec::<u8>::new();
+        self.into_async_read().read_to_end(&mut buf).await?;
+        Ok(buf)
+    }
 }

 pub type BoxFuture<'a, O> = pin::Pin<Box<dyn futures::Future<Output = O> + Send + 'a>>;
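For completeness, a small usage sketch of the refactored type (assumptions: something with access to the util module above, and a tokio runtime with the macros feature). It exercises the round-trips the call sites in this commit rely on: from_stream in the FSStore, from_async_read for the decompressed object body, read_to_end for the commit/tree lookups, into_stream for Body::wrap_stream, and into_async_read for the copy in the service.

// Usage sketch (assumes the util module above is in scope as `util`).
use futures::stream;
use std::io;

#[tokio::main]
async fn main() -> io::Result<()> {
    // Build from an in-memory stream of byte chunks...
    let bs = util::BoxByteStream::from_stream(stream::iter(vec![
        Ok::<_, io::Error>(bytes::Bytes::from_static(b"hello ")),
        Ok(bytes::Bytes::from_static(b"world")),
    ]));

    // ...and buffer it all, as the commit/tree lookups in the proxy do.
    let buf = bs.read_to_end().await?;
    assert_eq!(buf, b"hello world".to_vec());

    // Or wrap any AsyncRead (here a static byte slice) and convert back to a
    // stream, which is what Body::wrap_stream(f.into_stream()) consumes.
    let bs = util::BoxByteStream::from_async_read(&b"abc"[..]);
    let _stream = bs.into_stream();

    Ok(())
}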