Get rid of origin::Origin trait completely, move read_file_into onto the origin::Store itself

Brian Picciano 2023-07-05 19:03:51 +02:00
parent 773001b158
commit 5e264093ec
7 changed files with 270 additions and 244 deletions
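For orientation, the consolidated trait that results from this commit looks roughly as follows. This is a sketch assembled from the new origin module hunks below; the #[mockall::automock] attribute and module plumbing are omitted:

```rust
// Sketch of the post-commit origin::Store trait, assembled from the hunks below.
// The separate origin::Origin trait is gone; file reads now go through the store
// itself, keyed by the origin's Descr.
pub trait Store {
    /// If the origin is of a kind which can be updated, sync pulls down the
    /// latest version of the origin into the storage.
    fn sync(&self, descr: &Descr) -> Result<(), SyncError>;

    /// Reads the file at `path` out of the origin identified by `descr`,
    /// writing its contents to `into`.
    fn read_file_into(
        &self,
        descr: &Descr,
        path: &str,
        into: &mut dyn std::io::Write,
    ) -> Result<(), ReadFileIntoError>;

    fn all_descrs(&self) -> Result<Vec<Descr>, AllDescrsError>;
}
```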

View File

@@ -25,19 +25,34 @@ impl From<config::GetError> for GetConfigError {
 }
 
 #[derive(thiserror::Error, Debug)]
-pub enum GetOriginError {
-    #[error("not found")]
-    NotFound,
+pub enum ReadFileIntoError {
+    #[error("domain not found")]
+    DomainNotFound,
+
+    #[error("file not found")]
+    FileNotFound,
 
     #[error(transparent)]
     Unexpected(#[from] unexpected::Error),
 }
 
-impl From<config::GetError> for GetOriginError {
-    fn from(e: config::GetError) -> GetOriginError {
+impl From<config::GetError> for ReadFileIntoError {
+    fn from(e: config::GetError) -> Self {
         match e {
-            config::GetError::NotFound => GetOriginError::NotFound,
-            config::GetError::Unexpected(e) => GetOriginError::Unexpected(e),
+            config::GetError::NotFound => Self::DomainNotFound,
+            config::GetError::Unexpected(e) => Self::Unexpected(e),
+        }
+    }
+}
+
+impl From<origin::ReadFileIntoError> for ReadFileIntoError {
+    fn from(e: origin::ReadFileIntoError) -> Self {
+        match e {
+            origin::ReadFileIntoError::DescrNotSynced => {
+                Self::Unexpected(unexpected::Error::from("origin descr not synced"))
+            }
+            origin::ReadFileIntoError::FileNotFound => Self::FileNotFound,
+            origin::ReadFileIntoError::Unexpected(e) => Self::Unexpected(e),
         }
     }
 }
@@ -84,13 +99,13 @@ pub enum SyncWithConfigError {
     Unexpected(#[from] unexpected::Error),
 }
 
-impl From<origin::store::SyncError> for SyncWithConfigError {
-    fn from(e: origin::store::SyncError) -> SyncWithConfigError {
+impl From<origin::SyncError> for SyncWithConfigError {
+    fn from(e: origin::SyncError) -> SyncWithConfigError {
         match e {
-            origin::store::SyncError::InvalidURL => SyncWithConfigError::InvalidURL,
-            origin::store::SyncError::InvalidBranchName => SyncWithConfigError::InvalidBranchName,
-            origin::store::SyncError::AlreadyInProgress => SyncWithConfigError::AlreadyInProgress,
-            origin::store::SyncError::Unexpected(e) => SyncWithConfigError::Unexpected(e),
+            origin::SyncError::InvalidURL => SyncWithConfigError::InvalidURL,
+            origin::SyncError::InvalidBranchName => SyncWithConfigError::InvalidBranchName,
+            origin::SyncError::AlreadyInProgress => SyncWithConfigError::AlreadyInProgress,
+            origin::SyncError::Unexpected(e) => SyncWithConfigError::Unexpected(e),
         }
     }
 }
@@ -121,10 +136,12 @@ pub type GetAcmeHttp01ChallengeKeyError = acme::manager::GetHttp01ChallengeKeyEr
 pub trait Manager: Sync + Send + rustls::server::ResolvesServerCert {
     fn get_config(&self, domain: &domain::Name) -> Result<config::Config, GetConfigError>;
 
-    fn get_origin(
+    fn read_file_into(
         &self,
         domain: &domain::Name,
-    ) -> Result<sync::Arc<dyn origin::Origin>, GetOriginError>;
+        path: &str,
+        into: &mut dyn std::io::Write,
+    ) -> Result<(), ReadFileIntoError>;
 
     fn sync_cert<'mgr>(
         &'mgr self,
@@ -146,7 +163,7 @@ pub trait Manager: Sync + Send + rustls::server::ResolvesServerCert {
 }
 
 pub struct ManagerImpl {
-    origin_store: Box<dyn origin::store::Store + Send + Sync>,
+    origin_store: Box<dyn origin::Store + Send + Sync>,
     domain_config_store: Box<dyn config::Store + Send + Sync>,
     domain_checker: checker::DNSChecker,
    acme_manager: Option<Box<dyn acme::manager::Manager + Send + Sync>>,
@@ -154,7 +171,7 @@ pub struct ManagerImpl {
 
 impl ManagerImpl {
     pub fn new<
-        OriginStore: origin::store::Store + Send + Sync + 'static,
+        OriginStore: origin::Store + Send + Sync + 'static,
         DomainConfigStore: config::Store + Send + Sync + 'static,
         AcmeManager: acme::manager::Manager + Send + Sync + 'static,
     >(
@@ -212,17 +229,16 @@ impl Manager for ManagerImpl {
         Ok(self.domain_config_store.get(domain)?)
     }
 
-    fn get_origin(
+    fn read_file_into(
         &self,
         domain: &domain::Name,
-    ) -> Result<sync::Arc<dyn origin::Origin>, GetOriginError> {
+        path: &str,
+        into: &mut dyn std::io::Write,
+    ) -> Result<(), ReadFileIntoError> {
         let config = self.domain_config_store.get(domain)?;
-        let origin = self
-            .origin_store
-            .get(&config.origin_descr)
-            // if there's a config there should be an origin, any error here is unexpected
-            .or_unexpected()?;
-        Ok(origin)
+        self.origin_store
+            .read_file_into(&config.origin_descr, path, into)?;
+        Ok(())
     }
 
     fn sync_cert<'mgr>(
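Callers of the domain manager now hand it a writer and match on a single ReadFileIntoError enum instead of first fetching an Origin handle. A minimal caller-side sketch (the domain value and the "/index.html" path are hypothetical placeholders; the same pattern appears in the service handler further down):

```rust
// Hypothetical caller of the new domain::manager::Manager::read_file_into.
// `manager` is any Manager impl and `domain` a parsed domain::Name.
let mut buf: Vec<u8> = Vec::new();
match manager.read_file_into(&domain, "/index.html", &mut buf) {
    Ok(()) => { /* buf now holds the file contents */ }
    Err(domain::manager::ReadFileIntoError::DomainNotFound) => { /* no config for this domain */ }
    Err(domain::manager::ReadFileIntoError::FileNotFound) => { /* domain known, file missing */ }
    Err(domain::manager::ReadFileIntoError::Unexpected(e)) => { /* internal error, log `e` */ }
}
```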

View File

@@ -73,7 +73,7 @@ async fn main() {
     )
     .init();
 
-    let origin_store = domani::origin::store::git::FSStore::new(config.origin_store_git_dir_path)
+    let origin_store = domani::origin::git::FSStore::new(config.origin_store_git_dir_path)
         .expect("git origin store initialization failed");
 
     let domain_checker = domani::domain::checker::DNSChecker::new(

View File

@@ -1,11 +1,38 @@
 use crate::error::unexpected;
+use std::sync;
+
+pub mod git;
+pub mod mux;
 
 mod descr;
-pub use self::descr::Descr;
+pub use descr::Descr;
 
-pub mod store;
+#[derive(thiserror::Error, Clone, Debug, PartialEq)]
+pub enum SyncError {
+    #[error("invalid url")]
+    InvalidURL,
+
+    #[error("invalid branch name")]
+    InvalidBranchName,
+
+    #[error("already in progress")]
+    AlreadyInProgress,
+
+    #[error(transparent)]
+    Unexpected(#[from] unexpected::Error),
+}
+
+#[derive(thiserror::Error, Clone, Debug, PartialEq)]
+pub enum AllDescrsError {
+    #[error(transparent)]
+    Unexpected(#[from] unexpected::Error),
+}
 
 #[derive(thiserror::Error, Debug)]
 pub enum ReadFileIntoError {
+    #[error("descr not synced")]
+    DescrNotSynced,
+
     #[error("file not found")]
     FileNotFound,
@@ -14,12 +41,41 @@ pub enum ReadFileIntoError {
 }
 
 #[mockall::automock]
-/// Describes an origin which has already been synced locally and is available for reading files
-/// from.
-pub trait Origin {
+/// Describes a storage mechanism for Origins. Each Origin is uniquely identified by its Descr.
+pub trait Store {
+    /// If the origin is of a kind which can be updated, sync will pull down the latest version of
+    /// the origin into the storage.
+    fn sync(&self, descr: &Descr) -> Result<(), SyncError>;
+
     fn read_file_into(
         &self,
+        descr: &Descr,
         path: &str,
         into: &mut dyn std::io::Write,
     ) -> Result<(), ReadFileIntoError>;
+
+    fn all_descrs(&self) -> Result<Vec<Descr>, AllDescrsError>;
+}
+
+pub fn new_mock() -> sync::Arc<sync::Mutex<MockStore>> {
+    sync::Arc::new(sync::Mutex::new(MockStore::new()))
+}
+
+impl Store for sync::Arc<sync::Mutex<MockStore>> {
+    fn sync(&self, descr: &Descr) -> Result<(), SyncError> {
+        self.lock().unwrap().sync(descr)
+    }
+
+    fn all_descrs(&self) -> Result<Vec<Descr>, AllDescrsError> {
+        self.lock().unwrap().all_descrs()
+    }
+
+    fn read_file_into(
+        &self,
+        descr: &Descr,
+        path: &str,
+        into: &mut dyn std::io::Write,
+    ) -> Result<(), ReadFileIntoError> {
+        self.lock().unwrap().read_file_into(descr, path, into)
+    }
 }
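At the store level the calling pattern is the same, except the caller passes the Descr explicitly and an origin that was never synced surfaces as DescrNotSynced. A small usage sketch, modeled on the git::FSStore tests further down (store and descr setup elided):

```rust
// Sketch of store-level usage, assuming `store` implements origin::Store and
// `descr` is an origin::Descr that has already been synced via store.sync(&descr).
let mut into: Vec<u8> = vec![];
match store.read_file_into(&descr, "src/lib.rs", &mut into) {
    Ok(()) => assert!(into.len() > 0),
    // A descr that was never synced now comes back as DescrNotSynced, replacing
    // the old store.get(...) -> GetError::NotFound path.
    Err(origin::ReadFileIntoError::DescrNotSynced) => { /* call store.sync(&descr) first */ }
    Err(origin::ReadFileIntoError::FileNotFound) => { /* no such path in this origin */ }
    Err(origin::ReadFileIntoError::Unexpected(e)) => panic!("unexpected error: {e}"),
}
```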

View File

@@ -1,52 +1,17 @@
 use crate::error::unexpected::{self, Intoable, Mappable};
-use crate::origin::{self, store};
+use crate::origin;
 use std::path::{Path, PathBuf};
 use std::{collections, fs, io, sync};
 
 #[derive(Clone)]
-struct Origin {
+struct RepoSnapshot {
     repo: sync::Arc<gix::ThreadSafeRepository>,
     tree_object_id: gix::ObjectId,
 }
 
-impl origin::Origin for Origin {
-    fn read_file_into(
-        &self,
-        path: &str,
-        into: &mut dyn std::io::Write,
-    ) -> Result<(), origin::ReadFileIntoError> {
-        let mut clean_path = Path::new(path);
-        clean_path = clean_path.strip_prefix("/").unwrap_or(clean_path);
-
-        let repo = self.repo.to_thread_local();
-
-        let file_object = repo
-            .find_object(self.tree_object_id)
-            .map_unexpected_while(|| format!("finding tree object {}", self.tree_object_id))?
-            .peel_to_tree()
-            .map_unexpected_while(|| format!("peeling tree object {}", self.tree_object_id))?
-            .lookup_entry_by_path(clean_path)
-            .map_unexpected_while(|| {
-                format!(
-                    "looking up {} in tree object {}",
-                    clean_path.display(),
-                    self.tree_object_id
-                )
-            })?
-            .ok_or(origin::ReadFileIntoError::FileNotFound)?
-            .object()
-            .or_unexpected()?;
-
-        into.write_all(file_object.data.as_ref())
-            .or_unexpected_while("copying out file")?;
-
-        Ok(())
-    }
-}
-
 #[derive(thiserror::Error, Debug)]
-enum GetOriginError {
+enum CreateRepoSnapshotError {
     #[error("invalid branch name")]
     InvalidBranchName,
@@ -54,8 +19,8 @@ enum GetOriginError {
     Unexpected(#[from] unexpected::Error),
 }
 
-/// Implements the Store trait for any Descr::Git based Origins, storing the git repos on disk. If
-/// any non-git Descrs are used then this implementation will panic.
+/// Implements the Store trait for Descr::Git, storing the git repos on disk. If any non-git Descrs
+/// are used then this implementation will panic.
 pub struct FSStore {
     dir_path: PathBuf,
@@ -63,7 +28,7 @@ pub struct FSStore {
     // more than one origin to be syncing at a time
     sync_guard: sync::Mutex<collections::HashMap<origin::Descr, ()>>,
 
-    origins: sync::RwLock<collections::HashMap<origin::Descr, sync::Arc<Origin>>>,
+    repo_snapshots: sync::RwLock<collections::HashMap<origin::Descr, sync::Arc<RepoSnapshot>>>,
 }
 
 impl FSStore {
@@ -72,7 +37,7 @@ impl FSStore {
         Ok(Self {
             dir_path,
             sync_guard: sync::Mutex::new(collections::HashMap::new()),
-            origins: sync::RwLock::new(collections::HashMap::new()),
+            repo_snapshots: sync::RwLock::new(collections::HashMap::new()),
         })
     }
@@ -88,11 +53,11 @@ impl FSStore {
         format!("origin/{branch_name}")
     }
 
-    fn get_origin(
+    fn create_repo_snapshot(
         &self,
         repo: gix::Repository,
         descr: &origin::Descr,
-    ) -> Result<Origin, GetOriginError> {
+    ) -> Result<RepoSnapshot, CreateRepoSnapshotError> {
         let origin::Descr::Git {
             ref branch_name, ..
         } = descr;
@@ -102,7 +67,7 @@ impl FSStore {
         let commit_object_id = repo
             .try_find_reference(&branch_ref)
             .map_unexpected_while(|| format!("finding branch ref {branch_ref}"))?
-            .ok_or(GetOriginError::InvalidBranchName)?
+            .ok_or(CreateRepoSnapshotError::InvalidBranchName)?
             .peel_to_id_in_place()
             .or_unexpected_while("peeling id in place")?
             .detach();
@@ -114,13 +79,60 @@ impl FSStore {
             .map_unexpected_while(|| format!("parsing {commit_object_id} as commit"))?
             .tree();
 
-        Ok(Origin {
+        Ok(RepoSnapshot {
             repo: sync::Arc::new(repo.into()),
             tree_object_id,
         })
     }
 
-    fn sync_inner(&self, descr: &origin::Descr) -> Result<gix::Repository, store::SyncError> {
+    fn get_repo_snapshot(
+        &self,
+        descr: &origin::Descr,
+    ) -> Result<Option<sync::Arc<RepoSnapshot>>, unexpected::Error> {
+        {
+            let repo_snapshots = self.repo_snapshots.read().unwrap();
+            if let Some(repo_snapshot) = repo_snapshots.get(descr) {
+                return Ok(Some(repo_snapshot.clone()));
+            }
+        }
+
+        let repo_path = self.repo_path(descr);
+
+        match fs::read_dir(&repo_path) {
+            Ok(_) => (),
+            Err(e) => match e.kind() {
+                io::ErrorKind::NotFound => return Ok(None),
+                _ => {
+                    return Err(e.into_unexpected_while(format!(
+                        "checking if {} exists",
+                        repo_path.display()
+                    )))
+                }
+            },
+        }
+
+        let repo = gix::open(&repo_path)
+            .map_unexpected_while(|| format!("opening {} as git repo", repo_path.display()))?;
+
+        let repo_snapshot = self
+            .create_repo_snapshot(repo, descr)
+            .map_err(|e| match e {
+                // it's not expected that the branch name is invalid at this point, it must have
+                // existed for sync to have been successful.
+                CreateRepoSnapshotError::InvalidBranchName => e.into_unexpected().into(),
+                CreateRepoSnapshotError::Unexpected(e) => e,
+            })?;
+
+        let repo_snapshot = sync::Arc::new(repo_snapshot);
+
+        let mut repo_snapshots = self.repo_snapshots.write().unwrap();
+        (*repo_snapshots).insert(descr.clone(), repo_snapshot.clone());
+
+        Ok(Some(repo_snapshot))
+    }
+
+    fn sync_inner(&self, descr: &origin::Descr) -> Result<gix::Repository, origin::SyncError> {
         use gix::clone::Error as gixCloneErr;
         use gix::progress::Discard;
@@ -141,10 +153,10 @@ impl FSStore {
         let (repo, _) = gix::prepare_clone_bare(url.clone(), repo_path)
             .map_err(|e| match e {
                 gixCloneErr::Init(gix::init::Error::InvalidBranchName { .. }) => {
-                    store::SyncError::InvalidBranchName
+                    origin::SyncError::InvalidBranchName
                 }
                 gixCloneErr::UrlParse(_) | gixCloneErr::CanonicalizeUrl { .. } => {
-                    store::SyncError::InvalidURL
+                    origin::SyncError::InvalidURL
                 }
                 _ => e
                     .into_unexpected_while(format!(
@@ -155,14 +167,14 @@ impl FSStore {
                     .into(),
             })?
             .fetch_only(Discard, should_interrupt)
-            .map_err(|_| store::SyncError::InvalidURL)?;
+            .map_err(|_| origin::SyncError::InvalidURL)?;
 
         // Check to make sure the branch name exists
         // TODO if this fails we should delete repo_path
         let branch_ref = self.branch_ref(branch_name);
         repo.try_find_reference(&branch_ref)
             .map_unexpected_while(|| format!("finding branch ref {branch_ref}"))?
-            .ok_or(store::SyncError::InvalidBranchName)?;
+            .ok_or(origin::SyncError::InvalidBranchName)?;
 
         // Add the descr to the repo directory, so we can know the actual descr later
         // TODO if this fails we should delete repo_path
@@ -199,7 +211,7 @@ impl FSStore {
 }
 
 impl super::Store for FSStore {
-    fn sync(&self, descr: &origin::Descr) -> Result<(), store::SyncError> {
+    fn sync(&self, descr: &origin::Descr) -> Result<(), origin::SyncError> {
         // attempt to lock this descr for syncing, doing so within a new scope so the mutex
         // isn't actually being held for the whole method duration.
         let is_already_syncing = {
@@ -211,7 +223,7 @@ impl super::Store for FSStore {
         };
 
         if is_already_syncing {
-            return Err(store::SyncError::AlreadyInProgress);
+            return Err(origin::SyncError::AlreadyInProgress);
         }
 
         let res = self.sync_inner(descr);
@@ -224,7 +236,7 @@ impl super::Store for FSStore {
         };
 
         // repo is synced at this point (though the sync lock is still held), just gotta create
-        // the origin and store it.
+        // the RepoSnapshot and store it.
         //
         // TODO this is a bit of a memory leak, but by the time we get
         // to that point this should all be backed by something which isn't local storage
@@ -232,56 +244,22 @@ impl super::Store for FSStore {
         // calling this while the sync lock is held isn't ideal, but it's convenient and
         // shouldn't be too terrible generally
-        let origin = self.get_origin(repo, descr).map_err(|e| match e {
-            GetOriginError::InvalidBranchName => store::SyncError::InvalidBranchName,
-            GetOriginError::Unexpected(e) => store::SyncError::Unexpected(e),
-        })?;
+        let repo_snapshot = self
+            .create_repo_snapshot(repo, descr)
+            .map_err(|e| match e {
+                CreateRepoSnapshotError::InvalidBranchName => origin::SyncError::InvalidBranchName,
+                CreateRepoSnapshotError::Unexpected(e) => origin::SyncError::Unexpected(e),
+            })?;
 
-        let mut origins = self.origins.write().unwrap();
-        (*origins).insert(descr.clone(), sync::Arc::new(origin));
+        let mut repo_snapshots = self.repo_snapshots.write().unwrap();
+        (*repo_snapshots).insert(descr.clone(), sync::Arc::new(repo_snapshot));
 
         Ok(())
     }
 
-    fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
-        {
-            let origins = self.origins.read().unwrap();
-            if let Some(origin) = origins.get(descr) {
-                return Ok(origin.clone());
-            }
-        }
-
-        let repo_path = self.repo_path(descr);
-
-        fs::read_dir(&repo_path).map_err(|e| match e.kind() {
-            io::ErrorKind::NotFound => store::GetError::NotFound,
-            _ => e
-                .into_unexpected_while(format!("checking if {} exists", repo_path.display()))
-                .into(),
-        })?;
-
-        let repo = gix::open(&repo_path)
-            .map_unexpected_while(|| format!("opening {} as git repo", repo_path.display()))?;
-
-        let origin = self.get_origin(repo, descr).map_err(|e| match e {
-            // it's not expected that the branch name is invalid at this point, it must have
-            // existed for sync to have been successful.
-            GetOriginError::InvalidBranchName => e.into_unexpected().into(),
-            GetOriginError::Unexpected(e) => store::GetError::Unexpected(e),
-        })?;
-
-        let origin = sync::Arc::new(origin);
-
-        let mut origins = self.origins.write().unwrap();
-        (*origins).insert(descr.clone(), origin.clone());
-
-        Ok(origin)
-    }
-
-    fn all_descrs(&self) -> Result<Vec<origin::Descr>, store::AllDescrsError> {
+    fn all_descrs(&self) -> Result<Vec<origin::Descr>, origin::AllDescrsError> {
         fs::read_dir(&self.dir_path).or_unexpected()?.map(
-            |dir_entry_res: io::Result<fs::DirEntry>| -> Result<origin::Descr, store::AllDescrsError> {
+            |dir_entry_res: io::Result<fs::DirEntry>| -> Result<origin::Descr, origin::AllDescrsError> {
                let descr_id: String = dir_entry_res
                    .or_unexpected()?
                    .file_name()
@@ -309,11 +287,55 @@ impl super::Store for FSStore {
             },
         ).try_collect()
     }
+
+    fn read_file_into(
+        &self,
+        descr: &origin::Descr,
+        path: &str,
+        into: &mut dyn std::io::Write,
+    ) -> Result<(), origin::ReadFileIntoError> {
+        let repo_snapshot = match self.get_repo_snapshot(descr) {
+            Ok(Some(repo_snapshot)) => repo_snapshot,
+            Ok(None) => return Err(origin::ReadFileIntoError::DescrNotSynced),
+            Err(e) => return Err(e.into()),
+        };
+
+        let mut clean_path = Path::new(path);
+        clean_path = clean_path.strip_prefix("/").unwrap_or(clean_path);
+
+        let repo = repo_snapshot.repo.to_thread_local();
+
+        let file_object = repo
+            .find_object(repo_snapshot.tree_object_id)
+            .map_unexpected_while(|| {
+                format!("finding tree object {}", repo_snapshot.tree_object_id)
+            })?
+            .peel_to_tree()
+            .map_unexpected_while(|| {
+                format!("peeling tree object {}", repo_snapshot.tree_object_id)
+            })?
+            .lookup_entry_by_path(clean_path)
+            .map_unexpected_while(|| {
+                format!(
+                    "looking up {} in tree object {}",
+                    clean_path.display(),
+                    repo_snapshot.tree_object_id
+                )
+            })?
+            .ok_or(origin::ReadFileIntoError::FileNotFound)?
+            .object()
+            .or_unexpected()?;
+
+        into.write_all(file_object.data.as_ref())
+            .or_unexpected_while("copying out file")?;
+
+        Ok(())
+    }
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::origin::{self, store, store::Store};
+    use crate::origin::{self, Store};
     use tempdir::TempDir;
 
     #[test]
@@ -337,17 +359,10 @@ mod tests {
         store.sync(&descr).expect("sync should succeed");
         store.sync(&descr).expect("second sync should succeed");
 
-        assert!(matches!(
-            store.get(&other_descr),
-            Err::<_, store::GetError>(store::GetError::NotFound),
-        ));
-
-        let origin = store.get(&descr).expect("origin retrieved");
-
         let assert_write = |path: &str| {
             let mut into: Vec<u8> = vec![];
-            origin
-                .read_file_into(path, &mut into)
+            store
+                .read_file_into(&descr, path, &mut into)
                 .expect("write should succeed");
             assert!(into.len() > 0);
         };
@@ -355,10 +370,17 @@ mod tests {
         assert_write("src/lib.rs");
         assert_write("/src/lib.rs");
 
-        // File doesn't exist
         let mut into: Vec<u8> = vec![];
+
+        // RepoSnapshot doesn't exist
+        assert!(matches!(
+            store.read_file_into(&other_descr, "DNE", &mut into),
+            Err::<_, origin::ReadFileIntoError>(origin::ReadFileIntoError::DescrNotSynced),
+        ));
+
+        // File doesn't exist
         assert!(matches!(
-            origin.read_file_into("DNE", &mut into),
+            store.read_file_into(&descr, "DNE", &mut into),
             Err::<(), origin::ReadFileIntoError>(origin::ReadFileIntoError::FileNotFound),
         ));
         assert_eq!(into.len(), 0);

View File

@@ -1,10 +1,9 @@
 use crate::error::unexpected::Mappable;
-use crate::origin::{self, store};
-use std::sync;
+use crate::origin;
 
 pub struct Store<F, S>
 where
-    S: store::Store + 'static,
+    S: origin::Store + 'static,
     F: Fn(&origin::Descr) -> Option<S> + Sync + Send,
 {
     mapping_fn: F,
@@ -13,7 +12,7 @@ where
 
 impl<F, S> Store<F, S>
 where
-    S: store::Store + 'static,
+    S: origin::Store + 'static,
     F: Fn(&origin::Descr) -> Option<S> + Sync + Send,
 {
     pub fn new(mapping_fn: F, stores: Vec<S>) -> Store<F, S> {
@@ -21,24 +20,18 @@ where
     }
 }
 
-impl<F, S> store::Store for Store<F, S>
+impl<F, S> origin::Store for Store<F, S>
 where
-    S: store::Store + 'static,
+    S: origin::Store + 'static,
     F: Fn(&origin::Descr) -> Option<S> + Sync + Send,
 {
-    fn sync(&self, descr: &origin::Descr) -> Result<(), store::SyncError> {
+    fn sync(&self, descr: &origin::Descr) -> Result<(), origin::SyncError> {
         (self.mapping_fn)(descr)
             .or_unexpected_while(format!("mapping {:?} to store", &descr))?
             .sync(descr)
     }
 
-    fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
-        (self.mapping_fn)(descr)
-            .or_unexpected_while(format!("mapping {:?} to store", &descr))?
-            .get(descr)
-    }
-
-    fn all_descrs(&self) -> Result<Vec<origin::Descr>, store::AllDescrsError> {
+    fn all_descrs(&self) -> Result<Vec<origin::Descr>, origin::AllDescrsError> {
         let mut res = Vec::<origin::Descr>::new();
 
         for store in self.stores.iter() {
@@ -47,20 +40,31 @@ where
         Ok(res)
     }
+
+    fn read_file_into(
+        &self,
+        descr: &origin::Descr,
+        path: &str,
+        into: &mut dyn std::io::Write,
+    ) -> Result<(), origin::ReadFileIntoError> {
+        (self.mapping_fn)(descr)
+            .or_unexpected_while(format!("mapping {:?} to store", &descr))?
+            .read_file_into(descr, path, into)
+    }
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::origin::{self, store};
+    use crate::origin;
     use std::sync;
 
     struct Harness {
         descr_a: origin::Descr,
         descr_b: origin::Descr,
         descr_unknown: origin::Descr,
-        store_a: sync::Arc<sync::Mutex<store::MockStore>>,
-        store_b: sync::Arc<sync::Mutex<store::MockStore>>,
-        store: Box<dyn store::Store>,
+        store_a: sync::Arc<sync::Mutex<origin::MockStore>>,
+        store_b: sync::Arc<sync::Mutex<origin::MockStore>>,
+        store: Box<dyn origin::Store>,
     }
 
     impl Harness {
@@ -75,8 +79,8 @@ mod tests {
                 branch_name: "B".to_string(),
             };
 
-            let store_a = store::new_mock();
-            let store_b = store::new_mock();
+            let store_a = origin::new_mock();
+            let store_b = origin::new_mock();
 
             Harness {
                 descr_a: descr_a.clone(),
@@ -119,7 +123,7 @@ mod tests {
                 .expect_sync()
                 .withf(move |descr: &origin::Descr| descr == &descr_a)
                 .times(1)
-                .return_const(Ok::<(), store::SyncError>(()));
+                .return_const(Ok::<(), origin::SyncError>(()));
         }
 
         assert_eq!(Ok(()), h.store.sync(&h.descr_a));
@@ -132,7 +136,7 @@ mod tests {
                 .expect_sync()
                 .withf(move |descr: &origin::Descr| descr == &descr_b)
                 .times(1)
-                .return_const(Ok::<(), store::SyncError>(()));
+                .return_const(Ok::<(), origin::SyncError>(()));
         }
 
         assert_eq!(Ok(()), h.store.sync(&h.descr_b));
@@ -149,7 +153,7 @@ mod tests {
             .unwrap()
             .expect_all_descrs()
             .times(1)
-            .return_const(Ok::<Vec<origin::Descr>, store::AllDescrsError>(vec![h
+            .return_const(Ok::<Vec<origin::Descr>, origin::AllDescrsError>(vec![h
                 .descr_a
                 .clone()]));
@@ -158,7 +162,7 @@ mod tests {
             .unwrap()
             .expect_all_descrs()
             .times(1)
-            .return_const(Ok::<Vec<origin::Descr>, store::AllDescrsError>(vec![h
+            .return_const(Ok::<Vec<origin::Descr>, origin::AllDescrsError>(vec![h
                 .descr_b
                 .clone()]));

View File

@@ -1,65 +0,0 @@
-use crate::error::unexpected;
-use crate::origin;
-use std::sync;
-
-pub mod git;
-pub mod mux;
-
-#[derive(thiserror::Error, Clone, Debug, PartialEq)]
-pub enum SyncError {
-    #[error("invalid url")]
-    InvalidURL,
-
-    #[error("invalid branch name")]
-    InvalidBranchName,
-
-    #[error("already in progress")]
-    AlreadyInProgress,
-
-    #[error(transparent)]
-    Unexpected(#[from] unexpected::Error),
-}
-
-#[derive(thiserror::Error, Clone, Debug, PartialEq)]
-pub enum GetError {
-    #[error("not found")]
-    NotFound,
-
-    #[error(transparent)]
-    Unexpected(#[from] unexpected::Error),
-}
-
-#[derive(thiserror::Error, Clone, Debug, PartialEq)]
-pub enum AllDescrsError {
-    #[error(transparent)]
-    Unexpected(#[from] unexpected::Error),
-}
-
-#[mockall::automock]
-/// Describes a storage mechanism for Origins. Each Origin is uniquely identified by its Descr.
-pub trait Store {
-    /// If the origin is of a kind which can be updated, sync will pull down the latest version of
-    /// the origin into the storage.
-    fn sync(&self, descr: &origin::Descr) -> Result<(), SyncError>;
-
-    fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError>;
-
-    fn all_descrs(&self) -> Result<Vec<origin::Descr>, AllDescrsError>;
-}
-
-pub fn new_mock() -> sync::Arc<sync::Mutex<MockStore>> {
-    sync::Arc::new(sync::Mutex::new(MockStore::new()))
-}
-
-impl Store for sync::Arc<sync::Mutex<MockStore>> {
-    fn sync(&self, descr: &origin::Descr) -> Result<(), SyncError> {
-        self.lock().unwrap().sync(descr)
-    }
-
-    fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError> {
-        self.lock().unwrap().get(descr)
-    }
-
-    fn all_descrs(&self) -> Result<Vec<origin::Descr>, AllDescrsError> {
-        self.lock().unwrap().all_descrs()
-    }
-}

View File

@@ -9,7 +9,7 @@ use std::str::FromStr;
 use std::{future, net, sync};
 
 use crate::error::unexpected;
-use crate::{domain, origin, service, util};
+use crate::{domain, service, util};
 
 type SvcResponse = Result<Response<hyper::body::Body>, String>;
@@ -170,23 +170,16 @@ impl<'svc> Service {
             false => path,
         };
 
-        let origin = match self.domain_manager.get_origin(&domain) {
-            Ok(o) => o,
-            Err(domain::manager::GetOriginError::NotFound) => {
+        let mut buf = Vec::<u8>::new();
+        match self.domain_manager.read_file_into(&domain, path, &mut buf) {
+            Ok(_) => self.serve_string(200, path, buf),
+            Err(domain::manager::ReadFileIntoError::DomainNotFound) => {
                 return self.render_error_page(404, "Domain not found")
             }
-            Err(domain::manager::GetOriginError::Unexpected(e)) => {
-                return self.render_error_page(500, format!("failed to fetch origin: {e}").as_str())
-            }
-        };
-
-        let mut buf = Vec::<u8>::new();
-        match origin.read_file_into(path, &mut buf) {
-            Ok(_) => self.serve_string(200, path, buf),
-            Err(origin::ReadFileIntoError::FileNotFound) => {
+            Err(domain::manager::ReadFileIntoError::FileNotFound) => {
                 self.render_error_page(404, "File not found")
             }
-            Err(origin::ReadFileIntoError::Unexpected(e)) => {
+            Err(domain::manager::ReadFileIntoError::Unexpected(e)) => {
                 self.render_error_page(500, format!("failed to fetch file {path}: {e}").as_str())
             }
         }