Compare commits

..

No commits in common. "773001b158ae47b4bc5788f39c6fe6b11131a07f" and "9c36ae1c7b067ed8f1898f59f28dd99a2bba222c" have entirely different histories.

11 changed files with 107 additions and 80 deletions

View File

@ -93,9 +93,4 @@ Within the shell which opens you can do `cargo run` to start a local instance.
## Roadmap
* Support for AAAA and CNAME records
* Support for more backends than just git repositories, including:
* IPFS/IPNS
* Alternative URLs (reverse proxy)
* Google Drive
* Dropbox
Check out the `src/service/http_tpl/index.html` file for the current roadmap.

View File

@ -163,21 +163,18 @@ impl ManagerImpl {
domain_config_store: DomainConfigStore,
domain_checker: checker::DNSChecker,
acme_manager: Option<AcmeManager>,
) -> sync::Arc<Self> {
) -> sync::Arc<dyn Manager> {
let manager = sync::Arc::new(ManagerImpl {
origin_store: Box::from(origin_store),
domain_config_store: Box::from(domain_config_store),
domain_checker,
domain_checker: domain_checker,
acme_manager: acme_manager
.map(|m| Box::new(m) as Box<dyn acme::manager::Manager + Send + Sync>),
});
task_stack.push_spawn(|canceller| {
let manager = manager.clone();
async move {
manager.sync_origins(canceller).await;
Ok(())
}
async move { Ok(manager.sync_origins(canceller).await) }
});
manager
@ -196,8 +193,9 @@ impl ManagerImpl {
}
}
.for_each(|descr| {
if let Err(err) = self.origin_store.sync(&descr) {
log::error!("Failed to sync store for {:?}: {err}", descr)
if let Err(err) = self.origin_store.sync(descr.clone(), origin::store::Limits {}) {
log::error!("Failed to sync store for {:?}: {err}", descr);
return;
}
});
},
@ -219,7 +217,7 @@ impl Manager for ManagerImpl {
let config = self.domain_config_store.get(domain)?;
let origin = self
.origin_store
.get(&config.origin_descr)
.get(config.origin_descr)
// if there's a config there should be an origin, any error here is unexpected
.or_unexpected()?;
Ok(origin)
@ -254,7 +252,8 @@ impl Manager for ManagerImpl {
.check_domain(&domain, &config_hash)
.await?;
self.origin_store.sync(&config.origin_descr)?;
self.origin_store
.sync(config.origin_descr.clone(), origin::store::Limits {})?;
self.domain_config_store.set(&domain, &config)?;

View File

@ -103,7 +103,7 @@ impl<T, E: error::Error> Mappable<T> for Result<T, E> {
}
}
static OPTION_NONE_ERROR: &str = "expected Some but got None";
static OPTION_NONE_ERROR: &'static str = "expected Some but got None";
impl<T> Mappable<T> for Option<T> {
fn or_unexpected(self) -> Result<T, Error> {

View File

@ -125,8 +125,8 @@ async fn main() {
domain_manager.clone(),
config.domain_checker_target_a,
config.passphrase,
config.http_listen_addr,
config.http_domain,
config.http_listen_addr.clone(),
config.http_domain.clone(),
config
.https_listen_addr
.map(|listen_addr| domani::service::http::HTTPSParams {

View File

@ -17,6 +17,7 @@ pub enum ReadFileIntoError {
/// Describes an origin which has already been synced locally and is available for reading files
/// from.
pub trait Origin {
fn descr(&self) -> &Descr;
fn read_file_into(
&self,
path: &str,

View File

@ -5,6 +5,11 @@ use std::sync;
pub mod git;
pub mod mux;
/// Resource limits to apply while syncing an origin into local storage.
/// Currently carries no fields; it is passed through `Store::sync` so that
/// limits (e.g. storage quotas, per the TODO below) can be added later
/// without changing the trait's signature again.
#[derive(Clone, Copy)]
pub struct Limits {
// TODO storage limits
}
#[derive(thiserror::Error, Clone, Debug, PartialEq)]
pub enum SyncError {
#[error("invalid url")]
@ -40,9 +45,9 @@ pub enum AllDescrsError {
pub trait Store {
/// If the origin is of a kind which can be updated, sync will pull down the latest version of
/// the origin into the storage.
fn sync(&self, descr: &origin::Descr) -> Result<(), SyncError>;
fn sync(&self, descr: origin::Descr, limits: Limits) -> Result<(), SyncError>;
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError>;
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError>;
fn all_descrs(&self) -> Result<Vec<origin::Descr>, AllDescrsError>;
}
@ -51,11 +56,11 @@ pub fn new_mock() -> sync::Arc<sync::Mutex<MockStore>> {
}
impl Store for sync::Arc<sync::Mutex<MockStore>> {
fn sync(&self, descr: &origin::Descr) -> Result<(), SyncError> {
self.lock().unwrap().sync(descr)
fn sync(&self, descr: origin::Descr, limits: Limits) -> Result<(), SyncError> {
self.lock().unwrap().sync(descr, limits)
}
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError> {
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError> {
self.lock().unwrap().get(descr)
}

View File

@ -6,11 +6,16 @@ use std::{collections, fs, io, sync};
/// A locally-synced git-backed origin: a handle to the cloned repository plus
/// the id of the tree object that file reads are served from.
#[derive(Clone)]
struct Origin {
// descriptor identifying which origin this is (returned by `descr()`)
descr: origin::Descr,
// shared, thread-safe handle to the underlying git repository
repo: sync::Arc<gix::ThreadSafeRepository>,
// object id of the tree to read files out of — presumably the root tree of
// the configured branch's head commit; TODO confirm against get_origin
tree_object_id: gix::ObjectId,
}
impl origin::Origin for Origin {
fn descr(&self) -> &origin::Descr {
&self.descr
}
fn read_file_into(
&self,
path: &str,
@ -91,7 +96,7 @@ impl FSStore {
fn get_origin(
&self,
repo: gix::Repository,
descr: &origin::Descr,
descr: origin::Descr,
) -> Result<Origin, GetOriginError> {
let origin::Descr::Git {
ref branch_name, ..
@ -115,12 +120,17 @@ impl FSStore {
.tree();
Ok(Origin {
descr,
repo: sync::Arc::new(repo.into()),
tree_object_id,
})
}
fn sync_inner(&self, descr: &origin::Descr) -> Result<gix::Repository, store::SyncError> {
fn sync_inner(
&self,
descr: &origin::Descr,
_limits: store::Limits,
) -> Result<gix::Repository, store::SyncError> {
use gix::clone::Error as gixCloneErr;
use gix::progress::Discard;
@ -199,7 +209,7 @@ impl FSStore {
}
impl super::Store for FSStore {
fn sync(&self, descr: &origin::Descr) -> Result<(), store::SyncError> {
fn sync(&self, descr: origin::Descr, limits: store::Limits) -> Result<(), store::SyncError> {
// attempt to lock this descr for syncing, doing so within a new scope so the mutex
// isn't actually being held for the whole method duration.
let is_already_syncing = {
@ -214,9 +224,9 @@ impl super::Store for FSStore {
return Err(store::SyncError::AlreadyInProgress);
}
let res = self.sync_inner(descr);
let res = self.sync_inner(&descr, limits);
self.sync_guard.lock().unwrap().remove(descr);
self.sync_guard.lock().unwrap().remove(&descr);
let repo = match res {
Ok(repo) => repo,
@ -232,26 +242,26 @@ impl super::Store for FSStore {
// calling this while the sync lock is held isn't ideal, but it's convenient and
// shouldn't be too terrible generally
let origin = self.get_origin(repo, descr).map_err(|e| match e {
let origin = self.get_origin(repo, descr.clone()).map_err(|e| match e {
GetOriginError::InvalidBranchName => store::SyncError::InvalidBranchName,
GetOriginError::Unexpected(e) => store::SyncError::Unexpected(e),
})?;
let mut origins = self.origins.write().unwrap();
(*origins).insert(descr.clone(), sync::Arc::new(origin));
(*origins).insert(descr, sync::Arc::new(origin));
Ok(())
}
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
{
let origins = self.origins.read().unwrap();
if let Some(origin) = origins.get(descr) {
if let Some(origin) = origins.get(&descr) {
return Ok(origin.clone());
}
}
let repo_path = self.repo_path(descr);
let repo_path = self.repo_path(&descr);
fs::read_dir(&repo_path).map_err(|e| match e.kind() {
io::ErrorKind::NotFound => store::GetError::NotFound,
@ -263,18 +273,18 @@ impl super::Store for FSStore {
let repo = gix::open(&repo_path)
.map_unexpected_while(|| format!("opening {} as git repo", repo_path.display()))?;
let origin = self.get_origin(repo, descr).map_err(|e| match e {
let origin = self.get_origin(repo, descr.clone()).map_err(|e| match e {
// it's not expected that the branch name is invalid at this point, it must have
// existed for sync to have been successful.
GetOriginError::InvalidBranchName => e.into_unexpected().into(),
GetOriginError::Unexpected(e) => store::GetError::Unexpected(e),
})?;
let origin = sync::Arc::new(origin);
let origin = sync::Arc::new(origin.clone());
let mut origins = self.origins.write().unwrap();
(*origins).insert(descr.clone(), origin.clone());
(*origins).insert(descr, origin.clone());
Ok(origin)
}
@ -332,17 +342,25 @@ mod tests {
branch_name: String::from("some_other_branch"),
};
let limits = store::Limits {};
let store = super::FSStore::new(tmp_dir.path().to_path_buf()).expect("store created");
store.sync(&descr).expect("sync should succeed");
store.sync(&descr).expect("second sync should succeed");
store
.sync(descr.clone(), limits)
.expect("sync should succeed");
store
.sync(descr.clone(), limits)
.expect("second sync should succeed");
assert!(matches!(
store.get(&other_descr),
store.get(other_descr),
Err::<_, store::GetError>(store::GetError::NotFound),
));
let origin = store.get(&descr).expect("origin retrieved");
let origin = store.get(descr.clone()).expect("origin retrieved");
assert_eq!(&descr, origin.descr());
let assert_write = |path: &str| {
let mut into: Vec<u8> = vec![];

View File

@ -26,14 +26,14 @@ where
S: store::Store + 'static,
F: Fn(&origin::Descr) -> Option<S> + Sync + Send,
{
fn sync(&self, descr: &origin::Descr) -> Result<(), store::SyncError> {
(self.mapping_fn)(descr)
fn sync(&self, descr: origin::Descr, limits: store::Limits) -> Result<(), store::SyncError> {
(self.mapping_fn)(&descr)
.or_unexpected_while(format!("mapping {:?} to store", &descr))?
.sync(descr)
.sync(descr, limits)
}
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
(self.mapping_fn)(descr)
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
(self.mapping_fn)(&descr)
.or_unexpected_while(format!("mapping {:?} to store", &descr))?
.get(descr)
}
@ -52,6 +52,7 @@ where
#[cfg(test)]
mod tests {
use crate::origin::{self, store};
use mockall::predicate;
use std::sync;
struct Harness {
@ -111,33 +112,30 @@ mod tests {
fn sync() {
let h = Harness::new();
{
let descr_a = h.descr_a.clone();
h.store_a
.lock()
.unwrap()
.expect_sync()
.withf(move |descr: &origin::Descr| descr == &descr_a)
.times(1)
.return_const(Ok::<(), store::SyncError>(()));
}
h.store_a
.lock()
.unwrap()
.expect_sync()
.with(predicate::eq(h.descr_a.clone()), predicate::always())
.times(1)
.return_const(Ok::<(), store::SyncError>(()));
assert_eq!(Ok(()), h.store.sync(&h.descr_a));
assert_eq!(Ok(()), h.store.sync(h.descr_a.clone(), store::Limits {}));
{
let descr_b = h.descr_b.clone();
h.store_b
.lock()
.unwrap()
.expect_sync()
.withf(move |descr: &origin::Descr| descr == &descr_b)
.times(1)
.return_const(Ok::<(), store::SyncError>(()));
}
h.store_b
.lock()
.unwrap()
.expect_sync()
.with(predicate::eq(h.descr_b.clone()), predicate::always())
.times(1)
.return_const(Ok::<(), store::SyncError>(()));
assert_eq!(Ok(()), h.store.sync(&h.descr_b));
assert_eq!(Ok(()), h.store.sync(h.descr_b.clone(), store::Limits {}));
assert!(h.store.sync(&h.descr_unknown).is_err());
assert!(h
.store
.sync(h.descr_unknown.clone(), store::Limits {})
.is_err());
}
#[test]

View File

@ -68,7 +68,7 @@ pub fn new(
});
}
service
return service;
}
#[derive(Serialize)]

View File

@ -40,3 +40,21 @@ been set up.</p>
individuals for their community of friends and family. By making it super easy
to set up a domain we can help our non-technical folk own their own slice of
the internet, the way it was always intended.</p>
<h2>Roadmap</h2>
<p>Domani is very much a work in progress. The following functionality is
planned but not yet implemented:</p>
<ul>
<li>Support for AAAA and CNAME records</li>
<li>
Support for more backends than just git repositories, including:
<ul>
<li>IPFS/IPNS Hash</li>
<li>Alternative URL (reverse proxy)</li>
<li>Google Drive / Dropbox</li>
</ul>
</li>
<li>Scalable backend which requires only an S3 compatible database</li>
</ul>

View File

@ -62,17 +62,10 @@ where
self.wait_group.reverse();
for fut in self.wait_group {
fut.await?;
if let Err(err) = fut.await {
return Err(err);
}
}
Ok(())
}
}
/// `Default` simply delegates to the primary constructor, so
/// `TaskStack::default()` and `TaskStack::new()` are interchangeable.
impl<E> Default for TaskStack<E>
where
E: error::Error + Send + 'static,
{
fn default() -> Self {
Self::new()
}
}