Compare commits


3 Commits

Author          SHA1        Message                                        Date
Brian Picciano  773001b158  Changes suggested by clippy                    2023-07-04 19:42:12 +02:00
Brian Picciano  6863694293  Pass descr around as a reference, generally    2023-07-04 19:33:03 +02:00
Brian Picciano  95f7f97716  move roadmap to README                         2023-07-04 19:09:07 +02:00
11 changed files with 80 additions and 107 deletions

View File

@@ -93,4 +93,9 @@ Within the shell which opens you can do `cargo run` to start a local instance.
## Roadmap
Check out the `src/service/http_tpl/index.html` file for the current roadmap.
* Support for AAAA and CNAME records
* Support for more backends than just git repositories, including:
* IPFS/IPNS
* Alternative URLs (reverse proxy)
* Google Drive
* Dropbox

View File

@@ -163,18 +163,21 @@ impl ManagerImpl {
domain_config_store: DomainConfigStore,
domain_checker: checker::DNSChecker,
acme_manager: Option<AcmeManager>,
) -> sync::Arc<dyn Manager> {
) -> sync::Arc<Self> {
let manager = sync::Arc::new(ManagerImpl {
origin_store: Box::from(origin_store),
domain_config_store: Box::from(domain_config_store),
domain_checker: domain_checker,
domain_checker,
acme_manager: acme_manager
.map(|m| Box::new(m) as Box<dyn acme::manager::Manager + Send + Sync>),
});
task_stack.push_spawn(|canceller| {
let manager = manager.clone();
async move { Ok(manager.sync_origins(canceller).await) }
async move {
manager.sync_origins(canceller).await;
Ok(())
}
});
manager
@@ -193,9 +196,8 @@ impl ManagerImpl {
}
}
.for_each(|descr| {
if let Err(err) = self.origin_store.sync(descr.clone(), origin::store::Limits {}) {
log::error!("Failed to sync store for {:?}: {err}", descr);
return;
if let Err(err) = self.origin_store.sync(&descr) {
log::error!("Failed to sync store for {:?}: {err}", descr)
}
});
},
@@ -217,7 +219,7 @@ impl Manager for ManagerImpl {
let config = self.domain_config_store.get(domain)?;
let origin = self
.origin_store
.get(config.origin_descr)
.get(&config.origin_descr)
// if there's a config there should be an origin, any error here is unexpected
.or_unexpected()?;
Ok(origin)
@@ -252,8 +254,7 @@ impl Manager for ManagerImpl {
.check_domain(&domain, &config_hash)
.await?;
self.origin_store
.sync(config.origin_descr.clone(), origin::store::Limits {})?;
self.origin_store.sync(&config.origin_descr)?;
self.domain_config_store.set(&domain, &config)?;
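
The `push_spawn` hunk above clones the `Arc` before moving it into the async block, so the original `manager` can still be returned to the caller while the background task keeps its own handle; the explicit `Ok(())` likely satisfies clippy's `unit_arg` lint, which flags wrapping a unit-returning call directly in `Ok(...)`. A minimal sketch of the clone-then-move pattern, using a hypothetical `Worker` type and plain `tokio::spawn` in place of Domani's `TaskStack`:

```rust
use std::sync::Arc;

// Hypothetical background worker standing in for ManagerImpl.
struct Worker;

impl Worker {
    async fn run(&self) {
        // placeholder for long-running work such as sync_origins
    }
}

#[tokio::main]
async fn main() {
    let worker = Arc::new(Worker);

    let handle = {
        // the clone is what gets moved into the task...
        let worker = worker.clone();
        tokio::spawn(async move {
            worker.run().await;
            // the task itself returns a Result, mirroring push_spawn's closure
            Ok::<(), std::io::Error>(())
        })
    };

    // ...so the original Arc is still available to the caller
    worker.run().await;
    handle.await.unwrap().unwrap();
}
```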

View File

@@ -103,7 +103,7 @@ impl<T, E: error::Error> Mappable<T> for Result<T, E> {
}
}
static OPTION_NONE_ERROR: &'static str = "expected Some but got None";
static OPTION_NONE_ERROR: &str = "expected Some but got None";
impl<T> Mappable<T> for Option<T> {
fn or_unexpected(self) -> Result<T, Error> {

View File

@@ -125,8 +125,8 @@ async fn main() {
domain_manager.clone(),
config.domain_checker_target_a,
config.passphrase,
config.http_listen_addr.clone(),
config.http_domain.clone(),
config.http_listen_addr,
config.http_domain,
config
.https_listen_addr
.map(|listen_addr| domani::service::http::HTTPSParams {

View File

@@ -17,7 +17,6 @@ pub enum ReadFileIntoError {
/// Describes an origin which has already been synced locally and is available for reading files
/// from.
pub trait Origin {
fn descr(&self) -> &Descr;
fn read_file_into(
&self,
path: &str,

View File

@@ -5,11 +5,6 @@ use std::sync;
pub mod git;
pub mod mux;
#[derive(Clone, Copy)]
pub struct Limits {
// TODO storage limits
}
#[derive(thiserror::Error, Clone, Debug, PartialEq)]
pub enum SyncError {
#[error("invalid url")]
@@ -45,9 +40,9 @@ pub enum AllDescrsError {
pub trait Store {
/// If the origin is of a kind which can be updated, sync will pull down the latest version of
/// the origin into the storage.
fn sync(&self, descr: origin::Descr, limits: Limits) -> Result<(), SyncError>;
fn sync(&self, descr: &origin::Descr) -> Result<(), SyncError>;
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError>;
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError>;
fn all_descrs(&self) -> Result<Vec<origin::Descr>, AllDescrsError>;
}
@@ -56,11 +51,11 @@ pub fn new_mock() -> sync::Arc<sync::Mutex<MockStore>> {
}
impl Store for sync::Arc<sync::Mutex<MockStore>> {
fn sync(&self, descr: origin::Descr, limits: Limits) -> Result<(), SyncError> {
self.lock().unwrap().sync(descr, limits)
fn sync(&self, descr: &origin::Descr) -> Result<(), SyncError> {
self.lock().unwrap().sync(descr)
}
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError> {
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, GetError> {
self.lock().unwrap().get(descr)
}
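
With `Limits` gone and `Descr` taken by reference, `Store` callers such as `ManagerImpl` no longer clone a descriptor just to perform a lookup; a clone happens only where an owned key is genuinely needed. A small in-memory sketch of the revised shape, with simplified stand-ins for `Descr`, the error type, and the stored origin (none of these are Domani's real definitions):

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

// Simplified stand-ins; Domani's real Descr, errors, and Origin are richer.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct Descr {
    url: String,
    branch_name: String,
}

#[derive(Debug)]
enum StoreError {
    NotFound,
}

trait Store {
    fn sync(&self, descr: &Descr) -> Result<(), StoreError>;
    fn get(&self, descr: &Descr) -> Result<Arc<String>, StoreError>;
}

struct MemStore {
    origins: RwLock<HashMap<Descr, Arc<String>>>,
}

impl Store for MemStore {
    fn sync(&self, descr: &Descr) -> Result<(), StoreError> {
        let mut origins = self.origins.write().unwrap();
        // clone only at the point the descriptor becomes an owned map key
        origins.insert(descr.clone(), Arc::new(format!("synced {}", descr.url)));
        Ok(())
    }

    fn get(&self, descr: &Descr) -> Result<Arc<String>, StoreError> {
        // HashMap::get accepts the borrowed key, so no clone is needed here
        self.origins
            .read()
            .unwrap()
            .get(descr)
            .cloned()
            .ok_or(StoreError::NotFound)
    }
}

fn main() {
    let store = MemStore {
        origins: RwLock::new(HashMap::new()),
    };
    let descr = Descr {
        url: "https://example.com/repo.git".to_string(),
        branch_name: "main".to_string(),
    };
    // the same borrowed descriptor is used for both calls
    store.sync(&descr).unwrap();
    let origin = store.get(&descr).unwrap();
    assert_eq!(origin.as_str(), "synced https://example.com/repo.git");
}
```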

View File

@@ -6,16 +6,11 @@ use std::{collections, fs, io, sync};
#[derive(Clone)]
struct Origin {
descr: origin::Descr,
repo: sync::Arc<gix::ThreadSafeRepository>,
tree_object_id: gix::ObjectId,
}
impl origin::Origin for Origin {
fn descr(&self) -> &origin::Descr {
&self.descr
}
fn read_file_into(
&self,
path: &str,
@@ -96,7 +91,7 @@ impl FSStore {
fn get_origin(
&self,
repo: gix::Repository,
descr: origin::Descr,
descr: &origin::Descr,
) -> Result<Origin, GetOriginError> {
let origin::Descr::Git {
ref branch_name, ..
@@ -120,17 +115,12 @@ impl FSStore {
.tree();
Ok(Origin {
descr,
repo: sync::Arc::new(repo.into()),
tree_object_id,
})
}
fn sync_inner(
&self,
descr: &origin::Descr,
_limits: store::Limits,
) -> Result<gix::Repository, store::SyncError> {
fn sync_inner(&self, descr: &origin::Descr) -> Result<gix::Repository, store::SyncError> {
use gix::clone::Error as gixCloneErr;
use gix::progress::Discard;
@@ -209,7 +199,7 @@ impl FSStore {
}
impl super::Store for FSStore {
fn sync(&self, descr: origin::Descr, limits: store::Limits) -> Result<(), store::SyncError> {
fn sync(&self, descr: &origin::Descr) -> Result<(), store::SyncError> {
// attempt to lock this descr for syncing, doing so within a new scope so the mutex
// isn't actually being held for the whole method duration.
let is_already_syncing = {
@@ -224,9 +214,9 @@ impl super::Store for FSStore {
return Err(store::SyncError::AlreadyInProgress);
}
let res = self.sync_inner(&descr, limits);
let res = self.sync_inner(descr);
self.sync_guard.lock().unwrap().remove(&descr);
self.sync_guard.lock().unwrap().remove(descr);
let repo = match res {
Ok(repo) => repo,
@@ -242,26 +232,26 @@ impl super::Store for FSStore {
// calling this while the sync lock is held isn't ideal, but it's convenient and
// shouldn't be too terrible generally
let origin = self.get_origin(repo, descr.clone()).map_err(|e| match e {
let origin = self.get_origin(repo, descr).map_err(|e| match e {
GetOriginError::InvalidBranchName => store::SyncError::InvalidBranchName,
GetOriginError::Unexpected(e) => store::SyncError::Unexpected(e),
})?;
let mut origins = self.origins.write().unwrap();
(*origins).insert(descr, sync::Arc::new(origin));
(*origins).insert(descr.clone(), sync::Arc::new(origin));
Ok(())
}
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
{
let origins = self.origins.read().unwrap();
if let Some(origin) = origins.get(&descr) {
if let Some(origin) = origins.get(descr) {
return Ok(origin.clone());
}
}
let repo_path = self.repo_path(&descr);
let repo_path = self.repo_path(descr);
fs::read_dir(&repo_path).map_err(|e| match e.kind() {
io::ErrorKind::NotFound => store::GetError::NotFound,
@@ -273,18 +263,18 @@ impl super::Store for FSStore {
let repo = gix::open(&repo_path)
.map_unexpected_while(|| format!("opening {} as git repo", repo_path.display()))?;
let origin = self.get_origin(repo, descr.clone()).map_err(|e| match e {
let origin = self.get_origin(repo, descr).map_err(|e| match e {
// it's not expected that the branch name is invalid at this point, it must have
// existed for sync to have been successful.
GetOriginError::InvalidBranchName => e.into_unexpected().into(),
GetOriginError::Unexpected(e) => store::GetError::Unexpected(e),
})?;
let origin = sync::Arc::new(origin.clone());
let origin = sync::Arc::new(origin);
let mut origins = self.origins.write().unwrap();
(*origins).insert(descr, origin.clone());
(*origins).insert(descr.clone(), origin.clone());
Ok(origin)
}
@@ -342,25 +332,17 @@ mod tests {
branch_name: String::from("some_other_branch"),
};
let limits = store::Limits {};
let store = super::FSStore::new(tmp_dir.path().to_path_buf()).expect("store created");
store
.sync(descr.clone(), limits)
.expect("sync should succeed");
store
.sync(descr.clone(), limits)
.expect("second sync should succeed");
store.sync(&descr).expect("sync should succeed");
store.sync(&descr).expect("second sync should succeed");
assert!(matches!(
store.get(other_descr),
store.get(&other_descr),
Err::<_, store::GetError>(store::GetError::NotFound),
));
let origin = store.get(descr.clone()).expect("origin retrieved");
assert_eq!(&descr, origin.descr());
let origin = store.get(&descr).expect("origin retrieved");
let assert_write = |path: &str| {
let mut into: Vec<u8> = vec![];
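
The comment in `FSStore::sync` above highlights the idiom preserved through this refactor: the "currently syncing" set is locked inside an inner scope so the guard is dropped before the slow clone/fetch work, then re-acquired only briefly to clear the flag afterwards. A standalone sketch of that scoped-lock pattern, with hypothetical `Syncer` and `do_slow_sync` names rather than Domani's types:

```rust
use std::collections::HashSet;
use std::sync::Mutex;

struct Syncer {
    in_progress: Mutex<HashSet<String>>,
}

impl Syncer {
    fn sync(&self, key: &str) -> Result<(), String> {
        // lock inside an inner scope so the guard is dropped before the slow work
        let already_syncing = {
            let mut guard = self.in_progress.lock().unwrap();
            !guard.insert(key.to_string())
        };
        if already_syncing {
            return Err("sync already in progress".to_string());
        }

        let res = self.do_slow_sync(key); // the mutex is NOT held here

        // re-lock briefly to clear the flag, whether or not the sync succeeded
        self.in_progress.lock().unwrap().remove(key);
        res
    }

    fn do_slow_sync(&self, _key: &str) -> Result<(), String> {
        // placeholder for the actual clone/fetch work
        Ok(())
    }
}

fn main() {
    let syncer = Syncer {
        in_progress: Mutex::new(HashSet::new()),
    };
    syncer.sync("example.com").unwrap();
    // a second sync of the same key is fine once the first has finished
    syncer.sync("example.com").unwrap();
}
```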

View File

@@ -26,14 +26,14 @@ where
S: store::Store + 'static,
F: Fn(&origin::Descr) -> Option<S> + Sync + Send,
{
fn sync(&self, descr: origin::Descr, limits: store::Limits) -> Result<(), store::SyncError> {
(self.mapping_fn)(&descr)
fn sync(&self, descr: &origin::Descr) -> Result<(), store::SyncError> {
(self.mapping_fn)(descr)
.or_unexpected_while(format!("mapping {:?} to store", &descr))?
.sync(descr, limits)
.sync(descr)
}
fn get(&self, descr: origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
(self.mapping_fn)(&descr)
fn get(&self, descr: &origin::Descr) -> Result<sync::Arc<dyn origin::Origin>, store::GetError> {
(self.mapping_fn)(descr)
.or_unexpected_while(format!("mapping {:?} to store", &descr))?
.get(descr)
}
@@ -52,7 +52,6 @@ where
#[cfg(test)]
mod tests {
use crate::origin::{self, store};
use mockall::predicate;
use std::sync;
struct Harness {
@@ -112,30 +111,33 @@ mod tests {
fn sync() {
let h = Harness::new();
{
let descr_a = h.descr_a.clone();
h.store_a
.lock()
.unwrap()
.expect_sync()
.with(predicate::eq(h.descr_a.clone()), predicate::always())
.withf(move |descr: &origin::Descr| descr == &descr_a)
.times(1)
.return_const(Ok::<(), store::SyncError>(()));
}
assert_eq!(Ok(()), h.store.sync(h.descr_a.clone(), store::Limits {}));
assert_eq!(Ok(()), h.store.sync(&h.descr_a));
{
let descr_b = h.descr_b.clone();
h.store_b
.lock()
.unwrap()
.expect_sync()
.with(predicate::eq(h.descr_b.clone()), predicate::always())
.withf(move |descr: &origin::Descr| descr == &descr_b)
.times(1)
.return_const(Ok::<(), store::SyncError>(()));
}
assert_eq!(Ok(()), h.store.sync(h.descr_b.clone(), store::Limits {}));
assert_eq!(Ok(()), h.store.sync(&h.descr_b));
assert!(h
.store
.sync(h.descr_unknown.clone(), store::Limits {})
.is_err());
assert!(h.store.sync(&h.descr_unknown).is_err());
}
#[test]

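The mux tests above replace `with(predicate::eq(...), predicate::always())` with `withf`, whose closure receives the borrowed argument directly now that the mocked methods take `&Descr`. A tiny self-contained mockall sketch of the same matcher style, using a hypothetical `FakeStore` mock unrelated to Domani:

```rust
use mockall::mock;

// Hypothetical mock, unrelated to Domani's MockStore.
mock! {
    FakeStore {
        fn check(&self, name: &str) -> bool;
    }
}

fn main() {
    let mut store = MockFakeStore::new();
    store
        .expect_check()
        // the closure borrows the argument; no owned copy of the expected value
        .withf(|name: &str| name == "example.com")
        .times(1)
        .return_const(true);
    assert!(store.check("example.com"));
}
```
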
View File

@@ -68,7 +68,7 @@ pub fn new(
});
}
return service;
service
}
#[derive(Serialize)]

View File

@@ -40,21 +40,3 @@ been set up.</p>
individuals for their community of friends and family. By making it super easy
to set up a domain we can help our non-technical folk own their own slice of
the internet, the way it was always intended.</p>
<h2>Roadmap</h2>
<p>Domani is very much a work in progress. The following functionality is
planned but not yet implemented:</p>
<ul>
<li>Support for AAAA and CNAME records</li>
<li>
Support for more backends than just git repositories, including:
<ul>
<li>IPFS/IPNS Hash</li>
<li>Alternative URL (reverse proxy)</li>
<li>Google Drive / Dropbox</li>
</ul>
</li>
<li>Scalable backend which requires only an S3 compatible database</li>
</ul>

View File

@@ -62,10 +62,17 @@ where
self.wait_group.reverse();
for fut in self.wait_group {
if let Err(err) = fut.await {
return Err(err);
}
fut.await?;
}
Ok(())
}
}
impl<E> Default for TaskStack<E>
where
E: error::Error + Send + 'static,
{
fn default() -> Self {
Self::new()
}
}
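
The final hunk looks like the usual response to clippy's `new_without_default` lint: a type with a zero-argument `new()` also gets a `Default` impl that simply delegates to it, so generic code bounded on `Default` can construct a `TaskStack` as well. A minimal sketch of the convention with a hypothetical `Registry` type:

```rust
// Hypothetical type; TaskStack's generics are omitted for brevity.
struct Registry {
    entries: Vec<String>,
}

impl Registry {
    fn new() -> Self {
        Registry { entries: Vec::new() }
    }
}

// clippy::new_without_default suggests exactly this kind of delegation
impl Default for Registry {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    // both construction paths are available and equivalent
    let a = Registry::new();
    let b = Registry::default();
    assert_eq!(a.entries.len(), b.entries.len());
}
```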