Cargo fmt

Alex Auvolat 2021-02-23 18:46:25 +01:00
parent 6e6f7e8555
commit 40763fd749
12 changed files with 67 additions and 32 deletions

@@ -160,10 +160,15 @@ async fn handler_inner(garage: Arc<Garage>, req: Request<Body>) -> Result<Respon
 	} else if params.contains_key(&"uploadid".to_string()) {
 		// CompleteMultipartUpload call
 		let upload_id = params.get("uploadid").unwrap();
-		Ok(
-			handle_complete_multipart_upload(garage, req, &bucket, &key, upload_id, content_sha256)
-				.await?,
-		)
+		Ok(handle_complete_multipart_upload(
+			garage,
+			req,
+			&bucket,
+			&key,
+			upload_id,
+			content_sha256,
+		)
+		.await?)
 	} else {
 		Err(Error::BadRequest(format!(
 			"Not a CreateMultipartUpload call, what is it?"

@@ -104,6 +104,6 @@ pub async fn handle_copy(
 	writeln!(&mut xml, "</CopyObjectResult>").unwrap();

 	Ok(Response::builder()
 		.header("Content-Type", "application/xml")
 		.body(Body::from(xml.into_bytes()))?)
 }

@@ -123,8 +123,8 @@ pub async fn handle_delete_objects(
 	writeln!(&mut retxml, "</DeleteObjectsOutput>").unwrap();

 	Ok(Response::builder()
 		.header("Content-Type", "application/xml")
 		.body(Body::from(retxml.into_bytes()))?)
 }

 struct DeleteRequest {

@@ -229,10 +229,20 @@ pub async fn handle_list(
 	} else {
 		// TODO: are these supposed to be urlencoded when encoding-type is URL??
 		if let Some(mkr) = &query.marker {
-			writeln!(&mut xml, "\t<Marker>{}</Marker>", xml_encode_key(mkr, query.urlencode_resp)).unwrap();
+			writeln!(
+				&mut xml,
+				"\t<Marker>{}</Marker>",
+				xml_encode_key(mkr, query.urlencode_resp)
+			)
+			.unwrap();
 		}
 		if let Some(next_marker) = truncated {
-			writeln!(&mut xml, "\t<NextMarker>{}</NextMarker>", xml_encode_key(&next_marker, query.urlencode_resp)).unwrap();
+			writeln!(
+				&mut xml,
+				"\t<NextMarker>{}</NextMarker>",
+				xml_encode_key(&next_marker, query.urlencode_resp)
+			)
+			.unwrap();
 		}
 	}
@@ -272,6 +282,6 @@ pub async fn handle_list(
 	debug!("{}", xml);

 	Ok(Response::builder()
 		.header("Content-Type", "application/xml")
 		.body(Body::from(xml.into_bytes()))?)
 }

@@ -17,8 +17,8 @@ use garage_model::garage::Garage;
 use garage_model::object_table::*;
 use garage_model::version_table::*;

-use crate::error::*;
 use crate::encoding::*;
+use crate::error::*;
 use crate::signature::verify_signed_content;

 pub async fn handle_put(
@@ -427,8 +427,12 @@ pub async fn handle_complete_multipart_upload(
 	verify_signed_content(content_sha256, &body[..])?;

 	let body_xml = roxmltree::Document::parse(&std::str::from_utf8(&body)?)?;
-	let body_list_of_parts = parse_complete_multpart_upload_body(&body_xml).ok_or_bad_request("Invalid CompleteMultipartUpload XML")?;
-	debug!("CompleteMultipartUpload list of parts: {:?}", body_list_of_parts);
+	let body_list_of_parts = parse_complete_multpart_upload_body(&body_xml)
+		.ok_or_bad_request("Invalid CompleteMultipartUpload XML")?;
+	debug!(
+		"CompleteMultipartUpload list of parts: {:?}",
+		body_list_of_parts
+	);

 	let version_uuid = decode_upload_id(upload_id)?;
@@ -461,10 +465,16 @@ pub async fn handle_complete_multipart_upload(
 	// Check that the list of parts they gave us corresponds to the parts we have here
 	// TODO: check MD5 sum of all uploaded parts? but that would mean we have to store them somewhere...
-	let mut parts = version.blocks().iter().map(|x| x.part_number)
+	let mut parts = version
+		.blocks()
+		.iter()
+		.map(|x| x.part_number)
 		.collect::<Vec<_>>();
 	parts.dedup();

-	let same_parts = body_list_of_parts.iter().map(|x| &x.part_number).eq(parts.iter());
+	let same_parts = body_list_of_parts
+		.iter()
+		.map(|x| &x.part_number)
+		.eq(parts.iter());
 	if !same_parts {
 		return Err(Error::BadRequest(format!("We don't have the same parts")));
 	}
@@ -604,7 +614,9 @@ struct CompleteMultipartUploadPart {
 	part_number: u64,
 }

-fn parse_complete_multpart_upload_body(xml: &roxmltree::Document) -> Option<Vec<CompleteMultipartUploadPart>> {
+fn parse_complete_multpart_upload_body(
+	xml: &roxmltree::Document,
+) -> Option<Vec<CompleteMultipartUploadPart>> {
 	let mut parts = vec![];
 	let root = xml.root();
@@ -616,8 +628,11 @@ fn parse_complete_multpart_upload_body(xml: &roxmltree::Document) -> Option<Vec<
 	for item in cmu.children() {
 		if item.has_tag_name("Part") {
 			let etag = item.children().find(|e| e.has_tag_name("ETag"))?.text()?;
-			let part_number = item.children().find(|e| e.has_tag_name("PartNumber"))?.text()?;
-			parts.push(CompleteMultipartUploadPart{
+			let part_number = item
+				.children()
+				.find(|e| e.has_tag_name("PartNumber"))?
+				.text()?;
+			parts.push(CompleteMultipartUploadPart {
 				etag: etag.trim_matches('"').to_string(),
 				part_number: part_number.parse().ok()?,
 			});

@@ -295,9 +295,12 @@ fn canonical_query_string(uri: &hyper::Uri) -> String {
 }

 pub fn verify_signed_content(content_sha256: Option<Hash>, body: &[u8]) -> Result<(), Error> {
-	let expected_sha256 = content_sha256.ok_or_bad_request("Request content hash not signed, aborting.")?;
+	let expected_sha256 =
+		content_sha256.ok_or_bad_request("Request content hash not signed, aborting.")?;
 	if expected_sha256 != sha256sum(body) {
-		return Err(Error::BadRequest(format!("Request content hash does not match signed hash")));
+		return Err(Error::BadRequest(format!(
+			"Request content hash does not match signed hash"
+		)));
 	}
 	Ok(())
 }

@@ -4,9 +4,8 @@ extern crate log;
 pub mod consul;
 pub(crate) mod tls_util;

-pub mod ring;
 pub mod membership;
+pub mod ring;
 pub mod rpc_client;
 pub mod rpc_server;

@@ -1,6 +1,6 @@
 use std::collections::HashMap;
-use std::io::{Read, Write};
 use std::fmt::Write as FmtWrite;
+use std::io::{Read, Write};
 use std::net::{IpAddr, SocketAddr};
 use std::path::PathBuf;
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -20,9 +20,9 @@ use garage_util::data::*;
 use garage_util::error::Error;

 use crate::consul::get_consul_nodes;
+use crate::ring::*;
 use crate::rpc_client::*;
 use crate::rpc_server::*;
-use crate::ring::*;

 const PING_INTERVAL: Duration = Duration::from_secs(10);
 const CONSUL_INTERVAL: Duration = Duration::from_secs(60);

@@ -4,7 +4,6 @@ use serde::{Deserialize, Serialize};
 use garage_util::data::*;

-
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct NetworkConfig {
 	pub members: HashMap<UUID, NetworkConfigEntry>,
@@ -13,7 +12,7 @@ pub struct NetworkConfig {
 impl NetworkConfig {
 	pub(crate) fn new() -> Self {
-		Self{
+		Self {
 			members: HashMap::new(),
 			version: 0,
 		}
@@ -27,7 +26,6 @@ pub struct NetworkConfigEntry {
 	pub tag: String,
 }

-
 #[derive(Clone)]
 pub struct Ring {
 	pub config: NetworkConfig,
@@ -53,7 +51,12 @@ impl Ring {
 			if !datacenters.contains(datacenter) {
 				datacenters.push(datacenter.to_string());
 			}
-			let datacenter_idx = datacenters.iter().enumerate().find(|(_, dc)| *dc == datacenter).unwrap().0;
+			let datacenter_idx = datacenters
+				.iter()
+				.enumerate()
+				.find(|(_, dc)| *dc == datacenter)
+				.unwrap()
+				.0;

 			for i in 0..config.n_tokens {
 				let location = sha256sum(format!("{} {}", hex::encode(&id), i).as_bytes());

@@ -1,7 +1,7 @@
 use arc_swap::ArcSwapOption;
 use std::sync::Arc;

-use garage_rpc::membership::{System};
+use garage_rpc::membership::System;
 use garage_rpc::ring::Ring;
 use garage_util::data::*;

@@ -1,4 +1,4 @@
-use garage_rpc::membership::{System};
+use garage_rpc::membership::System;
 use garage_rpc::ring::Ring;
 use garage_util::data::*;

@@ -99,8 +99,8 @@ pub fn blake2sum(data: &[u8]) -> Hash {
 pub type FastHash = u64;

 pub fn fasthash(data: &[u8]) -> FastHash {
-	use std::hash::Hasher;
 	use fasthash::{xx::Hasher64, FastHasher};
+	use std::hash::Hasher;

 	let mut h = Hasher64::new();
 	h.write(data);