Improved XML serialization

- Use quick_xml and serde for all XML response returned by the S3 API.
- Include tests for all structs used to generate XML
- Remove old manual XML escaping function which was unsafe
This commit is contained in:
Alex Auvolat 2021-05-03 22:45:42 +02:00
parent e4b9e4e24d
commit 6ccffc3162
No known key found for this signature in database
GPG Key ID: EDABF9711E244EB1
10 changed files with 727 additions and 280 deletions

View File

@ -1,13 +1,5 @@
//! Module containing various helpers for encoding //! Module containing various helpers for encoding
/// Escape a &str for safe inclusion in XML text or attribute content.
///
/// `&` must be escaped (and escaped first, so the other replacements'
/// entities are not themselves re-escaped), then `<`, `>` and `"`.
pub fn xml_escape(s: &str) -> String {
    // Bug fix: the original replaced "&" with "&" (a no-op), leaving raw
    // ampersands in the output and producing invalid XML for any key or
    // message containing "&".
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
}
/// Encode &str for use in a URI /// Encode &str for use in a URI
pub fn uri_encode(string: &str, encode_slash: bool) -> String { pub fn uri_encode(string: &str, encode_slash: bool) -> String {
let mut result = String::with_capacity(string.len() * 2); let mut result = String::with_capacity(string.len() * 2);
@ -28,12 +20,3 @@ pub fn uri_encode(string: &str, encode_slash: bool) -> String {
} }
result result
} }
/// Encode a &str either as a URI, or as a string valid for XML inclusion.
pub fn xml_encode_key(k: &str, urlencode: bool) -> String {
    match urlencode {
        true => uri_encode(k, true),
        false => xml_escape(k),
    }
}

View File

@ -1,11 +1,9 @@
use std::fmt::Write;
use err_derive::Error; use err_derive::Error;
use hyper::StatusCode; use hyper::StatusCode;
use garage_util::error::Error as GarageError; use garage_util::error::Error as GarageError;
use crate::encoding::*; use crate::s3_xml;
/// Errors of this crate /// Errors of this crate
#[derive(Debug, Error)] #[derive(Debug, Error)]
@ -104,15 +102,22 @@ impl Error {
} }
pub fn aws_xml(&self, garage_region: &str, path: &str) -> String { pub fn aws_xml(&self, garage_region: &str, path: &str) -> String {
let mut xml = String::new(); let error = s3_xml::Error {
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap(); code: s3_xml::Value(self.aws_code().to_string()),
writeln!(&mut xml, "<Error>").unwrap(); message: s3_xml::Value(format!("{}", self)),
writeln!(&mut xml, "\t<Code>{}</Code>", self.aws_code()).unwrap(); resource: Some(s3_xml::Value(path.to_string())),
writeln!(&mut xml, "\t<Message>{}</Message>", self).unwrap(); region: Some(s3_xml::Value(garage_region.to_string())),
writeln!(&mut xml, "\t<Resource>{}</Resource>", xml_escape(path)).unwrap(); };
writeln!(&mut xml, "\t<Region>{}</Region>", garage_region).unwrap(); s3_xml::to_xml_with_header(&error).unwrap_or_else(|_| {
writeln!(&mut xml, "</Error>").unwrap(); r#"
xml <?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>InternalError</Code>
<Message>XML encoding of error failed</Message>
</Error>
"#
.into()
})
} }
} }

View File

@ -18,3 +18,4 @@ mod s3_delete;
pub mod s3_get; pub mod s3_get;
mod s3_list; mod s3_list;
mod s3_put; mod s3_put;
mod s3_xml;

View File

@ -1,73 +1,20 @@
use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use hyper::{Body, Response}; use hyper::{Body, Response};
use quick_xml::se::to_string;
use serde::Serialize;
use garage_model::garage::Garage; use garage_model::garage::Garage;
use garage_model::key_table::Key; use garage_model::key_table::Key;
use garage_util::time::*; use garage_util::time::*;
use crate::error::*; use crate::error::*;
use crate::s3_xml;
#[derive(Debug, Serialize, PartialEq)]
struct CreationDate {
#[serde(rename = "$value")]
pub body: String,
}
#[derive(Debug, Serialize, PartialEq)]
struct Name {
#[serde(rename = "$value")]
pub body: String,
}
#[derive(Debug, Serialize, PartialEq)]
struct Bucket {
#[serde(rename = "CreationDate")]
pub creation_date: CreationDate,
#[serde(rename = "Name")]
pub name: Name,
}
#[derive(Debug, Serialize, PartialEq)]
struct DisplayName {
#[serde(rename = "$value")]
pub body: String,
}
#[derive(Debug, Serialize, PartialEq)]
struct Id {
#[serde(rename = "$value")]
pub body: String,
}
#[derive(Debug, Serialize, PartialEq)]
struct Owner {
#[serde(rename = "DisplayName")]
display_name: DisplayName,
#[serde(rename = "ID")]
id: Id,
}
#[derive(Debug, Serialize, PartialEq)]
struct BucketList {
#[serde(rename = "Bucket")]
pub entries: Vec<Bucket>,
}
#[derive(Debug, Serialize, PartialEq)]
struct ListAllMyBucketsResult {
#[serde(rename = "Buckets")]
buckets: BucketList,
#[serde(rename = "Owner")]
owner: Owner,
}
pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>, Error> { pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>, Error> {
let mut xml = String::new(); let loc = s3_xml::LocationConstraint {
xmlns: (),
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap(); region: garage.config.s3_api.s3_region.to_string(),
writeln!( };
&mut xml, let xml = s3_xml::to_xml_with_header(&loc)?;
r#"<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">{}</LocationConstraint>"#,
garage.config.s3_api.s3_region
)
.unwrap();
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
@ -75,34 +22,25 @@ pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>,
} }
pub fn handle_list_buckets(api_key: &Key) -> Result<Response<Body>, Error> { pub fn handle_list_buckets(api_key: &Key) -> Result<Response<Body>, Error> {
let list_buckets = ListAllMyBucketsResult { let list_buckets = s3_xml::ListAllMyBucketsResult {
owner: Owner { owner: s3_xml::Owner {
display_name: DisplayName { display_name: s3_xml::Value(api_key.name.get().to_string()),
body: api_key.name.get().to_string(), id: s3_xml::Value(api_key.key_id.to_string()),
},
id: Id {
body: api_key.key_id.to_string(),
},
}, },
buckets: BucketList { buckets: s3_xml::BucketList {
entries: api_key entries: api_key
.authorized_buckets .authorized_buckets
.items() .items()
.iter() .iter()
.map(|(name, ts, _)| Bucket { .map(|(name, ts, _)| s3_xml::Bucket {
creation_date: CreationDate { creation_date: s3_xml::Value(msec_to_rfc3339(*ts)),
body: msec_to_rfc3339(*ts), name: s3_xml::Value(name.to_string()),
},
name: Name {
body: name.to_string(),
},
}) })
.collect(), .collect(),
}, },
}; };
let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string(); let xml = s3_xml::to_xml_with_header(&list_buckets)?;
xml.push_str(&to_string(&list_buckets)?);
trace!("xml: {}", xml); trace!("xml: {}", xml);
Ok(Response::builder() Ok(Response::builder()

View File

@ -1,4 +1,3 @@
use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use hyper::{Body, Request, Response}; use hyper::{Body, Request, Response};
@ -14,6 +13,7 @@ use garage_model::version_table::*;
use crate::error::*; use crate::error::*;
use crate::s3_put::get_headers; use crate::s3_put::get_headers;
use crate::s3_xml;
pub async fn handle_copy( pub async fn handle_copy(
garage: Arc<Garage>, garage: Arc<Garage>,
@ -61,6 +61,8 @@ pub async fn handle_copy(
_ => old_meta.clone(), _ => old_meta.clone(),
}; };
let etag = new_meta.etag.to_string();
// Save object copy // Save object copy
match source_last_state { match source_last_state {
ObjectVersionData::DeleteMarker => unreachable!(), ObjectVersionData::DeleteMarker => unreachable!(),
@ -158,13 +160,13 @@ pub async fn handle_copy(
} }
let last_modified = msec_to_rfc3339(new_timestamp); let last_modified = msec_to_rfc3339(new_timestamp);
let mut xml = String::new(); let result = s3_xml::CopyObjectResult {
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap(); last_modified: s3_xml::Value(last_modified),
writeln!(&mut xml, r#"<CopyObjectResult>"#).unwrap(); etag: s3_xml::Value(etag),
writeln!(&mut xml, "\t<LastModified>{}</LastModified>", last_modified).unwrap(); };
writeln!(&mut xml, "</CopyObjectResult>").unwrap(); let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?) .body(Body::from(xml))?)
} }

View File

@ -1,4 +1,3 @@
use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use hyper::{Body, Request, Response}; use hyper::{Body, Request, Response};
@ -9,8 +8,8 @@ use garage_util::time::*;
use garage_model::garage::Garage; use garage_model::garage::Garage;
use garage_model::object_table::*; use garage_model::object_table::*;
use crate::encoding::*;
use crate::error::*; use crate::error::*;
use crate::s3_xml;
use crate::signature::verify_signed_content; use crate::signature::verify_signed_content;
async fn handle_delete_internal( async fn handle_delete_internal(
@ -85,13 +84,8 @@ pub async fn handle_delete_objects(
let cmd_xml = roxmltree::Document::parse(&std::str::from_utf8(&body)?)?; let cmd_xml = roxmltree::Document::parse(&std::str::from_utf8(&body)?)?;
let cmd = parse_delete_objects_xml(&cmd_xml).ok_or_bad_request("Invalid delete XML query")?; let cmd = parse_delete_objects_xml(&cmd_xml).ok_or_bad_request("Invalid delete XML query")?;
let mut retxml = String::new(); let mut ret_deleted = Vec::new();
writeln!(&mut retxml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap(); let mut ret_errors = Vec::new();
writeln!(
&mut retxml,
r#"<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
)
.unwrap();
for obj in cmd.objects.iter() { for obj in cmd.objects.iter() {
match handle_delete_internal(&garage, bucket, &obj.key).await { match handle_delete_internal(&garage, bucket, &obj.key).await {
@ -99,42 +93,32 @@ pub async fn handle_delete_objects(
if cmd.quiet { if cmd.quiet {
continue; continue;
} }
writeln!(&mut retxml, "\t<Deleted>").unwrap(); ret_deleted.push(s3_xml::Deleted {
writeln!(&mut retxml, "\t\t<Key>{}</Key>", xml_escape(&obj.key)).unwrap(); key: s3_xml::Value(obj.key.clone()),
writeln!( version_id: s3_xml::Value(hex::encode(deleted_version)),
&mut retxml, delete_marker_version_id: s3_xml::Value(hex::encode(delete_marker_version)),
"\t\t<VersionId>{}</VersionId>", });
hex::encode(deleted_version)
)
.unwrap();
writeln!(
&mut retxml,
"\t\t<DeleteMarkerVersionId>{}</DeleteMarkerVersionId>",
hex::encode(delete_marker_version)
)
.unwrap();
writeln!(&mut retxml, "\t</Deleted>").unwrap();
} }
Err(e) => { Err(e) => {
writeln!(&mut retxml, "\t<Error>").unwrap(); ret_errors.push(s3_xml::DeleteError {
writeln!(&mut retxml, "\t\t<Code>{}</Code>", e.http_status_code()).unwrap(); code: s3_xml::Value(e.aws_code().to_string()),
writeln!(&mut retxml, "\t\t<Key>{}</Key>", xml_escape(&obj.key)).unwrap(); key: Some(s3_xml::Value(obj.key.clone())),
writeln!( message: s3_xml::Value(format!("{}", e)),
&mut retxml, version_id: None,
"\t\t<Message>{}</Message>", });
xml_escape(&format!("{}", e))
)
.unwrap();
writeln!(&mut retxml, "\t</Error>").unwrap();
} }
} }
} }
writeln!(&mut retxml, "</DeleteResult>").unwrap(); let xml = s3_xml::to_xml_with_header(&s3_xml::DeleteResult {
xmlns: (),
deleted: ret_deleted,
errors: ret_errors,
})?;
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(retxml.into_bytes()))?) .body(Body::from(xml))?)
} }
struct DeleteRequest { struct DeleteRequest {

View File

@ -1,5 +1,4 @@
use std::collections::{BTreeMap, BTreeSet, HashMap}; use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use hyper::{Body, Response}; use hyper::{Body, Response};
@ -14,6 +13,7 @@ use garage_table::DeletedFilter;
use crate::encoding::*; use crate::encoding::*;
use crate::error::*; use crate::error::*;
use crate::s3_xml;
#[derive(Debug)] #[derive(Debug)]
pub struct ListObjectsQuery { pub struct ListObjectsQuery {
@ -163,126 +163,81 @@ pub async fn handle_list(
} }
} }
let mut xml = String::new(); let mut result = s3_xml::ListBucketResult {
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap(); xmlns: (),
writeln!( name: s3_xml::Value(query.bucket.to_string()),
&mut xml, prefix: uriencode_maybe(&query.prefix, query.urlencode_resp),
r#"<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"# marker: None,
) next_marker: None,
.unwrap(); start_after: None,
continuation_token: None,
next_continuation_token: None,
max_keys: s3_xml::IntValue(query.max_keys as i64),
delimiter: query
.delimiter
.as_ref()
.map(|x| uriencode_maybe(x, query.urlencode_resp)),
encoding_type: match query.urlencode_resp {
true => Some(s3_xml::Value("url".to_string())),
false => None,
},
writeln!(&mut xml, "\t<Name>{}</Name>", query.bucket).unwrap(); key_count: Some(s3_xml::IntValue(
result_keys.len() as i64 + result_common_prefixes.len() as i64,
// TODO: in V1, is this supposed to be urlencoded when encoding-type is URL?? )),
writeln!( is_truncated: s3_xml::Value(format!("{}", truncated.is_some())),
&mut xml, contents: vec![],
"\t<Prefix>{}</Prefix>", common_prefixes: vec![],
xml_encode_key(&query.prefix, query.urlencode_resp), };
)
.unwrap();
if let Some(delim) = &query.delimiter {
// TODO: in V1, is this supposed to be urlencoded when encoding-type is URL??
writeln!(
&mut xml,
"\t<Delimiter>{}</Delimiter>",
xml_encode_key(delim, query.urlencode_resp),
)
.unwrap();
}
writeln!(&mut xml, "\t<MaxKeys>{}</MaxKeys>", query.max_keys).unwrap();
if query.urlencode_resp {
writeln!(&mut xml, "\t<EncodingType>url</EncodingType>").unwrap();
}
writeln!(
&mut xml,
"\t<KeyCount>{}</KeyCount>",
result_keys.len() + result_common_prefixes.len()
)
.unwrap();
writeln!(
&mut xml,
"\t<IsTruncated>{}</IsTruncated>",
truncated.is_some()
)
.unwrap();
if query.is_v2 { if query.is_v2 {
if let Some(ct) = &query.continuation_token { if let Some(ct) = &query.continuation_token {
writeln!(&mut xml, "\t<ContinuationToken>{}</ContinuationToken>", ct).unwrap(); result.continuation_token = Some(s3_xml::Value(ct.to_string()));
} }
if let Some(sa) = &query.start_after { if let Some(sa) = &query.start_after {
writeln!( result.start_after = Some(uriencode_maybe(sa, query.urlencode_resp));
&mut xml,
"\t<StartAfter>{}</StartAfter>",
xml_encode_key(sa, query.urlencode_resp)
)
.unwrap();
} }
if let Some(nct) = truncated { if let Some(nct) = truncated {
writeln!( result.next_continuation_token = Some(s3_xml::Value(base64::encode(nct.as_bytes())));
&mut xml,
"\t<NextContinuationToken>{}</NextContinuationToken>",
base64::encode(nct.as_bytes())
)
.unwrap();
} }
} else { } else {
// TODO: are these supposed to be urlencoded when encoding-type is URL?? // TODO: are these supposed to be urlencoded when encoding-type is URL??
if let Some(mkr) = &query.marker { if let Some(mkr) = &query.marker {
writeln!( result.marker = Some(uriencode_maybe(mkr, query.urlencode_resp));
&mut xml,
"\t<Marker>{}</Marker>",
xml_encode_key(mkr, query.urlencode_resp)
)
.unwrap();
} }
if let Some(next_marker) = truncated { if let Some(next_marker) = truncated {
writeln!( result.next_marker = Some(uriencode_maybe(&next_marker, query.urlencode_resp));
&mut xml,
"\t<NextMarker>{}</NextMarker>",
xml_encode_key(&next_marker, query.urlencode_resp)
)
.unwrap();
} }
} }
for (key, info) in result_keys.iter() { for (key, info) in result_keys.iter() {
let last_modif = msec_to_rfc3339(info.last_modified); result.contents.push(s3_xml::ListBucketItem {
writeln!(&mut xml, "\t<Contents>").unwrap(); key: uriencode_maybe(key, query.urlencode_resp),
writeln!( last_modified: s3_xml::Value(msec_to_rfc3339(info.last_modified)),
&mut xml, size: s3_xml::IntValue(info.size as i64),
"\t\t<Key>{}</Key>", etag: s3_xml::Value(info.etag.to_string()),
xml_encode_key(key, query.urlencode_resp), storage_class: s3_xml::Value("STANDARD".to_string()),
) });
.unwrap();
writeln!(&mut xml, "\t\t<LastModified>{}</LastModified>", last_modif).unwrap();
writeln!(&mut xml, "\t\t<Size>{}</Size>", info.size).unwrap();
if !info.etag.is_empty() {
writeln!(&mut xml, "\t\t<ETag>\"{}\"</ETag>", info.etag).unwrap();
}
writeln!(&mut xml, "\t\t<StorageClass>STANDARD</StorageClass>").unwrap();
writeln!(&mut xml, "\t</Contents>").unwrap();
} }
for pfx in result_common_prefixes.iter() { for pfx in result_common_prefixes.iter() {
writeln!(&mut xml, "\t<CommonPrefixes>").unwrap();
//TODO: in V1, are these urlencoded when urlencode_resp is true ?? (probably) //TODO: in V1, are these urlencoded when urlencode_resp is true ?? (probably)
writeln!( result.common_prefixes.push(s3_xml::CommonPrefix {
&mut xml, prefix: uriencode_maybe(pfx, query.urlencode_resp),
"\t\t<Prefix>{}</Prefix>", });
xml_encode_key(pfx, query.urlencode_resp),
)
.unwrap();
writeln!(&mut xml, "\t</CommonPrefixes>").unwrap();
} }
writeln!(&mut xml, "</ListBucketResult>").unwrap(); let xml = s3_xml::to_xml_with_header(&result)?;
debug!("{}", xml);
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?) .body(Body::from(xml.into_bytes()))?)
} }
/// Wrap `s` in an XML `Value`, percent-encoding it first when `yes` is set.
fn uriencode_maybe(s: &str, yes: bool) -> s3_xml::Value {
    let encoded = if yes {
        uri_encode(s, true)
    } else {
        s.to_string()
    };
    s3_xml::Value(encoded)
}

View File

@ -1,5 +1,4 @@
use std::collections::{BTreeMap, VecDeque}; use std::collections::{BTreeMap, VecDeque};
use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use futures::stream::*; use futures::stream::*;
@ -18,8 +17,8 @@ use garage_model::garage::Garage;
use garage_model::object_table::*; use garage_model::object_table::*;
use garage_model::version_table::*; use garage_model::version_table::*;
use crate::encoding::*;
use crate::error::*; use crate::error::*;
use crate::s3_xml;
use crate::signature::verify_signed_content; use crate::signature::verify_signed_content;
pub async fn handle_put( pub async fn handle_put(
@ -339,22 +338,13 @@ pub async fn handle_create_multipart_upload(
garage.version_table.insert(&version).await?; garage.version_table.insert(&version).await?;
// Send success response // Send success response
let mut xml = String::new(); let result = s3_xml::InitiateMultipartUploadResult {
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap(); xmlns: (),
writeln!( bucket: s3_xml::Value(bucket.to_string()),
&mut xml, key: s3_xml::Value(key.to_string()),
r#"<InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"# upload_id: s3_xml::Value(hex::encode(version_uuid)),
) };
.unwrap(); let xml = s3_xml::to_xml_with_header(&result)?;
writeln!(&mut xml, "\t<Bucket>{}</Bucket>", bucket).unwrap();
writeln!(&mut xml, "\t<Key>{}</Key>", xml_escape(key)).unwrap();
writeln!(
&mut xml,
"\t<UploadId>{}</UploadId>",
hex::encode(version_uuid)
)
.unwrap();
writeln!(&mut xml, "</InitiateMultipartUploadResult>").unwrap();
Ok(Response::new(Body::from(xml.into_bytes()))) Ok(Response::new(Body::from(xml.into_bytes())))
} }
@ -520,7 +510,7 @@ pub async fn handle_complete_multipart_upload(
ObjectVersionMeta { ObjectVersionMeta {
headers, headers,
size: total_size, size: total_size,
etag, etag: etag.clone(),
}, },
version.blocks.items()[0].1.hash, version.blocks.items()[0].1.hash,
)); ));
@ -529,22 +519,14 @@ pub async fn handle_complete_multipart_upload(
garage.object_table.insert(&final_object).await?; garage.object_table.insert(&final_object).await?;
// Send response saying ok we're done // Send response saying ok we're done
let mut xml = String::new(); let result = s3_xml::CompleteMultipartUploadResult {
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap(); xmlns: (),
writeln!( location: None,
&mut xml, bucket: s3_xml::Value(bucket),
r#"<CompleteMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"# key: s3_xml::Value(key),
) etag: s3_xml::Value(etag),
.unwrap(); };
writeln!( let xml = s3_xml::to_xml_with_header(&result)?;
&mut xml,
"\t<Location>{}</Location>",
garage.config.s3_api.s3_region
)
.unwrap();
writeln!(&mut xml, "\t<Bucket>{}</Bucket>", bucket).unwrap();
writeln!(&mut xml, "\t<Key>{}</Key>", xml_escape(&key)).unwrap();
writeln!(&mut xml, "</CompleteMultipartUploadResult>").unwrap();
Ok(Response::new(Body::from(xml.into_bytes()))) Ok(Response::new(Body::from(xml.into_bytes())))
} }

597
src/api/s3_xml.rs Normal file
View File

@ -0,0 +1,597 @@
use quick_xml::se::to_string;
use serde::{Serialize, Serializer};
use crate::Error as ApiError;
/// Serialize `x` with quick_xml and prepend the standard XML declaration.
///
/// Returns the complete response body as a `String`, or an `ApiError` if
/// serialization fails.
pub fn to_xml_with_header<T: Serialize>(x: &T) -> Result<String, ApiError> {
    let body = to_string(x)?;
    Ok(format!(r#"<?xml version="1.0" encoding="UTF-8"?>{}"#, body))
}
/// Serializer for the `xmlns` pseudo-field: ignores the unit input and always
/// emits the S3 2006-03-01 namespace URI (rendered by quick_xml as the
/// `xmlns="..."` attribute of the enclosing element, per the tests below).
fn xmlns_tag<S: Serializer>(_v: &(), s: S) -> Result<S::Ok, S::Error> {
s.serialize_str("http://s3.amazonaws.com/doc/2006-03-01/")
}
/// A string rendered as the text content (`$value`) of its enclosing element.
#[derive(Debug, Serialize, PartialEq)]
pub struct Value(#[serde(rename = "$value")] pub String);
/// An integer rendered as the text content (`$value`) of its enclosing element.
#[derive(Debug, Serialize, PartialEq)]
pub struct IntValue(#[serde(rename = "$value")] pub i64);
/// One `<Bucket>` entry of a ListAllMyBucketsResult response.
#[derive(Debug, Serialize, PartialEq)]
pub struct Bucket {
#[serde(rename = "CreationDate")]
pub creation_date: Value,
#[serde(rename = "Name")]
pub name: Value,
}
/// NOTE(review): `DisplayName` and `Id` appear unused in this module — the
/// `Owner` fields below use `Value` directly. Confirm no other module relies
/// on them before removing.
#[derive(Debug, Serialize, PartialEq)]
pub struct DisplayName(#[serde(rename = "$value")] pub String);
#[derive(Debug, Serialize, PartialEq)]
pub struct Id(#[serde(rename = "$value")] pub String);
/// `<Owner>` element: display name and ID of the account owning the buckets.
#[derive(Debug, Serialize, PartialEq)]
pub struct Owner {
#[serde(rename = "DisplayName")]
pub display_name: Value,
#[serde(rename = "ID")]
pub id: Value,
}
/// `<Buckets>` wrapper holding the list of `<Bucket>` entries.
#[derive(Debug, Serialize, PartialEq)]
pub struct BucketList {
#[serde(rename = "Bucket")]
pub entries: Vec<Bucket>,
}
/// Response body for the S3 ListBuckets call.
#[derive(Debug, Serialize, PartialEq)]
pub struct ListAllMyBucketsResult {
#[serde(rename = "Buckets")]
pub buckets: BucketList,
#[serde(rename = "Owner")]
pub owner: Owner,
}
/// Response body for GetBucketLocation: the region name as element text,
/// with the S3 namespace attribute emitted via `xmlns_tag`.
#[derive(Debug, Serialize, PartialEq)]
pub struct LocationConstraint {
#[serde(serialize_with = "xmlns_tag")]
pub xmlns: (),
#[serde(rename = "$value")]
pub region: String,
}
/// One successfully deleted object in a DeleteObjects response.
#[derive(Debug, Serialize, PartialEq)]
pub struct Deleted {
#[serde(rename = "Key")]
pub key: Value,
#[serde(rename = "VersionId")]
pub version_id: Value,
#[serde(rename = "DeleteMarkerVersionId")]
pub delete_marker_version_id: Value,
}
/// Top-level `<Error>` body returned for a failed API call.
/// `Option` fields are omitted from the XML when `None`.
#[derive(Debug, Serialize, PartialEq)]
pub struct Error {
#[serde(rename = "Code")]
pub code: Value,
#[serde(rename = "Message")]
pub message: Value,
#[serde(rename = "Resource")]
pub resource: Option<Value>,
#[serde(rename = "Region")]
pub region: Option<Value>,
}
/// Per-object `<Error>` entry in a DeleteObjects response.
#[derive(Debug, Serialize, PartialEq)]
pub struct DeleteError {
#[serde(rename = "Code")]
pub code: Value,
#[serde(rename = "Key")]
pub key: Option<Value>,
#[serde(rename = "Message")]
pub message: Value,
#[serde(rename = "VersionId")]
pub version_id: Option<Value>,
}
/// Response body for DeleteObjects: deleted entries followed by per-object
/// errors, under the S3 namespace.
#[derive(Debug, Serialize, PartialEq)]
pub struct DeleteResult {
#[serde(serialize_with = "xmlns_tag")]
pub xmlns: (),
#[serde(rename = "Deleted")]
pub deleted: Vec<Deleted>,
#[serde(rename = "Error")]
pub errors: Vec<DeleteError>,
}
/// Response body for CopyObject.
#[derive(Debug, Serialize, PartialEq)]
pub struct CopyObjectResult {
#[serde(rename = "LastModified")]
pub last_modified: Value,
#[serde(rename = "ETag")]
pub etag: Value,
}
/// Response body for CreateMultipartUpload (a.k.a. InitiateMultipartUpload).
#[derive(Debug, Serialize, PartialEq)]
pub struct InitiateMultipartUploadResult {
#[serde(serialize_with = "xmlns_tag")]
pub xmlns: (),
#[serde(rename = "Bucket")]
pub bucket: Value,
#[serde(rename = "Key")]
pub key: Value,
#[serde(rename = "UploadId")]
pub upload_id: Value,
}
/// Response body for CompleteMultipartUpload; `location` is omitted from the
/// XML when `None`.
#[derive(Debug, Serialize, PartialEq)]
pub struct CompleteMultipartUploadResult {
#[serde(serialize_with = "xmlns_tag")]
pub xmlns: (),
#[serde(rename = "Location")]
pub location: Option<Value>,
#[serde(rename = "Bucket")]
pub bucket: Value,
#[serde(rename = "Key")]
pub key: Value,
#[serde(rename = "ETag")]
pub etag: Value,
}
/// One `<Contents>` entry of a ListObjects response.
#[derive(Debug, Serialize, PartialEq)]
pub struct ListBucketItem {
#[serde(rename = "Key")]
pub key: Value,
#[serde(rename = "LastModified")]
pub last_modified: Value,
#[serde(rename = "ETag")]
pub etag: Value,
#[serde(rename = "Size")]
pub size: IntValue,
#[serde(rename = "StorageClass")]
pub storage_class: Value,
}
/// One `<CommonPrefixes>` entry of a ListObjects response.
#[derive(Debug, Serialize, PartialEq)]
pub struct CommonPrefix {
#[serde(rename = "Prefix")]
pub prefix: Value,
}
/// Response body shared by ListObjects v1 and v2. The v1-only fields
/// (Marker, NextMarker) and v2-only fields (StartAfter, ContinuationToken,
/// NextContinuationToken, KeyCount) are `Option` and omitted when `None`,
/// so one struct serves both API versions.
#[derive(Debug, Serialize, PartialEq)]
pub struct ListBucketResult {
#[serde(serialize_with = "xmlns_tag")]
pub xmlns: (),
#[serde(rename = "Name")]
pub name: Value,
#[serde(rename = "Prefix")]
pub prefix: Value,
#[serde(rename = "Marker")]
pub marker: Option<Value>,
#[serde(rename = "NextMarker")]
pub next_marker: Option<Value>,
#[serde(rename = "StartAfter")]
pub start_after: Option<Value>,
#[serde(rename = "ContinuationToken")]
pub continuation_token: Option<Value>,
#[serde(rename = "NextContinuationToken")]
pub next_continuation_token: Option<Value>,
#[serde(rename = "KeyCount")]
pub key_count: Option<IntValue>,
#[serde(rename = "MaxKeys")]
pub max_keys: IntValue,
#[serde(rename = "Delimiter")]
pub delimiter: Option<Value>,
#[serde(rename = "EncodingType")]
pub encoding_type: Option<Value>,
#[serde(rename = "IsTruncated")]
pub is_truncated: Value,
#[serde(rename = "Contents")]
pub contents: Vec<ListBucketItem>,
#[serde(rename = "CommonPrefixes")]
pub common_prefixes: Vec<CommonPrefix>,
}
#[cfg(test)]
mod tests {
// Unit tests pinning the exact XML produced by `to_xml_with_header` for
// each response struct, byte for byte (elements are emitted with no
// indentation; the `\` at end of string lines is Rust line continuation).
use super::*;
use garage_util::time::*;
// Top-level error body, with optional Resource and Region present.
#[test]
fn error_message() -> Result<(), ApiError> {
let error = Error {
code: Value("TestError".to_string()),
message: Value("A dummy error message".to_string()),
resource: Some(Value("/bucket/a/plop".to_string())),
region: Some(Value("garage".to_string())),
};
assert_eq!(
to_xml_with_header(&error)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<Error>\
<Code>TestError</Code>\
<Message>A dummy error message</Message>\
<Resource>/bucket/a/plop</Resource>\
<Region>garage</Region>\
</Error>"
);
Ok(())
}
// ListBuckets response with two buckets and an owner.
#[test]
fn list_all_my_buckets_result() -> Result<(), ApiError> {
let list_buckets = ListAllMyBucketsResult {
owner: Owner {
display_name: Value("owner_name".to_string()),
id: Value("qsdfjklm".to_string()),
},
buckets: BucketList {
entries: vec![
Bucket {
creation_date: Value(msec_to_rfc3339(0)),
name: Value("bucket_A".to_string()),
},
Bucket {
creation_date: Value(msec_to_rfc3339(3600 * 24 * 1000)),
name: Value("bucket_B".to_string()),
},
],
},
};
assert_eq!(
to_xml_with_header(&list_buckets)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<ListAllMyBucketsResult>\
<Buckets>\
<Bucket>\
<CreationDate>1970-01-01T00:00:00.000Z</CreationDate>\
<Name>bucket_A</Name>\
</Bucket>\
<Bucket>\
<CreationDate>1970-01-02T00:00:00.000Z</CreationDate>\
<Name>bucket_B</Name>\
</Bucket>\
</Buckets>\
<Owner>\
<DisplayName>owner_name</DisplayName>\
<ID>qsdfjklm</ID>\
</Owner>\
</ListAllMyBucketsResult>"
);
Ok(())
}
// GetBucketLocation: region as text content, namespace via xmlns_tag.
#[test]
fn get_bucket_location_result() -> Result<(), ApiError> {
let get_bucket_location = LocationConstraint {
xmlns: (),
region: "garage".to_string(),
};
assert_eq!(
to_xml_with_header(&get_bucket_location)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<LocationConstraint xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">garage</LocationConstraint>"
);
Ok(())
}
// DeleteObjects response: mixes Deleted entries and per-object Errors;
// note the None version_id on the first error is omitted entirely.
#[test]
fn delete_result() -> Result<(), ApiError> {
let delete_result = DeleteResult {
xmlns: (),
deleted: vec![
Deleted {
key: Value("a/plop".to_string()),
version_id: Value("qsdfjklm".to_string()),
delete_marker_version_id: Value("wxcvbn".to_string()),
},
Deleted {
key: Value("b/plip".to_string()),
version_id: Value("1234".to_string()),
delete_marker_version_id: Value("4321".to_string()),
},
],
errors: vec![
DeleteError {
code: Value("NotFound".to_string()),
key: Some(Value("c/plap".to_string())),
message: Value("Object c/plap not found".to_string()),
version_id: None,
},
DeleteError {
code: Value("Forbidden".to_string()),
key: Some(Value("d/plep".to_string())),
message: Value("Not authorized".to_string()),
version_id: Some(Value("789".to_string())),
},
],
};
assert_eq!(
to_xml_with_header(&delete_result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<DeleteResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
<Deleted>\
<Key>a/plop</Key>\
<VersionId>qsdfjklm</VersionId>\
<DeleteMarkerVersionId>wxcvbn</DeleteMarkerVersionId>\
</Deleted>\
<Deleted>\
<Key>b/plip</Key>\
<VersionId>1234</VersionId>\
<DeleteMarkerVersionId>4321</DeleteMarkerVersionId>\
</Deleted>\
<Error>\
<Code>NotFound</Code>\
<Key>c/plap</Key>\
<Message>Object c/plap not found</Message>\
</Error>\
<Error>\
<Code>Forbidden</Code>\
<Key>d/plep</Key>\
<Message>Not authorized</Message>\
<VersionId>789</VersionId>\
</Error>\
</DeleteResult>"
);
Ok(())
}
// CopyObject response (no namespace attribute on this element).
#[test]
fn copy_object_result() -> Result<(), ApiError> {
let copy_result = CopyObjectResult {
last_modified: Value(msec_to_rfc3339(0)),
etag: Value("9b2cf535f27731c974343645a3985328".to_string()),
};
assert_eq!(
to_xml_with_header(&copy_result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<CopyObjectResult>\
<LastModified>1970-01-01T00:00:00.000Z</LastModified>\
<ETag>9b2cf535f27731c974343645a3985328</ETag>\
</CopyObjectResult>\
"
);
Ok(())
}
// CreateMultipartUpload response.
#[test]
fn initiate_multipart_upload_result() -> Result<(), ApiError> {
let result = InitiateMultipartUploadResult {
xmlns: (),
bucket: Value("mybucket".to_string()),
key: Value("a/plop".to_string()),
upload_id: Value("azerty".to_string()),
};
assert_eq!(
to_xml_with_header(&result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<InitiateMultipartUploadResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
<Bucket>mybucket</Bucket>\
<Key>a/plop</Key>\
<UploadId>azerty</UploadId>\
</InitiateMultipartUploadResult>"
);
Ok(())
}
// CompleteMultipartUpload response with Location present.
#[test]
fn complete_multipart_upload_result() -> Result<(), ApiError> {
let result = CompleteMultipartUploadResult {
xmlns: (),
location: Some(Value("https://garage.tld/mybucket/a/plop".to_string())),
bucket: Value("mybucket".to_string()),
key: Value("a/plop".to_string()),
etag: Value("3858f62230ac3c915f300c664312c11f-9".to_string()),
};
assert_eq!(
to_xml_with_header(&result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<CompleteMultipartUploadResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
<Location>https://garage.tld/mybucket/a/plop</Location>\
<Bucket>mybucket</Bucket>\
<Key>a/plop</Key>\
<ETag>3858f62230ac3c915f300c664312c11f-9</ETag>\
</CompleteMultipartUploadResult>"
);
Ok(())
}
// ListObjects v1 with one item and one common prefix (Marker, no KeyCount).
#[test]
fn list_objects_v1_1() -> Result<(), ApiError> {
let result = ListBucketResult {
xmlns: (),
name: Value("example-bucket".to_string()),
prefix: Value("".to_string()),
marker: Some(Value("".to_string())),
next_marker: None,
start_after: None,
continuation_token: None,
next_continuation_token: None,
key_count: None,
max_keys: IntValue(1000),
encoding_type: None,
delimiter: Some(Value("/".to_string())),
is_truncated: Value("false".to_string()),
contents: vec![ListBucketItem {
key: Value("sample.jpg".to_string()),
last_modified: Value(msec_to_rfc3339(0)),
etag: Value("bf1d737a4d46a19f3bced6905cc8b902".to_string()),
size: IntValue(142863),
storage_class: Value("STANDARD".to_string()),
}],
common_prefixes: vec![CommonPrefix {
prefix: Value("photos/".to_string()),
}],
};
assert_eq!(
to_xml_with_header(&result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
<Name>example-bucket</Name>\
<Prefix></Prefix>\
<Marker></Marker>\
<MaxKeys>1000</MaxKeys>\
<Delimiter>/</Delimiter>\
<IsTruncated>false</IsTruncated>\
<Contents>\
<Key>sample.jpg</Key>\
<LastModified>1970-01-01T00:00:00.000Z</LastModified>\
<ETag>bf1d737a4d46a19f3bced6905cc8b902</ETag>\
<Size>142863</Size>\
<StorageClass>STANDARD</StorageClass>\
</Contents>\
<CommonPrefixes>\
<Prefix>photos/</Prefix>\
</CommonPrefixes>\
</ListBucketResult>"
);
Ok(())
}
// ListObjects v1 with only common prefixes (one element per prefix).
#[test]
fn list_objects_v1_2() -> Result<(), ApiError> {
let result = ListBucketResult {
xmlns: (),
name: Value("example-bucket".to_string()),
prefix: Value("photos/2006/".to_string()),
marker: Some(Value("".to_string())),
next_marker: None,
start_after: None,
continuation_token: None,
next_continuation_token: None,
key_count: None,
max_keys: IntValue(1000),
delimiter: Some(Value("/".to_string())),
encoding_type: None,
is_truncated: Value("false".to_string()),
contents: vec![],
common_prefixes: vec![
CommonPrefix {
prefix: Value("photos/2006/February/".to_string()),
},
CommonPrefix {
prefix: Value("photos/2006/January/".to_string()),
},
],
};
assert_eq!(
to_xml_with_header(&result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
<Name>example-bucket</Name>\
<Prefix>photos/2006/</Prefix>\
<Marker></Marker>\
<MaxKeys>1000</MaxKeys>\
<Delimiter>/</Delimiter>\
<IsTruncated>false</IsTruncated>\
<CommonPrefixes>\
<Prefix>photos/2006/February/</Prefix>\
</CommonPrefixes>\
<CommonPrefixes>\
<Prefix>photos/2006/January/</Prefix>\
</CommonPrefixes>\
</ListBucketResult>"
);
Ok(())
}
// ListObjects v2 with StartAfter set (no Marker fields).
#[test]
fn list_objects_v2_1() -> Result<(), ApiError> {
let result = ListBucketResult {
xmlns: (),
name: Value("quotes".to_string()),
prefix: Value("E".to_string()),
marker: None,
next_marker: None,
start_after: Some(Value("ExampleGuide.pdf".to_string())),
continuation_token: None,
next_continuation_token: None,
key_count: None,
max_keys: IntValue(3),
delimiter: None,
encoding_type: None,
is_truncated: Value("false".to_string()),
contents: vec![ListBucketItem {
key: Value("ExampleObject.txt".to_string()),
last_modified: Value(msec_to_rfc3339(0)),
etag: Value("599bab3ed2c697f1d26842727561fd94".to_string()),
size: IntValue(857),
storage_class: Value("REDUCED_REDUNDANCY".to_string()),
}],
common_prefixes: vec![],
};
assert_eq!(
to_xml_with_header(&result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
<Name>quotes</Name>\
<Prefix>E</Prefix>\
<StartAfter>ExampleGuide.pdf</StartAfter>\
<MaxKeys>3</MaxKeys>\
<IsTruncated>false</IsTruncated>\
<Contents>\
<Key>ExampleObject.txt</Key>\
<LastModified>1970-01-01T00:00:00.000Z</LastModified>\
<ETag>599bab3ed2c697f1d26842727561fd94</ETag>\
<Size>857</Size>\
<StorageClass>REDUCED_REDUNDANCY</StorageClass>\
</Contents>\
</ListBucketResult>"
);
Ok(())
}
// ListObjects v2 with continuation tokens and KeyCount.
#[test]
fn list_objects_v2_2() -> Result<(), ApiError> {
let result = ListBucketResult {
xmlns: (),
name: Value("bucket".to_string()),
prefix: Value("".to_string()),
marker: None,
next_marker: None,
start_after: None,
continuation_token: Some(Value(
"1ueGcxLPRx1Tr/XYExHnhbYLgveDs2J/wm36Hy4vbOwM=".to_string(),
)),
next_continuation_token: Some(Value("qsdfjklm".to_string())),
key_count: Some(IntValue(112)),
max_keys: IntValue(1000),
delimiter: None,
encoding_type: None,
is_truncated: Value("false".to_string()),
contents: vec![ListBucketItem {
key: Value("happyfacex.jpg".to_string()),
last_modified: Value(msec_to_rfc3339(0)),
etag: Value("70ee1738b6b21e2c8a43f3a5ab0eee71".to_string()),
size: IntValue(1111),
storage_class: Value("STANDARD".to_string()),
}],
common_prefixes: vec![],
};
assert_eq!(
to_xml_with_header(&result)?,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
<Name>bucket</Name>\
<Prefix></Prefix>\
<ContinuationToken>1ueGcxLPRx1Tr/XYExHnhbYLgveDs2J/wm36Hy4vbOwM=</ContinuationToken>\
<NextContinuationToken>qsdfjklm</NextContinuationToken>\
<KeyCount>112</KeyCount>\
<MaxKeys>1000</MaxKeys>\
<IsTruncated>false</IsTruncated>\
<Contents>\
<Key>happyfacex.jpg</Key>\
<LastModified>1970-01-01T00:00:00.000Z</LastModified>\
<ETag>70ee1738b6b21e2c8a43f3a5ab0eee71</ETag>\
<Size>1111</Size>\
<StorageClass>STANDARD</StorageClass>\
</Contents>\
</ListBucketResult>"
);
Ok(())
}
}

View File

@ -16,5 +16,5 @@ pub fn msec_to_rfc3339(msecs: u64) -> String {
let secs = msecs as i64 / 1000; let secs = msecs as i64 / 1000;
let nanos = (msecs as i64 % 1000) as u32 * 1_000_000; let nanos = (msecs as i64 % 1000) as u32 * 1_000_000;
let timestamp = Utc.timestamp(secs, nanos); let timestamp = Utc.timestamp(secs, nanos);
timestamp.to_rfc3339_opts(SecondsFormat::Secs, true) timestamp.to_rfc3339_opts(SecondsFormat::Millis, true)
} }