First version of admin API (#298)
**Spec:**

- [x] Start writing
- [x] Specify all layout endpoints
- [x] Specify all endpoints for operations on keys
- [x] Specify all endpoints for operations on key/bucket permissions
- [x] Specify all endpoints for operations on buckets
- [x] Specify all endpoints for operations on bucket aliases

View rendered spec at <https://git.deuxfleurs.fr/Deuxfleurs/garage/src/branch/admin-api/doc/drafts/admin-api.md>

**Code:**

- [x] Refactor code for admin api to use common api code that was created for K2V

**General endpoints:**

- [x] Metrics
- [x] GetClusterStatus
- [x] ConnectClusterNodes
- [x] GetClusterLayout
- [x] UpdateClusterLayout
- [x] ApplyClusterLayout
- [x] RevertClusterLayout

**Key-related endpoints:**

- [x] ListKeys
- [x] CreateKey
- [x] ImportKey
- [x] GetKeyInfo
- [x] UpdateKey
- [x] DeleteKey

**Bucket-related endpoints:**

- [x] ListBuckets
- [x] CreateBucket
- [x] GetBucketInfo
- [x] DeleteBucket
- [x] PutBucketWebsite
- [x] DeleteBucketWebsite

**Operations on key/bucket permissions:**

- [x] BucketAllowKey
- [x] BucketDenyKey

**Operations on bucket aliases:**

- [x] GlobalAliasBucket
- [x] GlobalUnaliasBucket
- [x] LocalAliasBucket
- [x] LocalUnaliasBucket

**And also:**

- [x] Separate error type for the admin API (this PR includes a quite big refactoring of error handling)
- [x] Add management of website access
- [ ] Check that nothing is missing wrt what can be done using the CLI
- [ ] Improve formatting of the spec
- [x] Make sure everyone is cool with the API design

Fix #231
Fix #295

Co-authored-by: Alex Auvolat <alex@adnab.me>
Reviewed-on: https://git.deuxfleurs.fr/Deuxfleurs/garage/pulls/298
Co-authored-by: Alex <alex@adnab.me>
Co-committed-by: Alex <alex@adnab.me>
parent 64c193e3db
commit 382e74c798
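As a rough, non-authoritative illustration, the endpoints listed in the checklist above are exposed as plain HTTP calls on the new admin API, so one of them (GetClusterStatus) could be exercised from Rust with hyper, which is already a dependency in the diff below. The bind address, URL path, and token environment variable in this sketch are assumptions made for the example, not values taken from this PR; the rendered spec linked above is the authoritative reference.

```rust
// Minimal sketch: query the admin API's cluster status endpoint.
// Assumed values (not from this PR): admin API at http://localhost:3903,
// GetClusterStatus served at /v0/status, bearer-token authentication.
use hyper::{Body, Client, Method, Request};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical env var, used here only to keep the example self-contained.
    let admin_token = std::env::var("GARAGE_ADMIN_TOKEN")?;

    let req = Request::builder()
        .method(Method::GET)
        .uri("http://localhost:3903/v0/status") // assumed address and path
        .header("Authorization", format!("Bearer {}", admin_token))
        .body(Body::empty())?;

    let resp = Client::new().request(req).await?;
    println!("HTTP status: {}", resp.status());

    let body = hyper::body::to_bytes(resp.into_body()).await?;
    println!("{}", String::from_utf8_lossy(&body));
    Ok(())
}
```

If the final spec keeps this shape, the other endpoints in the checklist would follow the same pattern: a bearer token plus, where applicable, a JSON request body.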
Cargo.lock (generated), 28 lines changed
@@ -888,21 +888,23 @@ dependencies = [
  "chrono",
  "futures",
  "futures-util",
- "garage_admin",
  "garage_api",
  "garage_model 0.7.0",
  "garage_rpc 0.7.0",
  "garage_table 0.7.0",
  "garage_util 0.7.0",
  "garage_web",
- "git-version",
  "hex",
  "hmac 0.10.1",
  "http",
  "hyper",
  "kuska-sodiumoxide",
  "netapp 0.4.4",
+ "opentelemetry",
+ "opentelemetry-otlp",
+ "opentelemetry-prometheus",
  "pretty_env_logger",
+ "prometheus",
  "rand 0.8.5",
  "rmp-serde 0.15.5",
  "serde",
@@ -917,23 +919,6 @@ dependencies = [
  "tracing",
  ]
-
- [[package]]
- name = "garage_admin"
- version = "0.7.0"
- dependencies = [
- "futures",
- "futures-util",
- "garage_util 0.7.0",
- "hex",
- "http",
- "hyper",
- "opentelemetry",
- "opentelemetry-otlp",
- "opentelemetry-prometheus",
- "prometheus",
- "tracing",
- ]

  [[package]]
  name = "garage_api"
  version = "0.7.0"
@@ -963,8 +948,11 @@ dependencies = [
  "multer",
  "nom",
  "opentelemetry",
+ "opentelemetry-otlp",
+ "opentelemetry-prometheus",
  "percent-encoding",
  "pin-project 1.0.10",
+ "prometheus",
  "quick-xml",
  "roxmltree",
  "serde",
@@ -1089,9 +1077,9 @@ dependencies = [
  "bytes 1.1.0",
  "futures",
  "futures-util",
- "garage_admin",
  "garage_util 0.7.0",
  "gethostname",
+ "git-version",
  "hex",
  "hyper",
  "k8s-openapi",
Cargo.nix, 193 lines changed
@@ -6,7 +6,6 @@ args@{
  rootFeatures ? [
  "garage_util/default"
  "garage_rpc/default"
- "garage_admin/default"
  "garage_table/default"
  "garage_block/default"
  "garage_model/default"
@@ -48,7 +47,6 @@ in
  workspace = {
  garage_util = rustPackages.unknown.garage_util."0.7.0";
  garage_rpc = rustPackages.unknown.garage_rpc."0.7.0";
- garage_admin = rustPackages.unknown.garage_admin."0.7.0";
  garage_table = rustPackages.unknown.garage_table."0.7.0";
  garage_block = rustPackages.unknown.garage_block."0.7.0";
  garage_model = rustPackages.unknown.garage_model."0.7.0";
@@ -1220,7 +1218,7 @@ in
  [ "async-await" ]
  [ "async-await-macro" ]
  [ "channel" ]
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "default")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "default")
  [ "futures-channel" ]
  [ "futures-io" ]
  [ "futures-macro" ]
@@ -1269,18 +1267,20 @@ in
  bytes = rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.1.0" { inherit profileName; };
  futures = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.21" { inherit profileName; };
  futures_util = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.21" { inherit profileName; };
- garage_admin = rustPackages."unknown".garage_admin."0.7.0" { inherit profileName; };
  garage_api = rustPackages."unknown".garage_api."0.7.0" { inherit profileName; };
  garage_model = rustPackages."unknown".garage_model."0.7.0" { inherit profileName; };
  garage_rpc = rustPackages."unknown".garage_rpc."0.7.0" { inherit profileName; };
  garage_table = rustPackages."unknown".garage_table."0.7.0" { inherit profileName; };
  garage_util = rustPackages."unknown".garage_util."0.7.0" { inherit profileName; };
  garage_web = rustPackages."unknown".garage_web."0.7.0" { inherit profileName; };
- git_version = rustPackages."registry+https://github.com/rust-lang/crates.io-index".git-version."0.3.5" { inherit profileName; };
  hex = rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; };
  sodiumoxide = rustPackages."registry+https://github.com/rust-lang/crates.io-index".kuska-sodiumoxide."0.2.5-0" { inherit profileName; };
  netapp = rustPackages."registry+https://github.com/rust-lang/crates.io-index".netapp."0.4.4" { inherit profileName; };
+ opentelemetry = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry."0.17.0" { inherit profileName; };
+ opentelemetry_otlp = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry-otlp."0.10.0" { inherit profileName; };
+ opentelemetry_prometheus = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry-prometheus."0.10.0" { inherit profileName; };
  pretty_env_logger = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pretty_env_logger."0.4.0" { inherit profileName; };
+ prometheus = rustPackages."registry+https://github.com/rust-lang/crates.io-index".prometheus."0.13.0" { inherit profileName; };
  rand = rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; };
  rmp_serde = rustPackages."registry+https://github.com/rust-lang/crates.io-index".rmp-serde."0.15.5" { inherit profileName; };
  serde = rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.137" { inherit profileName; };
@@ -1305,26 +1305,6 @@ in
  };
  });
-
- "unknown".garage_admin."0.7.0" = overridableMkRustCrate (profileName: rec {
- name = "garage_admin";
- version = "0.7.0";
- registry = "unknown";
- src = fetchCrateLocal (workspaceSrc + "/src/admin");
- dependencies = {
- futures = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.21" { inherit profileName; };
- futures_util = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.21" { inherit profileName; };
- garage_util = rustPackages."unknown".garage_util."0.7.0" { inherit profileName; };
- hex = rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; };
- http = rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.6" { inherit profileName; };
- hyper = rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.18" { inherit profileName; };
- opentelemetry = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry."0.17.0" { inherit profileName; };
- opentelemetry_otlp = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry-otlp."0.10.0" { inherit profileName; };
- opentelemetry_prometheus = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry-prometheus."0.10.0" { inherit profileName; };
- prometheus = rustPackages."registry+https://github.com/rust-lang/crates.io-index".prometheus."0.13.0" { inherit profileName; };
- tracing = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.32" { inherit profileName; };
- };
- });

  "unknown".garage_api."0.7.0" = overridableMkRustCrate (profileName: rec {
  name = "garage_api";
  version = "0.7.0";
@@ -1359,8 +1339,11 @@ in
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "multer" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".multer."2.0.2" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "nom" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.1" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "opentelemetry" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry."0.17.0" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "opentelemetry_otlp" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry-otlp."0.10.0" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "opentelemetry_prometheus" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry-prometheus."0.10.0" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "percent_encoding" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.1.0" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "pin_project" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.0.10" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "prometheus" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".prometheus."0.13.0" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "quick_xml" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".quick-xml."0.21.0" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "roxmltree" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".roxmltree."0.14.1" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "serde" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.137" { inherit profileName; };
@@ -1506,9 +1489,9 @@ in
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "bytes" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.1.0" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "futures" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.21" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "futures_util" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.21" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "garage_admin" else null } = rustPackages."unknown".garage_admin."0.7.0" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "garage_util" else null } = rustPackages."unknown".garage_util."0.7.0" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "gethostname" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".gethostname."0.2.3" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "git_version" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".git-version."0.3.5" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "hex" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "hyper" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.18" { inherit profileName; };
  ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_rpc" then "k8s_openapi" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".k8s-openapi."0.13.1" { inherit profileName; };
@@ -1898,31 +1881,31 @@ in
  registry = "registry+https://github.com/rust-lang/crates.io-index";
  src = fetchCratesIo { inherit name version; sha256 = "b26ae0a80afebe130861d90abf98e3814a4f28a4c6ffeb5ab8ebb2be311e0ef2"; };
  features = builtins.concatLists [
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "client")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "client")
  [ "default" ]
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "full")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "full")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "h2")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "h2")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "http1")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "http1")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "http2")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "http2")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "runtime")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "runtime")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "server")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "server")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "socket2")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "socket2")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "stream")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "stream")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "tcp")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "tcp")
  ];
  dependencies = {
  bytes = rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.1.0" { inherit profileName; };
  futures_channel = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-channel."0.3.21" { inherit profileName; };
  futures_core = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.21" { inherit profileName; };
  futures_util = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.21" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client" then "h2" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.3.12" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client" then "h2" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.3.12" { inherit profileName; };
  http = rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.6" { inherit profileName; };
  http_body = rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.4" { inherit profileName; };
  httparse = rustPackages."registry+https://github.com/rust-lang/crates.io-index".httparse."1.6.0" { inherit profileName; };
  httpdate = rustPackages."registry+https://github.com/rust-lang/crates.io-index".httpdate."1.0.2" { inherit profileName; };
  itoa = rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.1" { inherit profileName; };
  pin_project_lite = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.8" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client" then "socket2" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".socket2."0.4.4" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client" then "socket2" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".socket2."0.4.4" { inherit profileName; };
  tokio = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.17.0" { inherit profileName; };
  tower_service = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.1" { inherit profileName; };
  tracing = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.32" { inherit profileName; };
@@ -3341,7 +3324,7 @@ in
  [ "getrandom" ]
  [ "libc" ]
  [ "rand_chacha" ]
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "small_rng")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "small_rng")
  [ "std" ]
  [ "std_rng" ]
  ];
@@ -3434,28 +3417,28 @@ in
  registry = "registry+https://github.com/rust-lang/crates.io-index";
  src = fetchCratesIo { inherit name version; sha256 = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"; };
  features = builtins.concatLists [
- [ "aho-corasick" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "aho-corasick")
- [ "default" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "default")
- [ "memchr" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "memchr")
- [ "perf" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "perf")
- [ "perf-cache" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "perf-cache")
- [ "perf-dfa" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "perf-dfa")
- [ "perf-inline" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "perf-inline")
- [ "perf-literal" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "perf-literal")
- [ "std" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "std")
- [ "unicode" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode")
- [ "unicode-age" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode-age")
- [ "unicode-bool" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode-bool")
- [ "unicode-case" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode-case")
- [ "unicode-gencat" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode-gencat")
- [ "unicode-perl" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode-perl")
- [ "unicode-script" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode-script")
- [ "unicode-segment" ]
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "unicode-segment")
  ];
  dependencies = {
- aho_corasick = rustPackages."registry+https://github.com/rust-lang/crates.io-index".aho-corasick."0.7.18" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "aho_corasick" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".aho-corasick."0.7.18" { inherit profileName; };
- memchr = rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.4.1" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "memchr" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.4.1" { inherit profileName; };
- regex_syntax = rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-syntax."0.6.25" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "regex_syntax" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-syntax."0.6.25" { inherit profileName; };
  };
  });

@@ -4302,8 +4285,8 @@ in
  [ "bytes" ]
  [ "default" ]
  [ "fs" ]
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "full")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "full")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "io-std")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "io-std")
  [ "io-util" ]
  [ "libc" ]
  [ "macros" ]
@@ -4312,8 +4295,8 @@ in
  [ "net" ]
  [ "num_cpus" ]
  [ "once_cell" ]
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "parking_lot")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "parking_lot")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "process")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "process")
  [ "rt" ]
  [ "rt-multi-thread" ]
  [ "signal" ]
@@ -4331,7 +4314,7 @@ in
  mio = rustPackages."registry+https://github.com/rust-lang/crates.io-index".mio."0.8.2" { inherit profileName; };
  num_cpus = rustPackages."registry+https://github.com/rust-lang/crates.io-index".num_cpus."1.13.1" { inherit profileName; };
  once_cell = rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.10.0" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "parking_lot" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".parking_lot."0.12.0" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "parking_lot" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".parking_lot."0.12.0" { inherit profileName; };
  pin_project_lite = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.8" { inherit profileName; };
  ${ if hostPlatform.isUnix then "signal_hook_registry" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".signal-hook-registry."1.4.0" { inherit profileName; };
  socket2 = rustPackages."registry+https://github.com/rust-lang/crates.io-index".socket2."0.4.4" { inherit profileName; };
@@ -4409,9 +4392,9 @@ in
  registry = "registry+https://github.com/rust-lang/crates.io-index";
  src = fetchCratesIo { inherit name version; sha256 = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0"; };
  features = builtins.concatLists [
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "codec")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "codec")
  [ "compat" ]
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "default")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "default")
  [ "futures-io" ]
  (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_rpc") "io")
  (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_rpc") "slab")
@@ -4528,43 +4511,43 @@ in
  registry = "registry+https://github.com/rust-lang/crates.io-index";
  src = fetchCratesIo { inherit name version; sha256 = "9a89fd63ad6adf737582df5db40d286574513c69a11dac5214dc3b5603d6713e"; };
  features = builtins.concatLists [
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "__common")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "__common")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "balance")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "balance")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "buffer")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "buffer")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "default")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "default")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "discover")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "discover")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "futures-core")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "futures-core")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "futures-util")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "futures-util")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "indexmap")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "indexmap")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "limit")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "limit")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "load")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "load")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "log")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "log")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "make")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "make")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "pin-project")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "pin-project")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "pin-project-lite")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "pin-project-lite")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "rand")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "rand")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "ready-cache")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "ready-cache")
  (lib.optional (rootFeatures' ? "garage") "retry")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "slab")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "slab")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "timeout")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web") "timeout")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "tokio")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "tokio")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "tokio-util")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "tokio-util")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "tracing")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "tracing")
- (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "util")
+ (lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "util")
  ];
  dependencies = {
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "futures_core" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.21" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "futures_core" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.21" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "futures_util" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.21" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "futures_util" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.21" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "indexmap" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."1.8.0" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "indexmap" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."1.8.0" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "pin_project" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.0.10" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "pin_project" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.0.10" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "pin_project_lite" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.8" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "pin_project_lite" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.8" { inherit profileName; };
- ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "rand" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; };
+ ${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "rand" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; };
|
||||||
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "slab" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.5" { inherit profileName; };
|
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" then "slab" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.5" { inherit profileName; };
|
||||||
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "tokio" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.17.0" { inherit profileName; };
|
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "tokio" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.17.0" { inherit profileName; };
|
||||||
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "tokio_util" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.0" { inherit profileName; };
|
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "tokio_util" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.0" { inherit profileName; };
|
||||||
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "tower_layer" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.1" { inherit profileName; };
|
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "tower_layer" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.1" { inherit profileName; };
|
||||||
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "tower_service" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.1" { inherit profileName; };
|
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "tower_service" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.1" { inherit profileName; };
|
||||||
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "tracing" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.32" { inherit profileName; };
|
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "tracing" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.32" { inherit profileName; };
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -4617,14 +4600,14 @@ in
|
|||||||
features = builtins.concatLists [
|
features = builtins.concatLists [
|
||||||
[ "attributes" ]
|
[ "attributes" ]
|
||||||
[ "default" ]
|
[ "default" ]
|
||||||
(lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web") "log")
|
(lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web") "log")
|
||||||
(lib.optional (rootFeatures' ? "garage") "log-always")
|
(lib.optional (rootFeatures' ? "garage") "log-always")
|
||||||
[ "std" ]
|
[ "std" ]
|
||||||
[ "tracing-attributes" ]
|
[ "tracing-attributes" ]
|
||||||
];
|
];
|
||||||
dependencies = {
|
dependencies = {
|
||||||
cfg_if = rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; };
|
cfg_if = rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; };
|
||||||
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" then "log" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.16" { inherit profileName; };
|
${ if rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_web" then "log" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.16" { inherit profileName; };
|
||||||
pin_project_lite = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.8" { inherit profileName; };
|
pin_project_lite = rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.8" { inherit profileName; };
|
||||||
tracing_attributes = buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-attributes."0.1.20" { profileName = "__noProfile"; };
|
tracing_attributes = buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-attributes."0.1.20" { profileName = "__noProfile"; };
|
||||||
tracing_core = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-core."0.1.23" { inherit profileName; };
|
tracing_core = rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-core."0.1.23" { inherit profileName; };
|
||||||
@ -4986,7 +4969,7 @@ in
|
|||||||
[ "std" ]
|
[ "std" ]
|
||||||
[ "synchapi" ]
|
[ "synchapi" ]
|
||||||
[ "sysinfoapi" ]
|
[ "sysinfoapi" ]
|
||||||
(lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_admin" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_block" || rootFeatures' ? "garage_model" || rootFeatures' ? "garage_rpc" || rootFeatures' ? "garage_table" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "threadpoollegacyapiset")
|
(lib.optional (rootFeatures' ? "garage" || rootFeatures' ? "garage_api" || rootFeatures' ? "garage_web" || rootFeatures' ? "k2v-client") "threadpoollegacyapiset")
|
||||||
[ "timezoneapi" ]
|
[ "timezoneapi" ]
|
||||||
[ "winbase" ]
|
[ "winbase" ]
|
||||||
[ "wincon" ]
|
[ "wincon" ]
|
||||||
@ -5049,8 +5032,8 @@ in
|
|||||||
${ if hostPlatform.config == "aarch64-uwp-windows-msvc" || hostPlatform.config == "aarch64-pc-windows-msvc" then "windows_aarch64_msvc" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_aarch64_msvc."0.32.0" { inherit profileName; };
|
${ if hostPlatform.config == "aarch64-uwp-windows-msvc" || hostPlatform.config == "aarch64-pc-windows-msvc" then "windows_aarch64_msvc" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_aarch64_msvc."0.32.0" { inherit profileName; };
|
||||||
${ if hostPlatform.config == "i686-pc-windows-gnu" || hostPlatform.config == "i686-uwp-windows-gnu" then "windows_i686_gnu" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_i686_gnu."0.32.0" { inherit profileName; };
|
${ if hostPlatform.config == "i686-pc-windows-gnu" || hostPlatform.config == "i686-uwp-windows-gnu" then "windows_i686_gnu" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_i686_gnu."0.32.0" { inherit profileName; };
|
||||||
${ if hostPlatform.config == "i686-uwp-windows-msvc" || hostPlatform.config == "i686-pc-windows-msvc" then "windows_i686_msvc" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_i686_msvc."0.32.0" { inherit profileName; };
|
${ if hostPlatform.config == "i686-uwp-windows-msvc" || hostPlatform.config == "i686-pc-windows-msvc" then "windows_i686_msvc" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_i686_msvc."0.32.0" { inherit profileName; };
|
||||||
${ if hostPlatform.config == "x86_64-uwp-windows-gnu" || hostPlatform.config == "x86_64-pc-windows-gnu" then "windows_x86_64_gnu" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_x86_64_gnu."0.32.0" { inherit profileName; };
|
${ if hostPlatform.config == "x86_64-pc-windows-gnu" || hostPlatform.config == "x86_64-uwp-windows-gnu" then "windows_x86_64_gnu" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_x86_64_gnu."0.32.0" { inherit profileName; };
|
||||||
${ if hostPlatform.config == "x86_64-uwp-windows-msvc" || hostPlatform.config == "x86_64-pc-windows-msvc" then "windows_x86_64_msvc" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_x86_64_msvc."0.32.0" { inherit profileName; };
|
${ if hostPlatform.config == "x86_64-pc-windows-msvc" || hostPlatform.config == "x86_64-uwp-windows-msvc" then "windows_x86_64_msvc" else null } = rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows_x86_64_msvc."0.32.0" { inherit profileName; };
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -5,7 +5,6 @@ members = [
|
|||||||
"src/table",
|
"src/table",
|
||||||
"src/block",
|
"src/block",
|
||||||
"src/model",
|
"src/model",
|
||||||
"src/admin",
|
|
||||||
"src/api",
|
"src/api",
|
||||||
"src/web",
|
"src/web",
|
||||||
"src/garage",
|
"src/garage",
|
||||||
|
2
Makefile
2
Makefile
@ -1,7 +1,7 @@
|
|||||||
.PHONY: doc all release shell
|
.PHONY: doc all release shell
|
||||||
|
|
||||||
all:
|
all:
|
||||||
clear; cargo build --features k2v
|
clear; cargo build --all-features
|
||||||
|
|
||||||
doc:
|
doc:
|
||||||
cd doc/book; mdbook build
|
cd doc/book; mdbook build
|
||||||
|
603
doc/drafts/admin-api.md
Normal file
603
doc/drafts/admin-api.md
Normal file
@ -0,0 +1,603 @@
|
|||||||
|
# Specification of Garage's administration API
|
||||||
|
|
||||||
|
|
||||||
|
**WARNING.** At this point, there is no commitment to stability of the APIs described in this document.
|
||||||
|
We will bump the version number prefixed to each API endpoint each time the syntax
|
||||||
|
or semantics change, meaning that code that relies on these endpoints will break
|
||||||
|
when changes are introduced.
|
||||||
|
|
||||||
|
|
||||||
|
## Access control
|
||||||
|
|
||||||
|
The admin API uses two different tokens for access control, which are specified in the config file's `[admin]` section (an example request using such a token is sketched after the list below):
|
||||||
|
|
||||||
|
- `metrics_token`: the token for accessing the Metrics endpoint (if this token is not set in the config file, the Metrics endpoint can be accessed without access control);
|
||||||
|
- `admin_token`: the token for accessing all of the other administration endpoints (if this token is not set in the config file, access to these endpoints is disabled entirely).
|
||||||
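As an illustration, here is a minimal Rust sketch of an authenticated call to the `Metrics` endpoint using the `hyper` client (already a dependency of the project). The listen address `127.0.0.1:3903` and the token value are placeholders; substitute whatever `api_bind_addr` and tokens your `[admin]` configuration section defines.

```rust
use hyper::{Body, Client, Request};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder address and token: use the api_bind_addr and metrics_token
    // (or admin_token) from your [admin] configuration section.
    let admin_endpoint = "http://127.0.0.1:3903";
    let metrics_token = "my-metrics-token";

    // Tokens are sent as bearer tokens in the Authorization header.
    let req = Request::builder()
        .method("GET")
        .uri(format!("{}/metrics", admin_endpoint))
        .header("Authorization", format!("Bearer {}", metrics_token))
        .body(Body::empty())?;

    let resp = Client::new().request(req).await?;
    println!("status: {}", resp.status());
    let body = hyper::body::to_bytes(resp.into_body()).await?;
    println!("{}", String::from_utf8_lossy(&body));
    Ok(())
}
```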
|
|
||||||
|
## Administration API endpoints
|
||||||
|
|
||||||
|
### Metrics-related endpoints
|
||||||
|
|
||||||
|
#### Metrics `GET /metrics`
|
||||||
|
|
||||||
|
Returns internal Garage metrics in Prometheus format.
|
||||||
|
|
||||||
|
### Cluster operations
|
||||||
|
|
||||||
|
#### GetClusterStatus `GET /v0/status`
|
||||||
|
|
||||||
|
Returns the cluster's current status in JSON, including:
|
||||||
|
|
||||||
|
- ID of the node being queried and its version of the Garage daemon
|
||||||
|
- Live nodes
|
||||||
|
- Currently configured cluster layout
|
||||||
|
- Staged changes to the cluster layout
|
||||||
|
|
||||||
|
Example response body:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"node": "ec79480e0ce52ae26fd00c9da684e4fa56658d9c64cdcecb094e936de0bfe71f",
|
||||||
|
"garage_version": "git:v0.8.0",
|
||||||
|
"knownNodes": {
|
||||||
|
"ec79480e0ce52ae26fd00c9da684e4fa56658d9c64cdcecb094e936de0bfe71f": {
|
||||||
|
"addr": "10.0.0.11:3901",
|
||||||
|
"is_up": true,
|
||||||
|
"last_seen_secs_ago": 9,
|
||||||
|
"hostname": "node1"
|
||||||
|
},
|
||||||
|
"4a6ae5a1d0d33bf895f5bb4f0a418b7dc94c47c0dd2eb108d1158f3c8f60b0ff": {
|
||||||
|
"addr": "10.0.0.12:3901",
|
||||||
|
"is_up": true,
|
||||||
|
"last_seen_secs_ago": 1,
|
||||||
|
"hostname": "node2"
|
||||||
|
},
|
||||||
|
"23ffd0cdd375ebff573b20cc5cef38996b51c1a7d6dbcf2c6e619876e507cf27": {
|
||||||
|
"addr": "10.0.0.21:3901",
|
||||||
|
"is_up": true,
|
||||||
|
"last_seen_secs_ago": 7,
|
||||||
|
"hostname": "node3"
|
||||||
|
},
|
||||||
|
"e2ee7984ee65b260682086ec70026165903c86e601a4a5a501c1900afe28d84b": {
|
||||||
|
"addr": "10.0.0.22:3901",
|
||||||
|
"is_up": true,
|
||||||
|
"last_seen_secs_ago": 1,
|
||||||
|
"hostname": "node4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"layout": {
|
||||||
|
"version": 12,
|
||||||
|
"roles": {
|
||||||
|
"ec79480e0ce52ae26fd00c9da684e4fa56658d9c64cdcecb094e936de0bfe71f": {
|
||||||
|
"zone": "dc1",
|
||||||
|
"capacity": 4,
|
||||||
|
"tags": [
|
||||||
|
"node1"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"4a6ae5a1d0d33bf895f5bb4f0a418b7dc94c47c0dd2eb108d1158f3c8f60b0ff": {
|
||||||
|
"zone": "dc1",
|
||||||
|
"capacity": 6,
|
||||||
|
"tags": [
|
||||||
|
"node2"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"23ffd0cdd375ebff573b20cc5cef38996b51c1a7d6dbcf2c6e619876e507cf27": {
|
||||||
|
"zone": "dc2",
|
||||||
|
"capacity": 10,
|
||||||
|
"tags": [
|
||||||
|
"node3"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"stagedRoleChanges": {
|
||||||
|
"e2ee7984ee65b260682086ec70026165903c86e601a4a5a501c1900afe28d84b": {
|
||||||
|
"zone": "dc2",
|
||||||
|
"capacity": 5,
|
||||||
|
"tags": [
|
||||||
|
"node4"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### ConnectClusterNodes `POST /v0/connect`
|
||||||
|
|
||||||
|
Instructs this Garage node to connect to other Garage nodes at specified addresses.
|
||||||
|
|
||||||
|
Example request body:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
"ec79480e0ce52ae26fd00c9da684e4fa56658d9c64cdcecb094e936de0bfe71f@10.0.0.11:3901",
|
||||||
|
"4a6ae5a1d0d33bf895f5bb4f0a418b7dc94c47c0dd2eb108d1158f3c8f60b0ff@10.0.0.12:3901"
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
The format of the string for a node to connect to is: `<node ID>@<ip address>:<port>`, same as in the `garage node connect` CLI call.
|
||||||
|
|
||||||
|
Example response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"error": null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"success": false,
|
||||||
|
"error": "Handshake error"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### GetClusterLayout `GET /v0/layout`
|
||||||
|
|
||||||
|
Returns the cluster's current layout in JSON, including:
|
||||||
|
|
||||||
|
- Currently configured cluster layout
|
||||||
|
- Staged changes to the cluster layout
|
||||||
|
|
||||||
|
(the info returned by this endpoint is a subset of the info returned by GetClusterStatus)
|
||||||
|
|
||||||
|
Example response body:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"version": 12,
|
||||||
|
"roles": {
|
||||||
|
"ec79480e0ce52ae26fd00c9da684e4fa56658d9c64cdcecb094e936de0bfe71f": {
|
||||||
|
"zone": "dc1",
|
||||||
|
"capacity": 4,
|
||||||
|
"tags": [
|
||||||
|
"node1"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"4a6ae5a1d0d33bf895f5bb4f0a418b7dc94c47c0dd2eb108d1158f3c8f60b0ff": {
|
||||||
|
"zone": "dc1",
|
||||||
|
"capacity": 6,
|
||||||
|
"tags": [
|
||||||
|
"node2"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"23ffd0cdd375ebff573b20cc5cef38996b51c1a7d6dbcf2c6e619876e507cf27": {
|
||||||
|
"zone": "dc2",
|
||||||
|
"capacity": 10,
|
||||||
|
"tags": [
|
||||||
|
"node3"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"stagedRoleChanges": {
|
||||||
|
"e2ee7984ee65b260682086ec70026165903c86e601a4a5a501c1900afe28d84b": {
|
||||||
|
"zone": "dc2",
|
||||||
|
"capacity": 5,
|
||||||
|
"tags": [
|
||||||
|
"node4"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### UpdateClusterLayout `POST /v0/layout`
|
||||||
|
|
||||||
|
Send modifications to the cluster layout. These modifications will
|
||||||
|
be included in the staged role changes, visible in subsequent calls
|
||||||
|
of `GetClusterLayout`. Once the set of staged changes is satisfactory,
|
||||||
|
the user may call `ApplyClusterLayout` to apply the staged changes,
|
||||||
|
or `RevertClusterLayout` to clear all of the staged changes in
|
||||||
|
the layout.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
<node_id>: {
|
||||||
|
"capacity": <new_capacity>,
|
||||||
|
"zone": <new_zone>,
|
||||||
|
"tags": [
|
||||||
|
<new_tag>,
|
||||||
|
...
|
||||||
|
]
|
||||||
|
},
|
||||||
|
<node_id_to_remove>: null,
|
||||||
|
...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Contrary to the CLI, which may update only a subset of the fields
|
||||||
|
`capacity`, `zone` and `tags`, when calling this API all of these
|
||||||
|
values must be specified.
|
||||||
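For illustration, here is a hedged Rust sketch that stages a role change for one node and removes the role of another. The node identifiers, admin endpoint and token are placeholders; note that `capacity`, `zone` and `tags` are all given for the modified node, as required above.

```rust
use hyper::{Body, Client, Method, Request};
use serde_json::{json, Map, Value};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder node identifiers (64-character hex IDs as returned by GetClusterStatus).
    let modified_node = "e2ee7984ee65b260682086ec70026165903c86e601a4a5a501c1900afe28d84b";
    let removed_node = "23ffd0cdd375ebff573b20cc5cef38996b51c1a7d6dbcf2c6e619876e507cf27";

    // Build the request body: a JSON object mapping node IDs to either a full
    // role description (capacity + zone + tags) or null to remove the role.
    let mut body = Map::new();
    body.insert(
        modified_node.to_string(),
        json!({ "capacity": 5, "zone": "dc2", "tags": ["node4"] }),
    );
    body.insert(removed_node.to_string(), Value::Null);

    let req = Request::builder()
        .method(Method::POST)
        .uri("http://127.0.0.1:3903/v0/layout") // placeholder admin endpoint
        .header("Authorization", "Bearer my-admin-token") // placeholder token
        .body(Body::from(Value::Object(body).to_string()))?;

    let resp = Client::new().request(req).await?;
    println!("UpdateClusterLayout returned {}", resp.status());
    Ok(())
}
```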
|
|
||||||
|
|
||||||
|
#### ApplyClusterLayout `POST /v0/layout/apply`
|
||||||
|
|
||||||
|
Applies to the cluster the layout changes currently registered as
|
||||||
|
staged layout changes.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"version": 13
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Similarly to the CLI, the body must include the version of the new layout
|
||||||
|
that will be created, which MUST be 1 + the version of the currently
|
||||||
|
existing layout in the cluster.
|
||||||
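To make the version rule concrete, here is a hedged sketch (placeholder endpoint and token again) that reads the current layout version with `GetClusterLayout` and then applies the staged changes under that version plus one:

```rust
use hyper::{Body, Client, Method, Request};
use serde_json::{json, Value};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new();
    let auth = ("Authorization", "Bearer my-admin-token"); // placeholder token

    // Read the version of the currently applied layout...
    let req = Request::builder()
        .uri("http://127.0.0.1:3903/v0/layout")
        .header(auth.0, auth.1)
        .body(Body::empty())?;
    let resp = client.request(req).await?;
    let bytes = hyper::body::to_bytes(resp.into_body()).await?;
    let layout: Value = serde_json::from_slice(&bytes)?;
    let current_version = layout["version"].as_u64().unwrap_or(0);

    // ...and apply the staged changes as version current + 1.
    let req = Request::builder()
        .method(Method::POST)
        .uri("http://127.0.0.1:3903/v0/layout/apply")
        .header(auth.0, auth.1)
        .body(Body::from(json!({ "version": current_version + 1 }).to_string()))?;
    let resp = client.request(req).await?;
    println!("ApplyClusterLayout returned {}", resp.status());
    Ok(())
}
```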
|
|
||||||
|
#### RevertClusterLayout `POST /v0/layout/revert`
|
||||||
|
|
||||||
|
Clears all of the staged layout changes.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"version": 13
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Reverting the staged changes is done by incrementing the version number
|
||||||
|
and clearing the contents of the staged change list.
|
||||||
|
Similarly to the CLI, the body must include the incremented
|
||||||
|
version number, which MUST be 1 + the version of the currently
|
||||||
|
existing layout in the cluster.
|
||||||
|
|
||||||
|
|
||||||
|
### Access key operations
|
||||||
|
|
||||||
|
#### ListKeys `GET /v0/key`
|
||||||
|
|
||||||
|
Returns all API access keys in the cluster.
|
||||||
|
|
||||||
|
Example response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"id": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"name": "test"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "GKe10061ac9c2921f09e4c5540",
|
||||||
|
"name": "test2"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### CreateKey `POST /v0/key`
|
||||||
|
|
||||||
|
Creates a new API access key.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "NameOfMyKey"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### ImportKey `POST /v0/key/import`
|
||||||
|
|
||||||
|
Imports an existing API key.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"accessKeyId": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"secretAccessKey": "b892c0665f0ada8a4755dae98baa3b133590e11dae3bcc1f9d769d67f16c3835",
|
||||||
|
"name": "NameOfMyKey"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### GetKeyInfo `GET /v0/key?id=<access key id>`
|
||||||
|
#### GetKeyInfo `GET /v0/key?search=<pattern>`
|
||||||
|
|
||||||
|
Returns information about the requested API access key.
|
||||||
|
|
||||||
|
If `id` is set, the key is looked up using its exact identifier (faster).
|
||||||
|
If `search` is set, the key is looked up by matching a prefix of its name or
|
||||||
|
identifier (slower, as all keys are enumerated to do this).
|
||||||
|
|
||||||
|
Example response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "test",
|
||||||
|
"accessKeyId": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"secretAccessKey": "b892c0665f0ada8a4755dae98baa3b133590e11dae3bcc1f9d769d67f16c3835",
|
||||||
|
"permissions": {
|
||||||
|
"createBucket": false
|
||||||
|
},
|
||||||
|
"buckets": [
|
||||||
|
{
|
||||||
|
"id": "70dc3bed7fe83a75e46b66e7ddef7d56e65f3c02f9f80b6749fb97eccb5e1033",
|
||||||
|
"globalAliases": [
|
||||||
|
"test2"
|
||||||
|
],
|
||||||
|
"localAliases": [],
|
||||||
|
"permissions": {
|
||||||
|
"read": true,
|
||||||
|
"write": true,
|
||||||
|
"owner": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "d7452a935e663fc1914f3a5515163a6d3724010ce8dfd9e4743ca8be5974f995",
|
||||||
|
"globalAliases": [
|
||||||
|
"test3"
|
||||||
|
],
|
||||||
|
"localAliases": [],
|
||||||
|
"permissions": {
|
||||||
|
"read": true,
|
||||||
|
"write": true,
|
||||||
|
"owner": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "e6a14cd6a27f48684579ec6b381c078ab11697e6bc8513b72b2f5307e25fff9b",
|
||||||
|
"globalAliases": [],
|
||||||
|
"localAliases": [
|
||||||
|
"test"
|
||||||
|
],
|
||||||
|
"permissions": {
|
||||||
|
"read": true,
|
||||||
|
"write": true,
|
||||||
|
"owner": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "96470e0df00ec28807138daf01915cfda2bee8eccc91dea9558c0b4855b5bf95",
|
||||||
|
"globalAliases": [
|
||||||
|
"alex"
|
||||||
|
],
|
||||||
|
"localAliases": [],
|
||||||
|
"permissions": {
|
||||||
|
"read": true,
|
||||||
|
"write": true,
|
||||||
|
"owner": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### DeleteKey `DELETE /v0/key?id=<access key id>`
|
||||||
|
|
||||||
|
Deletes an API access key.
|
||||||
|
|
||||||
|
#### UpdateKey `POST /v0/key?id=<access key id>`
|
||||||
|
|
||||||
|
Updates information about the specified API access key.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "NameOfMyKey",
|
||||||
|
"allow": {
|
||||||
|
"createBucket": true,
|
||||||
|
},
|
||||||
|
"deny": {}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
All fields (`name`, `allow` and `deny`) are optional.
|
||||||
|
If they are present, the corresponding modifications are applied to the key, otherwise nothing is changed.
|
||||||
|
The possible flags in `allow` and `deny` are: `createBucket`.
|
||||||
|
|
||||||
|
|
||||||
|
### Bucket operations
|
||||||
|
|
||||||
|
#### ListBuckets `GET /v0/bucket`
|
||||||
|
|
||||||
|
Returns all storage buckets in the cluster.
|
||||||
|
|
||||||
|
Example response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"id": "70dc3bed7fe83a75e46b66e7ddef7d56e65f3c02f9f80b6749fb97eccb5e1033",
|
||||||
|
"globalAliases": [
|
||||||
|
"test2"
|
||||||
|
],
|
||||||
|
"localAliases": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "96470e0df00ec28807138daf01915cfda2bee8eccc91dea9558c0b4855b5bf95",
|
||||||
|
"globalAliases": [
|
||||||
|
"alex"
|
||||||
|
],
|
||||||
|
"localAliases": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "d7452a935e663fc1914f3a5515163a6d3724010ce8dfd9e4743ca8be5974f995",
|
||||||
|
"globalAliases": [
|
||||||
|
"test3"
|
||||||
|
],
|
||||||
|
"localAliases": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "e6a14cd6a27f48684579ec6b381c078ab11697e6bc8513b72b2f5307e25fff9b",
|
||||||
|
"globalAliases": [],
|
||||||
|
"localAliases": [
|
||||||
|
{
|
||||||
|
"accessKeyId": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"alias": "test"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### GetBucketInfo `GET /v0/bucket?id=<bucket id>`
|
||||||
|
#### GetBucketInfo `GET /v0/bucket?globalAlias=<alias>`
|
||||||
|
|
||||||
|
Returns information about the requested storage bucket.
|
||||||
|
|
||||||
|
If `id` is set, the bucket is looked up using its exact identifier.
|
||||||
|
If `globalAlias` is set, the bucket is looked up using its global alias.
|
||||||
|
(both are fast)
|
||||||
|
|
||||||
|
Example response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "e6a14cd6a27f48684579ec6b381c078ab11697e6bc8513b72b2f5307e25fff9b",
|
||||||
|
"globalAliases": [
|
||||||
|
"alex"
|
||||||
|
],
|
||||||
|
"keys": [
|
||||||
|
{
|
||||||
|
"accessKeyId": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"name": "alex",
|
||||||
|
"permissions": {
|
||||||
|
"read": true,
|
||||||
|
"write": true,
|
||||||
|
"owner": true
|
||||||
|
},
|
||||||
|
"bucketLocalAliases": [
|
||||||
|
"test"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### CreateBucket `POST /v0/bucket`
|
||||||
|
|
||||||
|
Creates a new storage bucket.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"globalAlias": "NameOfMyBucket"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
OR
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"localAlias": {
|
||||||
|
"accessKeyId": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"alias": "NameOfMyBucket",
|
||||||
|
"allow": {
|
||||||
|
"read": true,
|
||||||
|
"write": true,
|
||||||
|
"owner": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
OR
|
||||||
|
|
||||||
|
```json
|
||||||
|
{}
|
||||||
|
```
|
||||||
|
|
||||||
|
Creates a new bucket, either with a global alias, a local one,
|
||||||
|
or no alias at all.
|
||||||
|
|
||||||
|
Technically, you can also specify both `globalAlias` and `localAlias` and that would create
|
||||||
|
two aliases, but I don't see why you would want to do that.
|
||||||
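For reference, the three request-body variants above can be built like this (a hedged sketch; the access key ID and bucket names are placeholders taken from the examples in this document):

```rust
use serde_json::json;

fn main() {
    // 1. Create a bucket with a global alias only.
    let with_global_alias = json!({ "globalAlias": "NameOfMyBucket" });

    // 2. Create a bucket with a local alias in the namespace of one access key,
    //    together with the permissions granted to that key on the new bucket.
    let with_local_alias = json!({
        "localAlias": {
            "accessKeyId": "GK31c2f218a2e44f485b94239e",
            "alias": "NameOfMyBucket",
            "allow": { "read": true, "write": true, "owner": false }
        }
    });

    // 3. Create a bucket with no alias at all; it can then only be referred to by its ID.
    let without_alias = json!({});

    // Each of these values would be sent as the body of a POST /v0/bucket request.
    for body in vec![with_global_alias, with_local_alias, without_alias] {
        println!("{}", body);
    }
}
```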
|
|
||||||
|
#### DeleteBucket `DELETE /v0/bucket?id=<bucket id>`
|
||||||
|
|
||||||
|
Deletes a storage bucket. A bucket cannot be deleted if it is not empty.
|
||||||
|
|
||||||
|
Warning: this will delete all aliases associated with the bucket!
|
||||||
|
|
||||||
|
#### PutBucketWebsite `PUT /v0/bucket/website?id=<bucket id>`
|
||||||
|
|
||||||
|
Sets the website configuration for a bucket (this also enables website access for this bucket).
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"indexDocument": "index.html",
|
||||||
|
"errorDocument": "404.html"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The field `errorDocument` is optional; if no error document is set, a generic error message is displayed when errors happen.
|
||||||
|
|
||||||
|
|
||||||
|
#### DeleteBucketWebsite `DELETE /v0/bucket/website?id=<bucket id>`
|
||||||
|
|
||||||
|
Deletes the website configuration for a bucket (disables website access for this bucket).
|
||||||
|
|
||||||
|
|
||||||
|
### Operations on permissions for keys on buckets
|
||||||
|
|
||||||
|
#### BucketAllowKey `POST /v0/bucket/allow`
|
||||||
|
|
||||||
|
Allows a key to do read/write/owner operations on a bucket.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"bucketId": "e6a14cd6a27f48684579ec6b381c078ab11697e6bc8513b72b2f5307e25fff9b",
|
||||||
|
"accessKeyId": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"permissions": {
|
||||||
|
"read": true,
|
||||||
|
"write": true,
|
||||||
|
"owner": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Flags in `permissions` which have the value `true` will be activated.
|
||||||
|
Other flags will remain unchanged.
|
||||||
|
|
||||||
|
#### BucketDenyKey `POST /v0/bucket/deny`
|
||||||
|
|
||||||
|
Denies a key the permission to do read/write/owner operations on a bucket.
|
||||||
|
|
||||||
|
Request body format:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"bucketId": "e6a14cd6a27f48684579ec6b381c078ab11697e6bc8513b72b2f5307e25fff9b",
|
||||||
|
"accessKeyId": "GK31c2f218a2e44f485b94239e",
|
||||||
|
"permissions": {
|
||||||
|
"read": false,
|
||||||
|
"write": false,
|
||||||
|
"owner": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Flags in `permissions` which have the value `true` will be deactivated.
|
||||||
|
Other flags will remain unchanged.
|
||||||
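As a worked example (a sketch, reusing the placeholder IDs from above), revoking only the write permission of a key is done by setting just that flag to `true` in the deny request; `read` and `owner` are left untouched because they are `false`:

```rust
use serde_json::json;

fn main() {
    // Only "write" is true, so only the write permission is deactivated;
    // the read and owner flags of the key on this bucket are unchanged.
    let deny_write_only = json!({
        "bucketId": "e6a14cd6a27f48684579ec6b381c078ab11697e6bc8513b72b2f5307e25fff9b",
        "accessKeyId": "GK31c2f218a2e44f485b94239e",
        "permissions": { "read": false, "write": true, "owner": false }
    });
    // This value would be sent as the body of a POST /v0/bucket/deny request.
    println!("{}", deny_write_only);
}
```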
|
|
||||||
|
|
||||||
|
### Operations on bucket aliases
|
||||||
|
|
||||||
|
#### GlobalAliasBucket `PUT /v0/bucket/alias/global?id=<bucket id>&alias=<global alias>`
|
||||||
|
|
||||||
|
Empty body. Creates a global alias for a bucket.
|
||||||
|
|
||||||
|
#### GlobalUnaliasBucket `DELETE /v0/bucket/alias/global?id=<bucket id>&alias=<global alias>`
|
||||||
|
|
||||||
|
Removes a global alias for a bucket.
|
||||||
|
|
||||||
|
#### LocalAliasBucket `PUT /v0/bucket/alias/local?id=<bucket id>&accessKeyId=<access key ID>&alias=<local alias>`
|
||||||
|
|
||||||
|
Empty body. Creates a local alias for a bucket in the namespace of a specific access key.
|
||||||
|
|
||||||
|
#### LocalUnaliasBucket `DELETE /v0/bucket/alias/local?id=<bucket id>&accessKeyId=<access key ID>&alias=<local alias>`
|
||||||
|
|
||||||
|
Removes a local alias for a bucket in the namespace of a specific access key.
|
||||||
|
|
@ -1,29 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "garage_admin"
|
|
||||||
version = "0.7.0"
|
|
||||||
authors = ["Maximilien Richer <code@mricher.fr>"]
|
|
||||||
edition = "2018"
|
|
||||||
license = "AGPL-3.0"
|
|
||||||
description = "Administration and metrics REST HTTP server for Garage"
|
|
||||||
repository = "https://git.deuxfleurs.fr/Deuxfleurs/garage"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
path = "lib.rs"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
garage_util = { version = "0.7.0", path = "../util" }
|
|
||||||
|
|
||||||
hex = "0.4"
|
|
||||||
|
|
||||||
futures = "0.3"
|
|
||||||
futures-util = "0.3"
|
|
||||||
http = "0.2"
|
|
||||||
hyper = "0.14"
|
|
||||||
tracing = "0.1.30"
|
|
||||||
|
|
||||||
opentelemetry = { version = "0.17", features = [ "rt-tokio" ] }
|
|
||||||
opentelemetry-prometheus = "0.10"
|
|
||||||
opentelemetry-otlp = "0.10"
|
|
||||||
prometheus = "0.13"
|
|
@ -1,6 +0,0 @@
|
|||||||
//! Crate for handling the admin and metric HTTP APIs
|
|
||||||
#[macro_use]
|
|
||||||
extern crate tracing;
|
|
||||||
|
|
||||||
pub mod metrics;
|
|
||||||
pub mod tracing_setup;
|
|
@ -1,146 +0,0 @@
|
|||||||
use std::convert::Infallible;
|
|
||||||
use std::net::SocketAddr;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use std::time::SystemTime;
|
|
||||||
|
|
||||||
use futures::future::*;
|
|
||||||
use hyper::{
|
|
||||||
header::CONTENT_TYPE,
|
|
||||||
service::{make_service_fn, service_fn},
|
|
||||||
Body, Method, Request, Response, Server,
|
|
||||||
};
|
|
||||||
|
|
||||||
use opentelemetry::{
|
|
||||||
global,
|
|
||||||
metrics::{BoundCounter, BoundValueRecorder},
|
|
||||||
trace::{FutureExt, TraceContextExt, Tracer},
|
|
||||||
Context,
|
|
||||||
};
|
|
||||||
use opentelemetry_prometheus::PrometheusExporter;
|
|
||||||
|
|
||||||
use prometheus::{Encoder, TextEncoder};
|
|
||||||
|
|
||||||
use garage_util::error::Error as GarageError;
|
|
||||||
use garage_util::metrics::*;
|
|
||||||
|
|
||||||
// serve_req on metric endpoint
|
|
||||||
async fn serve_req(
|
|
||||||
req: Request<Body>,
|
|
||||||
admin_server: Arc<AdminServer>,
|
|
||||||
) -> Result<Response<Body>, hyper::Error> {
|
|
||||||
debug!("Receiving request at path {}", req.uri());
|
|
||||||
let request_start = SystemTime::now();
|
|
||||||
|
|
||||||
admin_server.metrics.http_counter.add(1);
|
|
||||||
|
|
||||||
let response = match (req.method(), req.uri().path()) {
|
|
||||||
(&Method::GET, "/metrics") => {
|
|
||||||
let mut buffer = vec![];
|
|
||||||
let encoder = TextEncoder::new();
|
|
||||||
|
|
||||||
let tracer = opentelemetry::global::tracer("garage");
|
|
||||||
let metric_families = tracer.in_span("admin/gather_metrics", |_| {
|
|
||||||
admin_server.exporter.registry().gather()
|
|
||||||
});
|
|
||||||
|
|
||||||
encoder.encode(&metric_families, &mut buffer).unwrap();
|
|
||||||
admin_server
|
|
||||||
.metrics
|
|
||||||
.http_body_gauge
|
|
||||||
.record(buffer.len() as u64);
|
|
||||||
|
|
||||||
Response::builder()
|
|
||||||
.status(200)
|
|
||||||
.header(CONTENT_TYPE, encoder.format_type())
|
|
||||||
.body(Body::from(buffer))
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
_ => Response::builder()
|
|
||||||
.status(404)
|
|
||||||
.body(Body::from("Not implemented"))
|
|
||||||
.unwrap(),
|
|
||||||
};
|
|
||||||
|
|
||||||
admin_server
|
|
||||||
.metrics
|
|
||||||
.http_req_histogram
|
|
||||||
.record(request_start.elapsed().map_or(0.0, |d| d.as_secs_f64()));
|
|
||||||
Ok(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
// AdminServer hold the admin server internal admin_server and the metric exporter
|
|
||||||
pub struct AdminServer {
|
|
||||||
exporter: PrometheusExporter,
|
|
||||||
metrics: AdminServerMetrics,
|
|
||||||
}
|
|
||||||
|
|
||||||
// GarageMetricadmin_server holds the metrics counter definition for Garage
|
|
||||||
// FIXME: we would rather have that split up among the different libraries?
|
|
||||||
struct AdminServerMetrics {
|
|
||||||
http_counter: BoundCounter<u64>,
|
|
||||||
http_body_gauge: BoundValueRecorder<u64>,
|
|
||||||
http_req_histogram: BoundValueRecorder<f64>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AdminServer {
|
|
||||||
/// init initilialize the AdminServer and background metric server
|
|
||||||
pub fn init() -> AdminServer {
|
|
||||||
let exporter = opentelemetry_prometheus::exporter().init();
|
|
||||||
let meter = global::meter("garage/admin_server");
|
|
||||||
AdminServer {
|
|
||||||
exporter,
|
|
||||||
metrics: AdminServerMetrics {
|
|
||||||
http_counter: meter
|
|
||||||
.u64_counter("admin.http_requests_total")
|
|
||||||
.with_description("Total number of HTTP requests made.")
|
|
||||||
.init()
|
|
||||||
.bind(&[]),
|
|
||||||
http_body_gauge: meter
|
|
||||||
.u64_value_recorder("admin.http_response_size_bytes")
|
|
||||||
.with_description("The metrics HTTP response sizes in bytes.")
|
|
||||||
.init()
|
|
||||||
.bind(&[]),
|
|
||||||
http_req_histogram: meter
|
|
||||||
.f64_value_recorder("admin.http_request_duration_seconds")
|
|
||||||
.with_description("The HTTP request latencies in seconds.")
|
|
||||||
.init()
|
|
||||||
.bind(&[]),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// run execute the admin server on the designated HTTP port and listen for requests
|
|
||||||
pub async fn run(
|
|
||||||
self,
|
|
||||||
bind_addr: SocketAddr,
|
|
||||||
shutdown_signal: impl Future<Output = ()>,
|
|
||||||
) -> Result<(), GarageError> {
|
|
||||||
let admin_server = Arc::new(self);
|
|
||||||
// For every connection, we must make a `Service` to handle all
|
|
||||||
// incoming HTTP requests on said connection.
|
|
||||||
let make_svc = make_service_fn(move |_conn| {
|
|
||||||
let admin_server = admin_server.clone();
|
|
||||||
// This is the `Service` that will handle the connection.
|
|
||||||
// `service_fn` is a helper to convert a function that
|
|
||||||
// returns a Response into a `Service`.
|
|
||||||
async move {
|
|
||||||
Ok::<_, Infallible>(service_fn(move |req| {
|
|
||||||
let tracer = opentelemetry::global::tracer("garage");
|
|
||||||
let span = tracer
|
|
||||||
.span_builder("admin/request")
|
|
||||||
.with_trace_id(gen_trace_id())
|
|
||||||
.start(&tracer);
|
|
||||||
|
|
||||||
serve_req(req, admin_server.clone())
|
|
||||||
.with_context(Context::current_with_span(span))
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
let server = Server::bind(&bind_addr).serve(make_svc);
|
|
||||||
let graceful = server.with_graceful_shutdown(shutdown_signal);
|
|
||||||
info!("Admin server listening on http://{}", bind_addr);
|
|
||||||
|
|
||||||
graceful.await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
@ -54,6 +54,9 @@ quick-xml = { version = "0.21", features = [ "serialize" ] }
|
|||||||
url = "2.1"
|
url = "2.1"
|
||||||
|
|
||||||
opentelemetry = "0.17"
|
opentelemetry = "0.17"
|
||||||
|
opentelemetry-prometheus = "0.10"
|
||||||
|
opentelemetry-otlp = "0.10"
|
||||||
|
prometheus = "0.13"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
k2v = [ "garage_util/k2v", "garage_model/k2v" ]
|
k2v = [ "garage_util/k2v", "garage_model/k2v" ]
|
||||||
|
199
src/api/admin/api_server.rs
Normal file
199
src/api/admin/api_server.rs
Normal file
@ -0,0 +1,199 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use async_trait::async_trait;
|
||||||
|
|
||||||
|
use futures::future::Future;
|
||||||
|
use http::header::{
|
||||||
|
ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ALLOW, CONTENT_TYPE,
|
||||||
|
};
|
||||||
|
use hyper::{Body, Request, Response};
|
||||||
|
|
||||||
|
use opentelemetry::trace::{SpanRef, Tracer};
|
||||||
|
use opentelemetry_prometheus::PrometheusExporter;
|
||||||
|
use prometheus::{Encoder, TextEncoder};
|
||||||
|
|
||||||
|
use garage_model::garage::Garage;
|
||||||
|
use garage_util::error::Error as GarageError;
|
||||||
|
|
||||||
|
use crate::generic_server::*;
|
||||||
|
|
||||||
|
use crate::admin::bucket::*;
|
||||||
|
use crate::admin::cluster::*;
|
||||||
|
use crate::admin::error::*;
|
||||||
|
use crate::admin::key::*;
|
||||||
|
use crate::admin::router::{Authorization, Endpoint};
|
||||||
|
|
||||||
|
pub struct AdminApiServer {
|
||||||
|
garage: Arc<Garage>,
|
||||||
|
exporter: PrometheusExporter,
|
||||||
|
metrics_token: Option<String>,
|
||||||
|
admin_token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AdminApiServer {
|
||||||
|
pub fn new(garage: Arc<Garage>) -> Self {
|
||||||
|
let exporter = opentelemetry_prometheus::exporter().init();
|
||||||
|
let cfg = &garage.config.admin;
|
||||||
|
let metrics_token = cfg
|
||||||
|
.metrics_token
|
||||||
|
.as_ref()
|
||||||
|
.map(|tok| format!("Bearer {}", tok));
|
||||||
|
let admin_token = cfg
|
||||||
|
.admin_token
|
||||||
|
.as_ref()
|
||||||
|
.map(|tok| format!("Bearer {}", tok));
|
||||||
|
Self {
|
||||||
|
garage,
|
||||||
|
exporter,
|
||||||
|
metrics_token,
|
||||||
|
admin_token,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn run(self, shutdown_signal: impl Future<Output = ()>) -> Result<(), GarageError> {
|
||||||
|
if let Some(bind_addr) = self.garage.config.admin.api_bind_addr {
|
||||||
|
let region = self.garage.config.s3_api.s3_region.clone();
|
||||||
|
ApiServer::new(region, self)
|
||||||
|
.run_server(bind_addr, shutdown_signal)
|
||||||
|
.await
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_options(&self, _req: &Request<Body>) -> Result<Response<Body>, Error> {
|
||||||
|
Ok(Response::builder()
|
||||||
|
.status(204)
|
||||||
|
.header(ALLOW, "OPTIONS, GET, POST")
|
||||||
|
.header(ACCESS_CONTROL_ALLOW_METHODS, "OPTIONS, GET, POST")
|
||||||
|
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
|
||||||
|
.body(Body::empty())?)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_metrics(&self) -> Result<Response<Body>, Error> {
|
||||||
|
let mut buffer = vec![];
|
||||||
|
let encoder = TextEncoder::new();
|
||||||
|
|
||||||
|
let tracer = opentelemetry::global::tracer("garage");
|
||||||
|
let metric_families = tracer.in_span("admin/gather_metrics", |_| {
|
||||||
|
self.exporter.registry().gather()
|
||||||
|
});
|
||||||
|
|
||||||
|
encoder
|
||||||
|
.encode(&metric_families, &mut buffer)
|
||||||
|
.ok_or_internal_error("Could not serialize metrics")?;
|
||||||
|
|
||||||
|
Ok(Response::builder()
|
||||||
|
.status(200)
|
||||||
|
.header(CONTENT_TYPE, encoder.format_type())
|
||||||
|
.body(Body::from(buffer))?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl ApiHandler for AdminApiServer {
|
||||||
|
const API_NAME: &'static str = "admin";
|
||||||
|
const API_NAME_DISPLAY: &'static str = "Admin";
|
||||||
|
|
||||||
|
type Endpoint = Endpoint;
|
||||||
|
type Error = Error;
|
||||||
|
|
||||||
|
fn parse_endpoint(&self, req: &Request<Body>) -> Result<Endpoint, Error> {
|
||||||
|
Endpoint::from_request(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle(
|
||||||
|
&self,
|
||||||
|
req: Request<Body>,
|
||||||
|
endpoint: Endpoint,
|
||||||
|
) -> Result<Response<Body>, Error> {
|
||||||
|
let expected_auth_header =
|
||||||
|
match endpoint.authorization_type() {
|
||||||
|
Authorization::MetricsToken => self.metrics_token.as_ref(),
|
||||||
|
Authorization::AdminToken => match &self.admin_token {
|
||||||
|
None => return Err(Error::forbidden(
|
||||||
|
"Admin token isn't configured, admin API access is disabled for security.",
|
||||||
|
)),
|
||||||
|
Some(t) => Some(t),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(h) = expected_auth_header {
|
||||||
|
match req.headers().get("Authorization") {
|
||||||
|
None => return Err(Error::forbidden("Authorization token must be provided")),
|
||||||
|
Some(v) => {
|
||||||
|
let authorized = v.to_str().map(|hv| hv.trim() == h).unwrap_or(false);
|
||||||
|
if !authorized {
|
||||||
|
return Err(Error::forbidden("Invalid authorization token provided"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match endpoint {
|
||||||
|
Endpoint::Options => self.handle_options(&req),
|
||||||
|
Endpoint::Metrics => self.handle_metrics(),
|
||||||
|
Endpoint::GetClusterStatus => handle_get_cluster_status(&self.garage).await,
|
||||||
|
Endpoint::ConnectClusterNodes => handle_connect_cluster_nodes(&self.garage, req).await,
|
||||||
|
// Layout
|
||||||
|
Endpoint::GetClusterLayout => handle_get_cluster_layout(&self.garage).await,
|
||||||
|
Endpoint::UpdateClusterLayout => handle_update_cluster_layout(&self.garage, req).await,
|
||||||
|
Endpoint::ApplyClusterLayout => handle_apply_cluster_layout(&self.garage, req).await,
|
||||||
|
Endpoint::RevertClusterLayout => handle_revert_cluster_layout(&self.garage, req).await,
|
||||||
|
// Keys
|
||||||
|
Endpoint::ListKeys => handle_list_keys(&self.garage).await,
|
||||||
|
Endpoint::GetKeyInfo { id, search } => {
|
||||||
|
handle_get_key_info(&self.garage, id, search).await
|
||||||
|
}
|
||||||
|
Endpoint::CreateKey => handle_create_key(&self.garage, req).await,
|
||||||
|
Endpoint::ImportKey => handle_import_key(&self.garage, req).await,
|
||||||
|
Endpoint::UpdateKey { id } => handle_update_key(&self.garage, id, req).await,
|
||||||
|
Endpoint::DeleteKey { id } => handle_delete_key(&self.garage, id).await,
|
||||||
|
// Buckets
|
||||||
|
Endpoint::ListBuckets => handle_list_buckets(&self.garage).await,
|
||||||
|
Endpoint::GetBucketInfo { id, global_alias } => {
|
||||||
|
handle_get_bucket_info(&self.garage, id, global_alias).await
|
||||||
|
}
|
||||||
|
Endpoint::CreateBucket => handle_create_bucket(&self.garage, req).await,
|
||||||
|
Endpoint::DeleteBucket { id } => handle_delete_bucket(&self.garage, id).await,
|
||||||
|
Endpoint::PutBucketWebsite { id } => {
|
||||||
|
handle_put_bucket_website(&self.garage, id, req).await
|
||||||
|
}
|
||||||
|
Endpoint::DeleteBucketWebsite { id } => {
|
||||||
|
handle_delete_bucket_website(&self.garage, id).await
|
||||||
|
}
|
||||||
|
// Bucket-key permissions
|
||||||
|
Endpoint::BucketAllowKey => {
|
||||||
|
handle_bucket_change_key_perm(&self.garage, req, true).await
|
||||||
|
}
|
||||||
|
Endpoint::BucketDenyKey => {
|
||||||
|
handle_bucket_change_key_perm(&self.garage, req, false).await
|
||||||
|
}
|
||||||
|
// Bucket aliasing
|
||||||
|
Endpoint::GlobalAliasBucket { id, alias } => {
|
||||||
|
handle_global_alias_bucket(&self.garage, id, alias).await
|
||||||
|
}
|
||||||
|
Endpoint::GlobalUnaliasBucket { id, alias } => {
|
||||||
|
handle_global_unalias_bucket(&self.garage, id, alias).await
|
||||||
|
}
|
||||||
|
Endpoint::LocalAliasBucket {
|
||||||
|
id,
|
||||||
|
access_key_id,
|
||||||
|
alias,
|
||||||
|
} => handle_local_alias_bucket(&self.garage, id, access_key_id, alias).await,
|
||||||
|
Endpoint::LocalUnaliasBucket {
|
||||||
|
id,
|
||||||
|
access_key_id,
|
||||||
|
alias,
|
||||||
|
} => handle_local_unalias_bucket(&self.garage, id, access_key_id, alias).await,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ApiEndpoint for Endpoint {
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
Endpoint::name(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_span_attributes(&self, _span: SpanRef<'_>) {}
|
||||||
|
}
|
549
src/api/admin/bucket.rs
Normal file
549
src/api/admin/bucket.rs
Normal file
@ -0,0 +1,549 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use hyper::{Body, Request, Response, StatusCode};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use garage_util::crdt::*;
|
||||||
|
use garage_util::data::*;
|
||||||
|
use garage_util::error::Error as GarageError;
|
||||||
|
use garage_util::time::*;
|
||||||
|
|
||||||
|
use garage_table::*;
|
||||||
|
|
||||||
|
use garage_model::bucket_alias_table::*;
|
||||||
|
use garage_model::bucket_table::*;
|
||||||
|
use garage_model::garage::Garage;
|
||||||
|
use garage_model::permission::*;
|
||||||
|
|
||||||
|
use crate::admin::error::*;
|
||||||
|
use crate::admin::key::ApiBucketKeyPerm;
|
||||||
|
use crate::common_error::CommonError;
|
||||||
|
use crate::helpers::parse_json_body;
|
||||||
|
|
||||||
|
pub async fn handle_list_buckets(garage: &Arc<Garage>) -> Result<Response<Body>, Error> {
|
||||||
|
let buckets = garage
|
||||||
|
.bucket_table
|
||||||
|
.get_range(
|
||||||
|
&EmptyKey,
|
||||||
|
None,
|
||||||
|
Some(DeletedFilter::NotDeleted),
|
||||||
|
10000,
|
||||||
|
EnumerationOrder::Forward,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let res = buckets
|
||||||
|
.into_iter()
|
||||||
|
.map(|b| {
|
||||||
|
let state = b.state.as_option().unwrap();
|
||||||
|
ListBucketResultItem {
|
||||||
|
id: hex::encode(b.id),
|
||||||
|
global_aliases: state
|
||||||
|
.aliases
|
||||||
|
.items()
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, _, a)| *a)
|
||||||
|
.map(|(n, _, _)| n.to_string())
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
local_aliases: state
|
||||||
|
.local_aliases
|
||||||
|
.items()
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, _, a)| *a)
|
||||||
|
.map(|((k, n), _, _)| BucketLocalAlias {
|
||||||
|
access_key_id: k.to_string(),
|
||||||
|
alias: n.to_string(),
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let resp_json = serde_json::to_string_pretty(&res).map_err(GarageError::from)?;
|
||||||
|
Ok(Response::builder()
|
||||||
|
.status(StatusCode::OK)
|
||||||
|
.body(Body::from(resp_json))?)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct ListBucketResultItem {
|
||||||
|
id: String,
|
||||||
|
global_aliases: Vec<String>,
|
||||||
|
local_aliases: Vec<BucketLocalAlias>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct BucketLocalAlias {
|
||||||
|
access_key_id: String,
|
||||||
|
alias: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_get_bucket_info(
|
||||||
|
garage: &Arc<Garage>,
|
||||||
|
id: Option<String>,
|
||||||
|
global_alias: Option<String>,
|
||||||
|
) -> Result<Response<Body>, Error> {
|
||||||
|
let bucket_id = match (id, global_alias) {
|
||||||
|
(Some(id), None) => parse_bucket_id(&id)?,
|
||||||
|
(None, Some(ga)) => garage
|
||||||
|
.bucket_helper()
|
||||||
|
.resolve_global_bucket_name(&ga)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| HelperError::NoSuchBucket(ga.to_string()))?,
|
||||||
|
_ => {
|
||||||
|
return Err(Error::bad_request(
|
||||||
|
"Either id or globalAlias must be provided (but not both)",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
bucket_info_results(garage, bucket_id).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn bucket_info_results(
|
||||||
|
garage: &Arc<Garage>,
|
||||||
|
bucket_id: Uuid,
|
||||||
|
) -> Result<Response<Body>, Error> {
|
||||||
|
let bucket = garage
|
||||||
|
.bucket_helper()
|
||||||
|
.get_existing_bucket(bucket_id)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut relevant_keys = HashMap::new();
|
||||||
|
for (k, _) in bucket
|
||||||
|
.state
|
||||||
|
.as_option()
|
||||||
|
.unwrap()
|
||||||
|
.authorized_keys
|
||||||
|
.items()
|
||||||
|
.iter()
|
||||||
|
{
|
||||||
|
if let Some(key) = garage
|
||||||
|
.key_table
|
||||||
|
.get(&EmptyKey, k)
|
||||||
|
.await?
|
||||||
|
.filter(|k| !k.is_deleted())
|
||||||
|
{
|
||||||
|
if !key.state.is_deleted() {
|
||||||
|
relevant_keys.insert(k.clone(), key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for ((k, _), _, _) in bucket
|
||||||
|
.state
|
||||||
|
.as_option()
|
||||||
|
.unwrap()
|
||||||
|
.local_aliases
|
||||||
|
.items()
|
||||||
|
.iter()
|
||||||
|
{
|
||||||
|
if relevant_keys.contains_key(k) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Some(key) = garage.key_table.get(&EmptyKey, k).await? {
|
||||||
|
if !key.state.is_deleted() {
|
||||||
|
relevant_keys.insert(k.clone(), key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let state = bucket.state.as_option().unwrap();
|
||||||
|
|
||||||
|
let res =
|
||||||
|
GetBucketInfoResult {
|
||||||
|
id: hex::encode(&bucket.id),
|
||||||
|
global_aliases: state
|
||||||
|
.aliases
|
||||||
|
.items()
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, _, a)| *a)
|
||||||
|
.map(|(n, _, _)| n.to_string())
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
website_access: state.website_config.get().is_some(),
|
||||||
|
website_config: state.website_config.get().clone().map(|wsc| {
|
||||||
|
GetBucketInfoWebsiteResult {
|
||||||
|
index_document: wsc.index_document,
|
||||||
|
error_document: wsc.error_document,
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
keys: relevant_keys
|
||||||
|
.into_iter()
|
||||||
|
.map(|(_, key)| {
|
||||||
|
let p = key.state.as_option().unwrap();
|
||||||
|
GetBucketInfoKey {
|
||||||
|
access_key_id: key.key_id,
|
||||||
|
name: p.name.get().to_string(),
|
||||||
|
permissions: p
|
||||||
|
.authorized_buckets
|
||||||
|
.get(&bucket.id)
|
||||||
|
.map(|p| ApiBucketKeyPerm {
|
||||||
|
read: p.allow_read,
|
||||||
|
write: p.allow_write,
|
||||||
|
owner: p.allow_owner,
|
||||||
|
})
|
||||||
|
.unwrap_or_default(),
|
||||||
|
bucket_local_aliases: p
|
||||||
|
.local_aliases
|
||||||
|
.items()
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, _, b)| *b == Some(bucket.id))
|
||||||
|
.map(|(n, _, _)| n.to_string())
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let resp_json = serde_json::to_string_pretty(&res).map_err(GarageError::from)?;
|
||||||
|
Ok(Response::builder()
|
||||||
|
.status(StatusCode::OK)
|
||||||
|
.body(Body::from(resp_json))?)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct GetBucketInfoResult {
|
||||||
|
id: String,
|
||||||
|
global_aliases: Vec<String>,
|
||||||
|
website_access: bool,
|
||||||
|
#[serde(default)]
|
||||||
|
website_config: Option<GetBucketInfoWebsiteResult>,
|
||||||
|
keys: Vec<GetBucketInfoKey>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct GetBucketInfoWebsiteResult {
|
||||||
|
index_document: String,
|
||||||
|
error_document: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct GetBucketInfoKey {
|
||||||
|
access_key_id: String,
|
||||||
|
name: String,
|
||||||
|
permissions: ApiBucketKeyPerm,
|
||||||
|
bucket_local_aliases: Vec<String>,
|
||||||
|
}

pub async fn handle_create_bucket(
	garage: &Arc<Garage>,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let req = parse_json_body::<CreateBucketRequest>(req).await?;

	if let Some(ga) = &req.global_alias {
		if !is_valid_bucket_name(ga) {
			return Err(Error::bad_request(format!(
				"{}: {}",
				ga, INVALID_BUCKET_NAME_MESSAGE
			)));
		}

		if let Some(alias) = garage.bucket_alias_table.get(&EmptyKey, ga).await? {
			if alias.state.get().is_some() {
				return Err(CommonError::BucketAlreadyExists.into());
			}
		}
	}

	if let Some(la) = &req.local_alias {
		if !is_valid_bucket_name(&la.alias) {
			return Err(Error::bad_request(format!(
				"{}: {}",
				la.alias, INVALID_BUCKET_NAME_MESSAGE
			)));
		}

		let key = garage
			.key_helper()
			.get_existing_key(&la.access_key_id)
			.await?;
		let state = key.state.as_option().unwrap();
		if matches!(state.local_aliases.get(&la.alias), Some(_)) {
			return Err(Error::bad_request("Local alias already exists"));
		}
	}

	let bucket = Bucket::new();
	garage.bucket_table.insert(&bucket).await?;

	if let Some(ga) = &req.global_alias {
		garage
			.bucket_helper()
			.set_global_bucket_alias(bucket.id, ga)
			.await?;
	}

	if let Some(la) = &req.local_alias {
		garage
			.bucket_helper()
			.set_local_bucket_alias(bucket.id, &la.access_key_id, &la.alias)
			.await?;

		if la.allow.read || la.allow.write || la.allow.owner {
			garage
				.bucket_helper()
				.set_bucket_key_permissions(
					bucket.id,
					&la.access_key_id,
					BucketKeyPerm {
						timestamp: now_msec(),
						allow_read: la.allow.read,
						allow_write: la.allow.write,
						allow_owner: la.allow.owner,
					},
				)
				.await?;
		}
	}

	bucket_info_results(garage, bucket.id).await
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CreateBucketRequest {
	global_alias: Option<String>,
	local_alias: Option<CreateBucketLocalAlias>,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CreateBucketLocalAlias {
	access_key_id: String,
	alias: String,
	#[serde(default)]
	allow: ApiBucketKeyPerm,
}
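
As a reading aid (illustrative, not part of the patch): a CreateBucket request body deserializing into the two structs above could look like the following. Both top-level fields are optional; the alias name and access key id here are placeholders.

	let _example_create_bucket = serde_json::json!({
		"globalAlias": "my-bucket",
		"localAlias": {
			"accessKeyId": "GK31c2f218a2e44f485b94239e",
			"alias": "my-bucket",
			"allow": { "read": true, "write": true, "owner": false }
		}
	});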

pub async fn handle_delete_bucket(
	garage: &Arc<Garage>,
	id: String,
) -> Result<Response<Body>, Error> {
	let helper = garage.bucket_helper();

	let bucket_id = parse_bucket_id(&id)?;

	let mut bucket = helper.get_existing_bucket(bucket_id).await?;
	let state = bucket.state.as_option().unwrap();

	// Check bucket is empty
	if !helper.is_bucket_empty(bucket_id).await? {
		return Err(CommonError::BucketNotEmpty.into());
	}

	// --- done checking, now commit ---
	// 1. delete authorization from keys that had access
	for (key_id, perm) in bucket.authorized_keys() {
		if perm.is_any() {
			helper
				.set_bucket_key_permissions(bucket.id, key_id, BucketKeyPerm::NO_PERMISSIONS)
				.await?;
		}
	}
	// 2. delete all local aliases
	for ((key_id, alias), _, active) in state.local_aliases.items().iter() {
		if *active {
			helper
				.unset_local_bucket_alias(bucket.id, key_id, alias)
				.await?;
		}
	}
	// 3. delete all global aliases
	for (alias, _, active) in state.aliases.items().iter() {
		if *active {
			helper.purge_global_bucket_alias(bucket.id, alias).await?;
		}
	}

	// 4. delete bucket
	bucket.state = Deletable::delete();
	garage.bucket_table.insert(&bucket).await?;

	Ok(Response::builder()
		.status(StatusCode::NO_CONTENT)
		.body(Body::empty())?)
}

// ---- BUCKET WEBSITE CONFIGURATION ----

pub async fn handle_put_bucket_website(
	garage: &Arc<Garage>,
	id: String,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let req = parse_json_body::<PutBucketWebsiteRequest>(req).await?;
	let bucket_id = parse_bucket_id(&id)?;

	let mut bucket = garage
		.bucket_helper()
		.get_existing_bucket(bucket_id)
		.await?;

	let state = bucket.state.as_option_mut().unwrap();
	state.website_config.update(Some(WebsiteConfig {
		index_document: req.index_document,
		error_document: req.error_document,
	}));

	garage.bucket_table.insert(&bucket).await?;

	bucket_info_results(garage, bucket_id).await
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct PutBucketWebsiteRequest {
	index_document: String,
	#[serde(default)]
	error_document: Option<String>,
}
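
Illustrative only (not part of the patch): a PutBucketWebsite request body matching the struct above; errorDocument may be omitted thanks to #[serde(default)].

	let _example_put_website = serde_json::json!({
		"indexDocument": "index.html",
		"errorDocument": "error/400.html"
	});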

pub async fn handle_delete_bucket_website(
	garage: &Arc<Garage>,
	id: String,
) -> Result<Response<Body>, Error> {
	let bucket_id = parse_bucket_id(&id)?;

	let mut bucket = garage
		.bucket_helper()
		.get_existing_bucket(bucket_id)
		.await?;

	let state = bucket.state.as_option_mut().unwrap();
	state.website_config.update(None);

	garage.bucket_table.insert(&bucket).await?;

	bucket_info_results(garage, bucket_id).await
}

// ---- BUCKET/KEY PERMISSIONS ----

pub async fn handle_bucket_change_key_perm(
	garage: &Arc<Garage>,
	req: Request<Body>,
	new_perm_flag: bool,
) -> Result<Response<Body>, Error> {
	let req = parse_json_body::<BucketKeyPermChangeRequest>(req).await?;

	let bucket_id = parse_bucket_id(&req.bucket_id)?;

	let bucket = garage
		.bucket_helper()
		.get_existing_bucket(bucket_id)
		.await?;
	let state = bucket.state.as_option().unwrap();

	let key = garage
		.key_helper()
		.get_existing_key(&req.access_key_id)
		.await?;

	let mut perm = state
		.authorized_keys
		.get(&key.key_id)
		.cloned()
		.unwrap_or(BucketKeyPerm::NO_PERMISSIONS);

	if req.permissions.read {
		perm.allow_read = new_perm_flag;
	}
	if req.permissions.write {
		perm.allow_write = new_perm_flag;
	}
	if req.permissions.owner {
		perm.allow_owner = new_perm_flag;
	}

	garage
		.bucket_helper()
		.set_bucket_key_permissions(bucket.id, &key.key_id, perm)
		.await?;

	bucket_info_results(garage, bucket.id).await
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct BucketKeyPermChangeRequest {
	bucket_id: String,
	access_key_id: String,
	permissions: ApiBucketKeyPerm,
}
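
Illustrative only (not part of the patch): BucketAllowKey and BucketDenyKey share this request shape; only the permission flags set to true in the request are switched to the new value (allow or deny). Identifiers below are placeholders.

	let _example_allow_key = serde_json::json!({
		"bucketId": "e6a14cd6a27f48684579ec6b381c078ab11697e6bc8513b72b2f5307e25fff9b",
		"accessKeyId": "GK31c2f218a2e44f485b94239e",
		"permissions": { "read": true, "write": true, "owner": false }
	});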

// ---- BUCKET ALIASES ----

pub async fn handle_global_alias_bucket(
	garage: &Arc<Garage>,
	bucket_id: String,
	alias: String,
) -> Result<Response<Body>, Error> {
	let bucket_id = parse_bucket_id(&bucket_id)?;

	garage
		.bucket_helper()
		.set_global_bucket_alias(bucket_id, &alias)
		.await?;

	bucket_info_results(garage, bucket_id).await
}

pub async fn handle_global_unalias_bucket(
	garage: &Arc<Garage>,
	bucket_id: String,
	alias: String,
) -> Result<Response<Body>, Error> {
	let bucket_id = parse_bucket_id(&bucket_id)?;

	garage
		.bucket_helper()
		.unset_global_bucket_alias(bucket_id, &alias)
		.await?;

	bucket_info_results(garage, bucket_id).await
}

pub async fn handle_local_alias_bucket(
	garage: &Arc<Garage>,
	bucket_id: String,
	access_key_id: String,
	alias: String,
) -> Result<Response<Body>, Error> {
	let bucket_id = parse_bucket_id(&bucket_id)?;

	garage
		.bucket_helper()
		.set_local_bucket_alias(bucket_id, &access_key_id, &alias)
		.await?;

	bucket_info_results(garage, bucket_id).await
}

pub async fn handle_local_unalias_bucket(
	garage: &Arc<Garage>,
	bucket_id: String,
	access_key_id: String,
	alias: String,
) -> Result<Response<Body>, Error> {
	let bucket_id = parse_bucket_id(&bucket_id)?;

	garage
		.bucket_helper()
		.unset_local_bucket_alias(bucket_id, &access_key_id, &alias)
		.await?;

	bucket_info_results(garage, bucket_id).await
}

// ---- HELPER ----

fn parse_bucket_id(id: &str) -> Result<Uuid, Error> {
	let id_hex = hex::decode(&id).ok_or_bad_request("Invalid bucket id")?;
	Ok(Uuid::try_from(&id_hex).ok_or_bad_request("Invalid bucket id")?)
}

src/api/admin/cluster.rs (new file)
@@ -0,0 +1,198 @@
use std::collections::HashMap;
use std::net::SocketAddr;
use std::sync::Arc;

use hyper::{Body, Request, Response, StatusCode};
use serde::{Deserialize, Serialize};

use garage_util::crdt::*;
use garage_util::data::*;
use garage_util::error::Error as GarageError;

use garage_rpc::layout::*;

use garage_model::garage::Garage;

use crate::admin::error::*;
use crate::helpers::parse_json_body;

pub async fn handle_get_cluster_status(garage: &Arc<Garage>) -> Result<Response<Body>, Error> {
	let res = GetClusterStatusResponse {
		node: hex::encode(garage.system.id),
		garage_version: garage.system.garage_version(),
		known_nodes: garage
			.system
			.get_known_nodes()
			.into_iter()
			.map(|i| {
				(
					hex::encode(i.id),
					KnownNodeResp {
						addr: i.addr,
						is_up: i.is_up,
						last_seen_secs_ago: i.last_seen_secs_ago,
						hostname: i.status.hostname,
					},
				)
			})
			.collect(),
		layout: get_cluster_layout(garage),
	};

	let resp_json = serde_json::to_string_pretty(&res).map_err(GarageError::from)?;
	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::from(resp_json))?)
}

pub async fn handle_connect_cluster_nodes(
	garage: &Arc<Garage>,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let req = parse_json_body::<Vec<String>>(req).await?;

	let res = futures::future::join_all(req.iter().map(|node| garage.system.connect(node)))
		.await
		.into_iter()
		.map(|r| match r {
			Ok(()) => ConnectClusterNodesResponse {
				success: true,
				error: None,
			},
			Err(e) => ConnectClusterNodesResponse {
				success: false,
				error: Some(format!("{}", e)),
			},
		})
		.collect::<Vec<_>>();

	let resp_json = serde_json::to_string_pretty(&res).map_err(GarageError::from)?;
	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::from(resp_json))?)
}

pub async fn handle_get_cluster_layout(garage: &Arc<Garage>) -> Result<Response<Body>, Error> {
	let res = get_cluster_layout(garage);
	let resp_json = serde_json::to_string_pretty(&res).map_err(GarageError::from)?;
	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::from(resp_json))?)
}

fn get_cluster_layout(garage: &Arc<Garage>) -> GetClusterLayoutResponse {
	let layout = garage.system.get_cluster_layout();

	GetClusterLayoutResponse {
		version: layout.version,
		roles: layout
			.roles
			.items()
			.iter()
			.filter(|(_, _, v)| v.0.is_some())
			.map(|(k, _, v)| (hex::encode(k), v.0.clone()))
			.collect(),
		staged_role_changes: layout
			.staging
			.items()
			.iter()
			.filter(|(k, _, v)| layout.roles.get(k) != Some(v))
			.map(|(k, _, v)| (hex::encode(k), v.0.clone()))
			.collect(),
	}
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GetClusterStatusResponse {
	node: String,
	garage_version: &'static str,
	known_nodes: HashMap<String, KnownNodeResp>,
	layout: GetClusterLayoutResponse,
}

#[derive(Serialize)]
struct ConnectClusterNodesResponse {
	success: bool,
	error: Option<String>,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GetClusterLayoutResponse {
	version: u64,
	roles: HashMap<String, Option<NodeRole>>,
	staged_role_changes: HashMap<String, Option<NodeRole>>,
}

#[derive(Serialize)]
struct KnownNodeResp {
	addr: SocketAddr,
	is_up: bool,
	last_seen_secs_ago: Option<u64>,
	hostname: String,
}

pub async fn handle_update_cluster_layout(
	garage: &Arc<Garage>,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let updates = parse_json_body::<UpdateClusterLayoutRequest>(req).await?;

	let mut layout = garage.system.get_cluster_layout();

	let mut roles = layout.roles.clone();
	roles.merge(&layout.staging);

	for (node, role) in updates {
		let node = hex::decode(node).ok_or_bad_request("Invalid node identifier")?;
		let node = Uuid::try_from(&node).ok_or_bad_request("Invalid node identifier")?;

		layout
			.staging
			.merge(&roles.update_mutator(node, NodeRoleV(role)));
	}

	garage.system.update_cluster_layout(&layout).await?;

	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::empty())?)
}

pub async fn handle_apply_cluster_layout(
	garage: &Arc<Garage>,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let param = parse_json_body::<ApplyRevertLayoutRequest>(req).await?;

	let layout = garage.system.get_cluster_layout();
	let layout = layout.apply_staged_changes(Some(param.version))?;
	garage.system.update_cluster_layout(&layout).await?;

	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::empty())?)
}

pub async fn handle_revert_cluster_layout(
	garage: &Arc<Garage>,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let param = parse_json_body::<ApplyRevertLayoutRequest>(req).await?;

	let layout = garage.system.get_cluster_layout();
	let layout = layout.revert_staged_changes(Some(param.version))?;
	garage.system.update_cluster_layout(&layout).await?;

	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::empty())?)
}

type UpdateClusterLayoutRequest = HashMap<String, Option<NodeRole>>;

#[derive(Deserialize)]
struct ApplyRevertLayoutRequest {
	version: u64,
}
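
Illustrative only (not part of the patch): UpdateClusterLayoutRequest is a map from node id (hex) to an optional NodeRole, where null stages removal of the role. The NodeRole field names used below (zone, capacity, tags) are assumed from garage_rpc::layout and are not shown in this diff; the node ids are truncated placeholders.

	let _example_layout_update = serde_json::json!({
		"1919aa8cb4eb6a4b...": { "zone": "dc1", "capacity": 100, "tags": ["gateway"] },
		"e6a14cd6a27f4868...": null
	});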

src/api/admin/error.rs (new file)
@@ -0,0 +1,96 @@
use err_derive::Error;
use hyper::header::HeaderValue;
use hyper::{Body, HeaderMap, StatusCode};

pub use garage_model::helper::error::Error as HelperError;

use crate::common_error::CommonError;
pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
use crate::generic_server::ApiError;
use crate::helpers::CustomApiErrorBody;

/// Errors of this crate
#[derive(Debug, Error)]
pub enum Error {
	#[error(display = "{}", _0)]
	/// Error from common error
	Common(CommonError),

	// Category: cannot process
	/// The API access key does not exist
	#[error(display = "Access key not found: {}", _0)]
	NoSuchAccessKey(String),

	/// In ImportKey, the key already exists
	#[error(
		display = "Key {} already exists in data store. Even if it is deleted, we can't let you create a new key with the same ID. Sorry.",
		_0
	)]
	KeyAlreadyExists(String),
}

impl<T> From<T> for Error
where
	CommonError: From<T>,
{
	fn from(err: T) -> Self {
		Error::Common(CommonError::from(err))
	}
}

impl CommonErrorDerivative for Error {}

impl From<HelperError> for Error {
	fn from(err: HelperError) -> Self {
		match err {
			HelperError::Internal(i) => Self::Common(CommonError::InternalError(i)),
			HelperError::BadRequest(b) => Self::Common(CommonError::BadRequest(b)),
			HelperError::InvalidBucketName(n) => Self::Common(CommonError::InvalidBucketName(n)),
			HelperError::NoSuchBucket(n) => Self::Common(CommonError::NoSuchBucket(n)),
			HelperError::NoSuchAccessKey(n) => Self::NoSuchAccessKey(n),
		}
	}
}

impl Error {
	fn code(&self) -> &'static str {
		match self {
			Error::Common(c) => c.aws_code(),
			Error::NoSuchAccessKey(_) => "NoSuchAccessKey",
			Error::KeyAlreadyExists(_) => "KeyAlreadyExists",
		}
	}
}

impl ApiError for Error {
	/// Get the HTTP status code that best represents the meaning of the error for the client
	fn http_status_code(&self) -> StatusCode {
		match self {
			Error::Common(c) => c.http_status_code(),
			Error::NoSuchAccessKey(_) => StatusCode::NOT_FOUND,
			Error::KeyAlreadyExists(_) => StatusCode::CONFLICT,
		}
	}

	fn add_http_headers(&self, _header_map: &mut HeaderMap<HeaderValue>) {
		// nothing
	}

	fn http_body(&self, garage_region: &str, path: &str) -> Body {
		let error = CustomApiErrorBody {
			code: self.code().to_string(),
			message: format!("{}", self),
			path: path.to_string(),
			region: garage_region.to_string(),
		};
		Body::from(serde_json::to_string_pretty(&error).unwrap_or_else(|_| {
			r#"
{
	"code": "InternalError",
	"message": "JSON encoding of error failed"
}
			"#
			.into()
		}))
	}
}
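
Illustrative only (not part of the patch): the JSON error body produced by http_body() for a NoSuchAccessKey error, serialized from CustomApiErrorBody; the region name and key id are placeholders.

	let _example_error_body = serde_json::json!({
		"code": "NoSuchAccessKey",
		"message": "Access key not found: GK31c2f218a2e44f485b94239e",
		"region": "garage",
		"path": "/v0/key"
	});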

src/api/admin/key.rs (new file)
@@ -0,0 +1,264 @@
use std::collections::HashMap;
use std::sync::Arc;

use hyper::{Body, Request, Response, StatusCode};
use serde::{Deserialize, Serialize};

use garage_util::error::Error as GarageError;

use garage_table::*;

use garage_model::garage::Garage;
use garage_model::key_table::*;

use crate::admin::error::*;
use crate::helpers::parse_json_body;

pub async fn handle_list_keys(garage: &Arc<Garage>) -> Result<Response<Body>, Error> {
	let res = garage
		.key_table
		.get_range(
			&EmptyKey,
			None,
			Some(KeyFilter::Deleted(DeletedFilter::NotDeleted)),
			10000,
			EnumerationOrder::Forward,
		)
		.await?
		.iter()
		.map(|k| ListKeyResultItem {
			id: k.key_id.to_string(),
			name: k.params().unwrap().name.get().clone(),
		})
		.collect::<Vec<_>>();

	let resp_json = serde_json::to_string_pretty(&res).map_err(GarageError::from)?;
	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::from(resp_json))?)
}

#[derive(Serialize)]
struct ListKeyResultItem {
	id: String,
	name: String,
}

pub async fn handle_get_key_info(
	garage: &Arc<Garage>,
	id: Option<String>,
	search: Option<String>,
) -> Result<Response<Body>, Error> {
	let key = if let Some(id) = id {
		garage.key_helper().get_existing_key(&id).await?
	} else if let Some(search) = search {
		garage
			.key_helper()
			.get_existing_matching_key(&search)
			.await?
	} else {
		unreachable!();
	};

	key_info_results(garage, key).await
}

pub async fn handle_create_key(
	garage: &Arc<Garage>,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let req = parse_json_body::<CreateKeyRequest>(req).await?;

	let key = Key::new(&req.name);
	garage.key_table.insert(&key).await?;

	key_info_results(garage, key).await
}

#[derive(Deserialize)]
struct CreateKeyRequest {
	name: String,
}

pub async fn handle_import_key(
	garage: &Arc<Garage>,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let req = parse_json_body::<ImportKeyRequest>(req).await?;

	let prev_key = garage.key_table.get(&EmptyKey, &req.access_key_id).await?;
	if prev_key.is_some() {
		return Err(Error::KeyAlreadyExists(req.access_key_id.to_string()));
	}

	let imported_key = Key::import(&req.access_key_id, &req.secret_access_key, &req.name);
	garage.key_table.insert(&imported_key).await?;

	key_info_results(garage, imported_key).await
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ImportKeyRequest {
	access_key_id: String,
	secret_access_key: String,
	name: String,
}
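
Illustrative only (not part of the patch): an ImportKey request body matching the struct above; the key id and secret are placeholder values.

	let _example_import_key = serde_json::json!({
		"accessKeyId": "GK31c2f218a2e44f485b94239e",
		"secretAccessKey": "b892c0665f0ada8a4755dae98baa3b133590e11dae3bcc1f9d769d67f16c3835",
		"name": "imported-key"
	});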

pub async fn handle_update_key(
	garage: &Arc<Garage>,
	id: String,
	req: Request<Body>,
) -> Result<Response<Body>, Error> {
	let req = parse_json_body::<UpdateKeyRequest>(req).await?;

	let mut key = garage.key_helper().get_existing_key(&id).await?;

	let key_state = key.state.as_option_mut().unwrap();

	if let Some(new_name) = req.name {
		key_state.name.update(new_name);
	}
	if let Some(allow) = req.allow {
		if allow.create_bucket {
			key_state.allow_create_bucket.update(true);
		}
	}
	if let Some(deny) = req.deny {
		if deny.create_bucket {
			key_state.allow_create_bucket.update(false);
		}
	}

	garage.key_table.insert(&key).await?;

	key_info_results(garage, key).await
}

#[derive(Deserialize)]
struct UpdateKeyRequest {
	name: Option<String>,
	allow: Option<KeyPerm>,
	deny: Option<KeyPerm>,
}

pub async fn handle_delete_key(garage: &Arc<Garage>, id: String) -> Result<Response<Body>, Error> {
	let mut key = garage.key_helper().get_existing_key(&id).await?;

	key.state.as_option().unwrap();

	garage.key_helper().delete_key(&mut key).await?;

	Ok(Response::builder()
		.status(StatusCode::NO_CONTENT)
		.body(Body::empty())?)
}

async fn key_info_results(garage: &Arc<Garage>, key: Key) -> Result<Response<Body>, Error> {
	let mut relevant_buckets = HashMap::new();

	let key_state = key.state.as_option().unwrap();

	for id in key_state
		.authorized_buckets
		.items()
		.iter()
		.map(|(id, _)| id)
		.chain(
			key_state
				.local_aliases
				.items()
				.iter()
				.filter_map(|(_, _, v)| v.as_ref()),
		) {
		if !relevant_buckets.contains_key(id) {
			if let Some(b) = garage.bucket_table.get(&EmptyKey, id).await? {
				if b.state.as_option().is_some() {
					relevant_buckets.insert(*id, b);
				}
			}
		}
	}

	let res = GetKeyInfoResult {
		name: key_state.name.get().clone(),
		access_key_id: key.key_id.clone(),
		secret_access_key: key_state.secret_key.clone(),
		permissions: KeyPerm {
			create_bucket: *key_state.allow_create_bucket.get(),
		},
		buckets: relevant_buckets
			.into_iter()
			.map(|(_, bucket)| {
				let state = bucket.state.as_option().unwrap();
				KeyInfoBucketResult {
					id: hex::encode(bucket.id),
					global_aliases: state
						.aliases
						.items()
						.iter()
						.filter(|(_, _, a)| *a)
						.map(|(n, _, _)| n.to_string())
						.collect::<Vec<_>>(),
					local_aliases: state
						.local_aliases
						.items()
						.iter()
						.filter(|((k, _), _, a)| *a && *k == key.key_id)
						.map(|((_, n), _, _)| n.to_string())
						.collect::<Vec<_>>(),
					permissions: key_state
						.authorized_buckets
						.get(&bucket.id)
						.map(|p| ApiBucketKeyPerm {
							read: p.allow_read,
							write: p.allow_write,
							owner: p.allow_owner,
						})
						.unwrap_or_default(),
				}
			})
			.collect::<Vec<_>>(),
	};

	let resp_json = serde_json::to_string_pretty(&res).map_err(GarageError::from)?;
	Ok(Response::builder()
		.status(StatusCode::OK)
		.body(Body::from(resp_json))?)
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GetKeyInfoResult {
	name: String,
	access_key_id: String,
	secret_access_key: String,
	permissions: KeyPerm,
	buckets: Vec<KeyInfoBucketResult>,
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct KeyPerm {
	#[serde(default)]
	create_bucket: bool,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct KeyInfoBucketResult {
	id: String,
	global_aliases: Vec<String>,
	local_aliases: Vec<String>,
	permissions: ApiBucketKeyPerm,
}

#[derive(Serialize, Deserialize, Default)]
pub(crate) struct ApiBucketKeyPerm {
	#[serde(default)]
	pub(crate) read: bool,
	#[serde(default)]
	pub(crate) write: bool,
	#[serde(default)]
	pub(crate) owner: bool,
}

src/api/admin/mod.rs (new file)
@@ -0,0 +1,7 @@
pub mod api_server;
mod error;
mod router;

mod bucket;
mod cluster;
mod key;

src/api/admin/router.rs (new file)
@@ -0,0 +1,149 @@
use std::borrow::Cow;

use hyper::{Method, Request};

use crate::admin::error::*;
use crate::router_macros::*;

pub enum Authorization {
	MetricsToken,
	AdminToken,
}

router_match! {@func

/// List of all Admin API endpoints.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Endpoint {
	Options,
	Metrics,
	GetClusterStatus,
	ConnectClusterNodes,
	// Layout
	GetClusterLayout,
	UpdateClusterLayout,
	ApplyClusterLayout,
	RevertClusterLayout,
	// Keys
	ListKeys,
	CreateKey,
	ImportKey,
	GetKeyInfo {
		id: Option<String>,
		search: Option<String>,
	},
	DeleteKey {
		id: String,
	},
	UpdateKey {
		id: String,
	},
	// Buckets
	ListBuckets,
	CreateBucket,
	GetBucketInfo {
		id: Option<String>,
		global_alias: Option<String>,
	},
	DeleteBucket {
		id: String,
	},
	PutBucketWebsite {
		id: String,
	},
	DeleteBucketWebsite {
		id: String,
	},
	// Bucket-Key Permissions
	BucketAllowKey,
	BucketDenyKey,
	// Bucket aliases
	GlobalAliasBucket {
		id: String,
		alias: String,
	},
	GlobalUnaliasBucket {
		id: String,
		alias: String,
	},
	LocalAliasBucket {
		id: String,
		access_key_id: String,
		alias: String,
	},
	LocalUnaliasBucket {
		id: String,
		access_key_id: String,
		alias: String,
	},
}}

impl Endpoint {
	/// Determine which admin API endpoint a request is for, based on the request's
	/// method, path and query parameters.
	pub fn from_request<T>(req: &Request<T>) -> Result<Self, Error> {
		let uri = req.uri();
		let path = uri.path();
		let query = uri.query();

		let mut query = QueryParameters::from_query(query.unwrap_or_default())?;

		let res = router_match!(@gen_path_parser (req.method(), path, query) [
			OPTIONS _ => Options,
			GET "/metrics" => Metrics,
			GET "/v0/status" => GetClusterStatus,
			POST "/v0/connect" => ConnectClusterNodes,
			// Layout endpoints
			GET "/v0/layout" => GetClusterLayout,
			POST "/v0/layout" => UpdateClusterLayout,
			POST "/v0/layout/apply" => ApplyClusterLayout,
			POST "/v0/layout/revert" => RevertClusterLayout,
			// API key endpoints
			GET "/v0/key" if id => GetKeyInfo (query_opt::id, query_opt::search),
			GET "/v0/key" if search => GetKeyInfo (query_opt::id, query_opt::search),
			POST "/v0/key" if id => UpdateKey (query::id),
			POST "/v0/key" => CreateKey,
			POST "/v0/key/import" => ImportKey,
			DELETE "/v0/key" if id => DeleteKey (query::id),
			GET "/v0/key" => ListKeys,
			// Bucket endpoints
			GET "/v0/bucket" if id => GetBucketInfo (query_opt::id, query_opt::global_alias),
			GET "/v0/bucket" if global_alias => GetBucketInfo (query_opt::id, query_opt::global_alias),
			GET "/v0/bucket" => ListBuckets,
			POST "/v0/bucket" => CreateBucket,
			DELETE "/v0/bucket" if id => DeleteBucket (query::id),
			PUT "/v0/bucket/website" if id => PutBucketWebsite (query::id),
			DELETE "/v0/bucket/website" if id => DeleteBucketWebsite (query::id),
			// Bucket-key permissions
			POST "/v0/bucket/allow" => BucketAllowKey,
			POST "/v0/bucket/deny" => BucketDenyKey,
			// Bucket aliases
			PUT "/v0/bucket/alias/global" => GlobalAliasBucket (query::id, query::alias),
			DELETE "/v0/bucket/alias/global" => GlobalUnaliasBucket (query::id, query::alias),
			PUT "/v0/bucket/alias/local" => LocalAliasBucket (query::id, query::access_key_id, query::alias),
			DELETE "/v0/bucket/alias/local" => LocalUnaliasBucket (query::id, query::access_key_id, query::alias),
		]);

		if let Some(message) = query.nonempty_message() {
			debug!("Unused query parameter: {}", message)
		}

		Ok(res)
	}
	/// Get the kind of authorization which is required to perform the operation.
	pub fn authorization_type(&self) -> Authorization {
		match self {
			Self::Metrics => Authorization::MetricsToken,
			_ => Authorization::AdminToken,
		}
	}
}

generateQueryParameters! {
	"id" => id,
	"search" => search,
	"globalAlias" => global_alias,
	"alias" => alias,
	"accessKeyId" => access_key_id
}
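
Not part of the patch, just a reading aid: a minimal hyper-based sketch of how a client could hit two of the routes above. The listen address (localhost:3903) and the bearer-token Authorization scheme are assumptions taken from the draft admin API spec, not from this diff.

	use hyper::{Body, Client, Method, Request};

	async fn sketch() -> Result<(), Box<dyn std::error::Error>> {
		let client = Client::new();

		// GET /v0/status -> handle_get_cluster_status
		let status = Request::builder()
			.method(Method::GET)
			.uri("http://localhost:3903/v0/status")
			.header("Authorization", "Bearer <admin-token>")
			.body(Body::empty())?;
		let _resp = client.request(status).await?;

		// GET /v0/bucket?globalAlias=my-bucket -> GetBucketInfo looked up by alias
		let bucket_info = Request::builder()
			.method(Method::GET)
			.uri("http://localhost:3903/v0/bucket?globalAlias=my-bucket")
			.header("Authorization", "Bearer <admin-token>")
			.body(Body::empty())?;
		let _resp = client.request(bucket_info).await?;

		Ok(())
	}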

src/api/common_error.rs (new file)
@@ -0,0 +1,177 @@
use err_derive::Error;
use hyper::StatusCode;

use garage_util::error::Error as GarageError;

/// Errors of this crate
#[derive(Debug, Error)]
pub enum CommonError {
	// ---- INTERNAL ERRORS ----
	/// Error related to deeper parts of Garage
	#[error(display = "Internal error: {}", _0)]
	InternalError(#[error(source)] GarageError),

	/// Error related to Hyper
	#[error(display = "Internal error (Hyper error): {}", _0)]
	Hyper(#[error(source)] hyper::Error),

	/// Error related to HTTP
	#[error(display = "Internal error (HTTP error): {}", _0)]
	Http(#[error(source)] http::Error),

	// ---- GENERIC CLIENT ERRORS ----
	/// Proper authentication was not provided
	#[error(display = "Forbidden: {}", _0)]
	Forbidden(String),

	/// Generic bad request response with custom message
	#[error(display = "Bad request: {}", _0)]
	BadRequest(String),

	// ---- SPECIFIC ERROR CONDITIONS ----
	// These have to be error codes referenced in the S3 spec here:
	// https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#ErrorCodeList
	/// The requested bucket does not exist
	#[error(display = "Bucket not found: {}", _0)]
	NoSuchBucket(String),

	/// Tried to create a bucket that already exists
	#[error(display = "Bucket already exists")]
	BucketAlreadyExists,

	/// Tried to delete a non-empty bucket
	#[error(display = "Tried to delete a non-empty bucket")]
	BucketNotEmpty,

	// Category: bad request
	/// Bucket name is not valid according to AWS S3 specs
	#[error(display = "Invalid bucket name: {}", _0)]
	InvalidBucketName(String),
}

impl CommonError {
	pub fn http_status_code(&self) -> StatusCode {
		match self {
			CommonError::InternalError(
				GarageError::Timeout
				| GarageError::RemoteError(_)
				| GarageError::Quorum(_, _, _, _),
			) => StatusCode::SERVICE_UNAVAILABLE,
			CommonError::InternalError(_) | CommonError::Hyper(_) | CommonError::Http(_) => {
				StatusCode::INTERNAL_SERVER_ERROR
			}
			CommonError::BadRequest(_) => StatusCode::BAD_REQUEST,
			CommonError::Forbidden(_) => StatusCode::FORBIDDEN,
			CommonError::NoSuchBucket(_) => StatusCode::NOT_FOUND,
			CommonError::BucketNotEmpty | CommonError::BucketAlreadyExists => StatusCode::CONFLICT,
			CommonError::InvalidBucketName(_) => StatusCode::BAD_REQUEST,
		}
	}

	pub fn aws_code(&self) -> &'static str {
		match self {
			CommonError::Forbidden(_) => "AccessDenied",
			CommonError::InternalError(
				GarageError::Timeout
				| GarageError::RemoteError(_)
				| GarageError::Quorum(_, _, _, _),
			) => "ServiceUnavailable",
			CommonError::InternalError(_) | CommonError::Hyper(_) | CommonError::Http(_) => {
				"InternalError"
			}
			CommonError::BadRequest(_) => "InvalidRequest",
			CommonError::NoSuchBucket(_) => "NoSuchBucket",
			CommonError::BucketAlreadyExists => "BucketAlreadyExists",
			CommonError::BucketNotEmpty => "BucketNotEmpty",
			CommonError::InvalidBucketName(_) => "InvalidBucketName",
		}
	}

	pub fn bad_request<M: ToString>(msg: M) -> Self {
		CommonError::BadRequest(msg.to_string())
	}
}

pub trait CommonErrorDerivative: From<CommonError> {
	fn internal_error<M: ToString>(msg: M) -> Self {
		Self::from(CommonError::InternalError(GarageError::Message(
			msg.to_string(),
		)))
	}

	fn bad_request<M: ToString>(msg: M) -> Self {
		Self::from(CommonError::BadRequest(msg.to_string()))
	}

	fn forbidden<M: ToString>(msg: M) -> Self {
		Self::from(CommonError::Forbidden(msg.to_string()))
	}
}

/// Trait to map error to the Bad Request error code
pub trait OkOrBadRequest {
	type S;
	fn ok_or_bad_request<M: AsRef<str>>(self, reason: M) -> Result<Self::S, CommonError>;
}

impl<T, E> OkOrBadRequest for Result<T, E>
where
	E: std::fmt::Display,
{
	type S = T;
	fn ok_or_bad_request<M: AsRef<str>>(self, reason: M) -> Result<T, CommonError> {
		match self {
			Ok(x) => Ok(x),
			Err(e) => Err(CommonError::BadRequest(format!(
				"{}: {}",
				reason.as_ref(),
				e
			))),
		}
	}
}

impl<T> OkOrBadRequest for Option<T> {
	type S = T;
	fn ok_or_bad_request<M: AsRef<str>>(self, reason: M) -> Result<T, CommonError> {
		match self {
			Some(x) => Ok(x),
			None => Err(CommonError::BadRequest(reason.as_ref().to_string())),
		}
	}
}

/// Trait to map an error to an Internal Error code
pub trait OkOrInternalError {
	type S;
	fn ok_or_internal_error<M: AsRef<str>>(self, reason: M) -> Result<Self::S, CommonError>;
}

impl<T, E> OkOrInternalError for Result<T, E>
where
	E: std::fmt::Display,
{
	type S = T;
	fn ok_or_internal_error<M: AsRef<str>>(self, reason: M) -> Result<T, CommonError> {
		match self {
			Ok(x) => Ok(x),
			Err(e) => Err(CommonError::InternalError(GarageError::Message(format!(
				"{}: {}",
				reason.as_ref(),
				e
			)))),
		}
	}
}

impl<T> OkOrInternalError for Option<T> {
	type S = T;
	fn ok_or_internal_error<M: AsRef<str>>(self, reason: M) -> Result<T, CommonError> {
		match self {
			Some(x) => Ok(x),
			None => Err(CommonError::InternalError(GarageError::Message(
				reason.as_ref().to_string(),
			))),
		}
	}
}
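
Not part of the patch, just a reading aid: the two helper traits let handlers turn an Option or a Result into a CommonError with a custom message, which is the pattern used by parse_bucket_id and the layout handlers above. A minimal sketch:

	use crate::common_error::*;

	fn sketch(maybe_name: Option<&str>) -> Result<String, CommonError> {
		// Option -> BadRequest (HTTP 400) with the given message if None
		let name = maybe_name.ok_or_bad_request("Missing name")?;
		// Result<_, E: Display> -> BadRequest formatted as "reason: original error"
		let n: u64 = name.parse().ok_or_bad_request("Invalid number")?;
		Ok(format!("{}", n))
	}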

@@ -5,9 +5,11 @@ use async_trait::async_trait;
 use futures::future::Future;
+use hyper::header::HeaderValue;
 use hyper::server::conn::AddrStream;
 use hyper::service::{make_service_fn, service_fn};
 use hyper::{Body, Request, Response, Server};
+use hyper::{HeaderMap, StatusCode};
 use opentelemetry::{
 	global,
@@ -19,26 +21,31 @@ use opentelemetry::{
 use garage_util::error::Error as GarageError;
 use garage_util::metrics::{gen_trace_id, RecordDuration};
 
-use crate::error::*;
-
 pub(crate) trait ApiEndpoint: Send + Sync + 'static {
 	fn name(&self) -> &'static str;
 	fn add_span_attributes(&self, span: SpanRef<'_>);
 }
 
+pub trait ApiError: std::error::Error + Send + Sync + 'static {
+	fn http_status_code(&self) -> StatusCode;
+	fn add_http_headers(&self, header_map: &mut HeaderMap<HeaderValue>);
+	fn http_body(&self, garage_region: &str, path: &str) -> Body;
+}
+
 #[async_trait]
 pub(crate) trait ApiHandler: Send + Sync + 'static {
 	const API_NAME: &'static str;
 	const API_NAME_DISPLAY: &'static str;
 
 	type Endpoint: ApiEndpoint;
+	type Error: ApiError;
 
-	fn parse_endpoint(&self, r: &Request<Body>) -> Result<Self::Endpoint, Error>;
+	fn parse_endpoint(&self, r: &Request<Body>) -> Result<Self::Endpoint, Self::Error>;
 	async fn handle(
 		&self,
 		req: Request<Body>,
 		endpoint: Self::Endpoint,
-	) -> Result<Response<Body>, Error>;
+	) -> Result<Response<Body>, Self::Error>;
 }
 
 pub(crate) struct ApiServer<A: ApiHandler> {
@@ -142,13 +149,13 @@ impl<A: ApiHandler> ApiServer<A> {
 				Ok(x)
 			}
 			Err(e) => {
-				let body: Body = Body::from(e.aws_xml(&self.region, uri.path()));
+				let body: Body = e.http_body(&self.region, uri.path());
 				let mut http_error_builder = Response::builder()
 					.status(e.http_status_code())
 					.header("Content-Type", "application/xml");
 
 				if let Some(header_map) = http_error_builder.headers_mut() {
-					e.add_headers(header_map)
+					e.add_http_headers(header_map)
 				}
 
 				let http_error = http_error_builder.body(body)?;
@@ -163,7 +170,7 @@ impl<A: ApiHandler> ApiServer<A> {
 		}
 	}
 
-	async fn handler_stage2(&self, req: Request<Body>) -> Result<Response<Body>, Error> {
+	async fn handler_stage2(&self, req: Request<Body>) -> Result<Response<Body>, A::Error> {
 		let endpoint = self.api_handler.parse_endpoint(&req)?;
 		debug!("Endpoint: {}", endpoint.name());
 
@@ -1,11 +1,8 @@
+use hyper::{Body, Request};
 use idna::domain_to_unicode;
+use serde::{Deserialize, Serialize};
 
-use garage_util::data::*;
-
-use garage_model::garage::Garage;
-use garage_model::key_table::Key;
-
-use crate::error::*;
+use crate::common_error::{CommonError as Error, *};
 
 /// What kind of authorization is required to perform a given action
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -50,7 +47,7 @@ pub fn authority_to_host(authority: &str) -> Result<String, Error> {
 	let mut iter = authority.chars().enumerate();
 	let (_, first_char) = iter
 		.next()
-		.ok_or_else(|| Error::BadRequest("Authority is empty".to_string()))?;
+		.ok_or_else(|| Error::bad_request("Authority is empty".to_string()))?;
 
 	let split = match first_char {
 		'[' => {
@@ -58,7 +55,7 @@ pub fn authority_to_host(authority: &str) -> Result<String, Error> {
 			match iter.next() {
 				Some((_, ']')) => iter.next(),
 				_ => {
-					return Err(Error::BadRequest(format!(
+					return Err(Error::bad_request(format!(
 						"Authority {} has an illegal format",
 						authority
 					)))
@@ -71,7 +68,7 @@ pub fn authority_to_host(authority: &str) -> Result<String, Error> {
 	let authority = match split {
 		Some((i, ':')) => Ok(&authority[..i]),
 		None => Ok(authority),
-		Some((_, _)) => Err(Error::BadRequest(format!(
+		Some((_, _)) => Err(Error::bad_request(format!(
 			"Authority {} has an illegal format",
 			authority
 		))),
@@ -79,28 +76,6 @@ pub fn authority_to_host(authority: &str) -> Result<String, Error> {
 	authority.map(|h| domain_to_unicode(h).0)
 }
 
-#[allow(clippy::ptr_arg)]
-pub async fn resolve_bucket(
-	garage: &Garage,
-	bucket_name: &String,
-	api_key: &Key,
-) -> Result<Uuid, Error> {
-	let api_key_params = api_key
-		.state
-		.as_option()
-		.ok_or_internal_error("Key should not be deleted at this point")?;
-
-	if let Some(Some(bucket_id)) = api_key_params.local_aliases.get(bucket_name) {
-		Ok(*bucket_id)
-	} else {
-		Ok(garage
-			.bucket_helper()
-			.resolve_global_bucket_name(bucket_name)
-			.await?
-			.ok_or(Error::NoSuchBucket)?)
-	}
-}
-
 /// Extract the bucket name and the key name from an HTTP path and possibly a bucket provided in
 /// the host header of the request
 ///
@@ -132,7 +107,7 @@ pub fn parse_bucket_key<'a>(
 		None => (path, None),
 	};
 	if bucket.is_empty() {
-		return Err(Error::BadRequest("No bucket specified".to_string()));
+		return Err(Error::bad_request("No bucket specified"));
 	}
 	Ok((bucket, key))
 }
@@ -163,6 +138,12 @@ pub fn key_after_prefix(pfx: &str) -> Option<String> {
 	None
 }
 
+pub async fn parse_json_body<T: for<'de> Deserialize<'de>>(req: Request<Body>) -> Result<T, Error> {
+	let body = hyper::body::to_bytes(req.into_body()).await?;
+	let resp: T = serde_json::from_slice(&body).ok_or_bad_request("Invalid JSON")?;
+	Ok(resp)
+}
+
 #[cfg(test)]
 mod tests {
 	use super::*;
@@ -298,3 +279,11 @@ mod tests {
 		);
 	}
 }
+
+#[derive(Serialize)]
+pub(crate) struct CustomApiErrorBody {
+	pub(crate) code: String,
+	pub(crate) message: String,
+	pub(crate) region: String,
+	pub(crate) path: String,
+}
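
Not part of the patch, just a reading aid: parse_json_body is what the admin and K2V handlers above use to read JSON request bodies. A minimal sketch of a call site, with a hypothetical request type:

	use serde::Deserialize;

	#[derive(Deserialize)]
	struct ExampleBody {
		name: String,
	}

	async fn sketch(
		req: hyper::Request<hyper::Body>,
	) -> Result<ExampleBody, crate::common_error::CommonError> {
		// Produces a BadRequest ("Invalid JSON: ...") if the body does not deserialize
		crate::helpers::parse_json_body::<ExampleBody>(req).await
	}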
|
||||||
|
@ -7,13 +7,12 @@ use hyper::{Body, Method, Request, Response};
|
|||||||
|
|
||||||
use opentelemetry::{trace::SpanRef, KeyValue};
|
use opentelemetry::{trace::SpanRef, KeyValue};
|
||||||
|
|
||||||
use garage_table::util::*;
|
|
||||||
use garage_util::error::Error as GarageError;
|
use garage_util::error::Error as GarageError;
|
||||||
|
|
||||||
use garage_model::garage::Garage;
|
use garage_model::garage::Garage;
|
||||||
|
|
||||||
use crate::error::*;
|
|
||||||
use crate::generic_server::*;
|
use crate::generic_server::*;
|
||||||
|
use crate::k2v::error::*;
|
||||||
|
|
||||||
use crate::signature::payload::check_payload_signature;
|
use crate::signature::payload::check_payload_signature;
|
||||||
use crate::signature::streaming::*;
|
use crate::signature::streaming::*;
|
||||||
@ -60,6 +59,7 @@ impl ApiHandler for K2VApiServer {
|
|||||||
const API_NAME_DISPLAY: &'static str = "K2V";
|
const API_NAME_DISPLAY: &'static str = "K2V";
|
||||||
|
|
||||||
type Endpoint = K2VApiEndpoint;
|
type Endpoint = K2VApiEndpoint;
|
||||||
|
type Error = Error;
|
||||||
|
|
||||||
fn parse_endpoint(&self, req: &Request<Body>) -> Result<K2VApiEndpoint, Error> {
|
fn parse_endpoint(&self, req: &Request<Body>) -> Result<K2VApiEndpoint, Error> {
|
||||||
let (endpoint, bucket_name) = Endpoint::from_request(req)?;
|
let (endpoint, bucket_name) = Endpoint::from_request(req)?;
|
||||||
@ -83,13 +83,14 @@ impl ApiHandler for K2VApiServer {
|
|||||||
|
|
||||||
// The OPTIONS method is procesed early, before we even check for an API key
|
// The OPTIONS method is procesed early, before we even check for an API key
|
||||||
if let Endpoint::Options = endpoint {
|
if let Endpoint::Options = endpoint {
|
||||||
return handle_options_s3api(garage, &req, Some(bucket_name)).await;
|
return Ok(handle_options_s3api(garage, &req, Some(bucket_name))
|
||||||
|
.await
|
||||||
|
.ok_or_bad_request("Error handling OPTIONS")?);
|
||||||
}
|
}
|
||||||
|
|
||||||
let (api_key, mut content_sha256) = check_payload_signature(&garage, "k2v", &req).await?;
|
let (api_key, mut content_sha256) = check_payload_signature(&garage, "k2v", &req).await?;
|
||||||
let api_key = api_key.ok_or_else(|| {
|
let api_key = api_key
|
||||||
Error::Forbidden("Garage does not support anonymous access yet".to_string())
|
.ok_or_else(|| Error::forbidden("Garage does not support anonymous access yet"))?;
|
||||||
})?;
|
|
||||||
|
|
||||||
let req = parse_streaming_body(
|
let req = parse_streaming_body(
|
||||||
&api_key,
|
&api_key,
|
||||||
@ -99,13 +100,14 @@ impl ApiHandler for K2VApiServer {
|
|||||||
"k2v",
|
"k2v",
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let bucket_id = resolve_bucket(&garage, &bucket_name, &api_key).await?;
|
let bucket_id = garage
|
||||||
|
.bucket_helper()
|
||||||
|
.resolve_bucket(&bucket_name, &api_key)
|
||||||
|
.await?;
|
||||||
let bucket = garage
|
let bucket = garage
|
||||||
.bucket_table
|
.bucket_helper()
|
||||||
.get(&EmptyKey, &bucket_id)
|
.get_existing_bucket(bucket_id)
|
||||||
.await?
|
.await?;
|
||||||
.filter(|b| !b.state.is_deleted())
|
|
||||||
.ok_or(Error::NoSuchBucket)?;
|
|
||||||
|
|
||||||
let allowed = match endpoint.authorization_type() {
|
let allowed = match endpoint.authorization_type() {
|
||||||
Authorization::Read => api_key.allow_read(&bucket_id),
|
Authorization::Read => api_key.allow_read(&bucket_id),
|
||||||
@ -115,9 +117,7 @@ impl ApiHandler for K2VApiServer {
|
|||||||
};
|
};
|
||||||
|
|
||||||
if !allowed {
|
if !allowed {
|
||||||
return Err(Error::Forbidden(
|
return Err(Error::forbidden("Operation is not allowed for this key."));
|
||||||
"Operation is not allowed for this key.".to_string(),
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Look up what CORS rule might apply to response.
|
// Look up what CORS rule might apply to response.
|
||||||
@ -125,7 +125,8 @@ impl ApiHandler for K2VApiServer {
|
|||||||
// are always preflighted, i.e. the browser should make
|
// are always preflighted, i.e. the browser should make
|
||||||
// an OPTIONS call before to check it is allowed
|
// an OPTIONS call before to check it is allowed
|
||||||
let matching_cors_rule = match *req.method() {
|
let matching_cors_rule = match *req.method() {
|
||||||
Method::GET | Method::HEAD | Method::POST => find_matching_cors_rule(&bucket, &req)?,
|
Method::GET | Method::HEAD | Method::POST => find_matching_cors_rule(&bucket, &req)
|
||||||
|
.ok_or_internal_error("Error looking up CORS rule")?,
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -12,7 +12,8 @@ use garage_model::garage::Garage;
 use garage_model::k2v::causality::*;
 use garage_model::k2v::item_table::*;

-use crate::error::*;
+use crate::helpers::*;
+use crate::k2v::error::*;
 use crate::k2v::range::read_range;

 pub async fn handle_insert_batch(
@@ -20,9 +21,7 @@ pub async fn handle_insert_batch(
	bucket_id: Uuid,
	req: Request<Body>,
 ) -> Result<Response<Body>, Error> {
-	let body = hyper::body::to_bytes(req.into_body()).await?;
-	let items: Vec<InsertBatchItem> =
-		serde_json::from_slice(&body).ok_or_bad_request("Invalid JSON")?;
+	let items = parse_json_body::<Vec<InsertBatchItem>>(req).await?;

	let mut items2 = vec![];
	for it in items {
@@ -52,9 +51,7 @@ pub async fn handle_read_batch(
	bucket_id: Uuid,
	req: Request<Body>,
 ) -> Result<Response<Body>, Error> {
-	let body = hyper::body::to_bytes(req.into_body()).await?;
-	let queries: Vec<ReadBatchQuery> =
-		serde_json::from_slice(&body).ok_or_bad_request("Invalid JSON")?;
+	let queries = parse_json_body::<Vec<ReadBatchQuery>>(req).await?;

	let resp_results = futures::future::join_all(
		queries
@@ -91,7 +88,7 @@ async fn handle_read_batch_query(

	let (items, more, next_start) = if query.single_item {
		if query.prefix.is_some() || query.end.is_some() || query.limit.is_some() || query.reverse {
-			return Err(Error::BadRequest("Batch query parameters 'prefix', 'end', 'limit' and 'reverse' must not be set when singleItem is true.".into()));
+			return Err(Error::bad_request("Batch query parameters 'prefix', 'end', 'limit' and 'reverse' must not be set when singleItem is true."));
		}
		let sk = query
			.start
@@ -149,9 +146,7 @@ pub async fn handle_delete_batch(
	bucket_id: Uuid,
	req: Request<Body>,
 ) -> Result<Response<Body>, Error> {
-	let body = hyper::body::to_bytes(req.into_body()).await?;
-	let queries: Vec<DeleteBatchQuery> =
-		serde_json::from_slice(&body).ok_or_bad_request("Invalid JSON")?;
+	let queries = parse_json_body::<Vec<DeleteBatchQuery>>(req).await?;

	let resp_results = futures::future::join_all(
		queries
@@ -188,7 +183,7 @@ async fn handle_delete_batch_query(

	let deleted_items = if query.single_item {
		if query.prefix.is_some() || query.end.is_some() {
-			return Err(Error::BadRequest("Batch query parameters 'prefix' and 'end' must not be set when singleItem is true.".into()));
+			return Err(Error::bad_request("Batch query parameters 'prefix' and 'end' must not be set when singleItem is true."));
		}
		let sk = query
			.start
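The batch handlers above now delegate request decoding to a shared parse_json_body helper instead of reading and deserializing the body by hand in every endpoint. A minimal sketch of what such a helper can look like; the exact name and signature in Garage's helpers module may differ, and the error type below is a stand-in:

use hyper::{Body, Request};
use serde::de::DeserializeOwned;

// Hypothetical error type standing in for the crate's API error.
#[derive(Debug)]
struct ApiError(String);

// Read the whole request body and decode it as JSON, mapping both I/O and
// decoding failures to a "bad request"-style error.
async fn parse_json_body<T: DeserializeOwned>(req: Request<Body>) -> Result<T, ApiError> {
    let body = hyper::body::to_bytes(req.into_body())
        .await
        .map_err(|e| ApiError(format!("Could not read body: {}", e)))?;
    serde_json::from_slice(&body).map_err(|e| ApiError(format!("Invalid JSON: {}", e)))
}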
src/api/k2v/error.rs (new file, 134 lines)

use err_derive::Error;
use hyper::header::HeaderValue;
use hyper::{Body, HeaderMap, StatusCode};

use garage_model::helper::error::Error as HelperError;

use crate::common_error::CommonError;
pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
use crate::generic_server::ApiError;
use crate::helpers::CustomApiErrorBody;
use crate::signature::error::Error as SignatureError;

/// Errors of this crate
#[derive(Debug, Error)]
pub enum Error {
	#[error(display = "{}", _0)]
	/// Error from common error
	Common(CommonError),

	// Category: cannot process
	/// Authorization Header Malformed
	#[error(display = "Authorization header malformed, expected scope: {}", _0)]
	AuthorizationHeaderMalformed(String),

	/// The object requested don't exists
	#[error(display = "Key not found")]
	NoSuchKey,

	/// Some base64 encoded data was badly encoded
	#[error(display = "Invalid base64: {}", _0)]
	InvalidBase64(#[error(source)] base64::DecodeError),

	/// The client sent a header with invalid value
	#[error(display = "Invalid header value: {}", _0)]
	InvalidHeader(#[error(source)] hyper::header::ToStrError),

	/// The client asked for an invalid return format (invalid Accept header)
	#[error(display = "Not acceptable: {}", _0)]
	NotAcceptable(String),

	/// The request contained an invalid UTF-8 sequence in its path or in other parameters
	#[error(display = "Invalid UTF-8: {}", _0)]
	InvalidUtf8Str(#[error(source)] std::str::Utf8Error),
}

impl<T> From<T> for Error
where
	CommonError: From<T>,
{
	fn from(err: T) -> Self {
		Error::Common(CommonError::from(err))
	}
}

impl CommonErrorDerivative for Error {}

impl From<HelperError> for Error {
	fn from(err: HelperError) -> Self {
		match err {
			HelperError::Internal(i) => Self::Common(CommonError::InternalError(i)),
			HelperError::BadRequest(b) => Self::Common(CommonError::BadRequest(b)),
			HelperError::InvalidBucketName(n) => Self::Common(CommonError::InvalidBucketName(n)),
			HelperError::NoSuchBucket(n) => Self::Common(CommonError::NoSuchBucket(n)),
			e => Self::Common(CommonError::BadRequest(format!("{}", e))),
		}
	}
}

impl From<SignatureError> for Error {
	fn from(err: SignatureError) -> Self {
		match err {
			SignatureError::Common(c) => Self::Common(c),
			SignatureError::AuthorizationHeaderMalformed(c) => {
				Self::AuthorizationHeaderMalformed(c)
			}
			SignatureError::InvalidUtf8Str(i) => Self::InvalidUtf8Str(i),
			SignatureError::InvalidHeader(h) => Self::InvalidHeader(h),
		}
	}
}

impl Error {
	/// This returns a keyword for the corresponding error.
	/// Here, these keywords are not necessarily those from AWS S3,
	/// as we are building a custom API
	fn code(&self) -> &'static str {
		match self {
			Error::Common(c) => c.aws_code(),
			Error::NoSuchKey => "NoSuchKey",
			Error::NotAcceptable(_) => "NotAcceptable",
			Error::AuthorizationHeaderMalformed(_) => "AuthorizationHeaderMalformed",
			Error::InvalidBase64(_) => "InvalidBase64",
			Error::InvalidHeader(_) => "InvalidHeaderValue",
			Error::InvalidUtf8Str(_) => "InvalidUtf8String",
		}
	}
}

impl ApiError for Error {
	/// Get the HTTP status code that best represents the meaning of the error for the client
	fn http_status_code(&self) -> StatusCode {
		match self {
			Error::Common(c) => c.http_status_code(),
			Error::NoSuchKey => StatusCode::NOT_FOUND,
			Error::NotAcceptable(_) => StatusCode::NOT_ACCEPTABLE,
			Error::AuthorizationHeaderMalformed(_)
			| Error::InvalidBase64(_)
			| Error::InvalidHeader(_)
			| Error::InvalidUtf8Str(_) => StatusCode::BAD_REQUEST,
		}
	}

	fn add_http_headers(&self, _header_map: &mut HeaderMap<HeaderValue>) {
		// nothing
	}

	fn http_body(&self, garage_region: &str, path: &str) -> Body {
		let error = CustomApiErrorBody {
			code: self.code().to_string(),
			message: format!("{}", self),
			path: path.to_string(),
			region: garage_region.to_string(),
		};
		Body::from(serde_json::to_string_pretty(&error).unwrap_or_else(|_| {
			r#"
{
	"code": "InternalError",
	"message": "JSON encoding of error failed"
}
			"#
			.into()
		}))
	}
}
@@ -12,7 +12,7 @@ use garage_table::util::*;
 use garage_model::garage::Garage;
 use garage_model::k2v::counter_table::{BYTES, CONFLICTS, ENTRIES, VALUES};

-use crate::error::*;
+use crate::k2v::error::*;
 use crate::k2v::range::read_range;

 pub async fn handle_read_index(
@@ -10,7 +10,7 @@ use garage_model::garage::Garage;
 use garage_model::k2v::causality::*;
 use garage_model::k2v::item_table::*;

-use crate::error::*;
+use crate::k2v::error::*;

 pub const X_GARAGE_CAUSALITY_TOKEN: &str = "X-Garage-Causality-Token";
@@ -1,4 +1,5 @@
 pub mod api_server;
+mod error;
 mod router;

 mod batch;
@@ -7,8 +7,8 @@ use std::sync::Arc;
 use garage_table::replication::TableShardedReplication;
 use garage_table::*;

-use crate::error::*;
 use crate::helpers::key_after_prefix;
+use crate::k2v::error::*;

 /// Read range in a Garage table.
 /// Returns (entries, more?, nextStart)
@@ -31,7 +31,7 @@ where
		(None, Some(s)) => (Some(s.clone()), false),
		(Some(p), Some(s)) => {
			if !s.starts_with(p) {
-				return Err(Error::BadRequest(format!(
+				return Err(Error::bad_request(format!(
					"Start key '{}' does not start with prefix '{}'",
					s, p
				)));
@@ -1,4 +1,4 @@
-use crate::error::*;
+use crate::k2v::error::*;

 use std::borrow::Cow;

@@ -62,7 +62,7 @@ impl Endpoint {
			.unwrap_or((path.to_owned(), ""));

		if bucket.is_empty() {
-			return Err(Error::BadRequest("Missing bucket name".to_owned()));
+			return Err(Error::bad_request("Missing bucket name"));
		}

		if *req.method() == Method::OPTIONS {
@@ -83,7 +83,7 @@ impl Endpoint {
			Method::PUT => Self::from_put(partition_key, &mut query)?,
			Method::DELETE => Self::from_delete(partition_key, &mut query)?,
			_ if req.method() == method_search => Self::from_search(partition_key, &mut query)?,
-			_ => return Err(Error::BadRequest("Unknown method".to_owned())),
+			_ => return Err(Error::bad_request("Unknown method")),
		};

		if let Some(message) = query.nonempty_message() {
@@ -2,16 +2,16 @@
 #[macro_use]
 extern crate tracing;

-pub mod error;
-pub use error::Error;
+pub mod common_error;

 mod encoding;
-mod generic_server;
+pub mod generic_server;
 pub mod helpers;
 mod router_macros;
 /// This mode is public only to help testing. Don't expect stability here
 pub mod signature;

+pub mod admin;
 #[cfg(feature = "k2v")]
 pub mod k2v;
 pub mod s3;
@@ -23,6 +23,29 @@ macro_rules! router_match {
			_ => None
		}
	}};
+	(@gen_path_parser ($method:expr, $reqpath:expr, $query:expr)
+	[
+		$($meth:ident $path:pat $(if $required:ident)? => $api:ident $(($($conv:ident :: $param:ident),*))?,)*
+	]) => {{
+		{
+			use Endpoint::*;
+			match ($method, $reqpath) {
+				$(
+					(&Method::$meth, $path) if true $(&& $query.$required.is_some())? => $api {
+						$($(
+							$param: router_match!(@@parse_param $query, $conv, $param),
+						)*)?
+					},
+				)*
+				(m, p) => {
+					return Err(Error::bad_request(format!(
+						"Unknown API endpoint: {} {}",
+						m, p
+					)))
+				}
+			}
+		}
+	}};
	(@gen_parser ($keyword:expr, $key:ident, $query:expr, $header:expr),
	key: [$($kw_k:ident $(if $required_k:ident)? $(header $header_k:expr)? => $api_k:ident $(($($conv_k:ident :: $param_k:ident),*))?,)*],
	no_key: [$($kw_nk:ident $(if $required_nk:ident)? $(if_header $header_nk:expr)? => $api_nk:ident $(($($conv_nk:ident :: $param_nk:ident),*))?,)*]) => {{
@@ -55,7 +78,7 @@ macro_rules! router_match {
			)*)?
			}),
			)*
-			(kw, _) => Err(Error::BadRequest(format!("Invalid endpoint: {}", kw)))
+			(kw, _) => Err(Error::bad_request(format!("Invalid endpoint: {}", kw)))
		}
	}};

@@ -74,14 +97,14 @@ macro_rules! router_match {
			.take()
			.map(|param| param.parse())
			.transpose()
-			.map_err(|_| Error::BadRequest("Failed to parse query parameter".to_owned()))?
+			.map_err(|_| Error::bad_request("Failed to parse query parameter"))?
	}};
	(@@parse_param $query:expr, parse, $param:ident) => {{
		// extract and parse mandatory query parameter
		// both missing and un-parseable parameters are reported as errors
		$query.$param.take().ok_or_bad_request("Missing argument for endpoint")?
			.parse()
-			.map_err(|_| Error::BadRequest("Failed to parse query parameter".to_owned()))?
+			.map_err(|_| Error::bad_request("Failed to parse query parameter"))?
	}};
	(@func
	$(#[$doc:meta])*
@@ -150,7 +173,7 @@ macro_rules! generateQueryParameters {
				false
			} else if v.as_ref().is_empty() {
				if res.keyword.replace(k).is_some() {
-					return Err(Error::BadRequest("Multiple keywords".to_owned()));
+					return Err(Error::bad_request("Multiple keywords"));
				}
				continue;
			} else {
@@ -160,7 +183,7 @@ macro_rules! generateQueryParameters {
			}
		};
		if repeated {
-			return Err(Error::BadRequest(format!(
+			return Err(Error::bad_request(format!(
				"Query parameter repeated: '{}'",
				k
			)));
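The new @gen_path_parser arm generates a match over the HTTP method and request path that produces an endpoint enum variant and falls back to a bad-request error for unknown routes. Roughly, an invocation for a small admin-style router could expand to something like the hand-written equivalent below; the endpoint names and error type are illustrative only:

use hyper::Method;

#[derive(Debug)]
enum Endpoint {
    GetClusterStatus,
    ListKeys,
    CreateKey,
}

#[derive(Debug)]
struct Error(String);

impl Error {
    fn bad_request(msg: impl ToString) -> Self {
        Error(msg.to_string())
    }
}

// Hand-written equivalent of what the macro-generated parser does:
// dispatch on method + path, error out on anything unknown.
fn parse_endpoint(method: &Method, path: &str) -> Result<Endpoint, Error> {
    match (method, path) {
        (&Method::GET, "/status") => Ok(Endpoint::GetClusterStatus),
        (&Method::GET, "/key") => Ok(Endpoint::ListKeys),
        (&Method::POST, "/key") => Ok(Endpoint::CreateKey),
        (m, p) => Err(Error::bad_request(format!("Unknown API endpoint: {} {}", m, p))),
    }
}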
@@ -8,14 +8,13 @@ use hyper::{Body, Method, Request, Response};

 use opentelemetry::{trace::SpanRef, KeyValue};

-use garage_table::util::*;
 use garage_util::error::Error as GarageError;

 use garage_model::garage::Garage;
 use garage_model::key_table::Key;

-use crate::error::*;
 use crate::generic_server::*;
+use crate::s3::error::*;

 use crate::signature::payload::check_payload_signature;
 use crate::signature::streaming::*;
@@ -75,6 +74,7 @@ impl ApiHandler for S3ApiServer {
	const API_NAME_DISPLAY: &'static str = "S3";

	type Endpoint = S3ApiEndpoint;
+	type Error = Error;

	fn parse_endpoint(&self, req: &Request<Body>) -> Result<S3ApiEndpoint, Error> {
		let authority = req
@@ -122,9 +122,8 @@ impl ApiHandler for S3ApiServer {
		}

		let (api_key, mut content_sha256) = check_payload_signature(&garage, "s3", &req).await?;
-		let api_key = api_key.ok_or_else(|| {
-			Error::Forbidden("Garage does not support anonymous access yet".to_string())
-		})?;
+		let api_key = api_key
+			.ok_or_else(|| Error::forbidden("Garage does not support anonymous access yet"))?;

		let req = parse_streaming_body(
			&api_key,
@@ -148,13 +147,14 @@ impl ApiHandler for S3ApiServer {
			return handle_create_bucket(&garage, req, content_sha256, api_key, bucket_name).await;
		}

-		let bucket_id = resolve_bucket(&garage, &bucket_name, &api_key).await?;
+		let bucket_id = garage
+			.bucket_helper()
+			.resolve_bucket(&bucket_name, &api_key)
+			.await?;
		let bucket = garage
-			.bucket_table
-			.get(&EmptyKey, &bucket_id)
-			.await?
-			.filter(|b| !b.state.is_deleted())
-			.ok_or(Error::NoSuchBucket)?;
+			.bucket_helper()
+			.get_existing_bucket(bucket_id)
+			.await?;

		let allowed = match endpoint.authorization_type() {
			Authorization::Read => api_key.allow_read(&bucket_id),
@@ -164,9 +164,7 @@ impl ApiHandler for S3ApiServer {
		};

		if !allowed {
-			return Err(Error::Forbidden(
-				"Operation is not allowed for this key.".to_string(),
-			));
+			return Err(Error::forbidden("Operation is not allowed for this key."));
		}

		// Look up what CORS rule might apply to response.
@@ -309,7 +307,7 @@ impl ApiHandler for S3ApiServer {
			)
			.await
		} else {
-			Err(Error::BadRequest(format!(
+			Err(Error::bad_request(format!(
				"Invalid endpoint: list-type={}",
				list_type
			)))
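The S3 server now declares its own associated Error type on ApiHandler, so the generic server can format whatever error a given API flavour produces without knowing its concrete type. A simplified sketch of that design; the traits below are stand-ins for Garage's generic_server module, not its real definitions:

use hyper::StatusCode;

// Simplified stand-in for the ApiError trait of the generic server.
trait ApiError: std::error::Error {
    fn http_status_code(&self) -> StatusCode;
    fn http_body(&self, garage_region: &str, path: &str) -> String;
}

// Each API flavour (S3, K2V, admin) implements this with its own error type.
trait ApiHandler {
    type Error: ApiError;
    fn handle(&self, path: &str) -> Result<String, Self::Error>;
}

// The generic server only relies on the ApiError contract to build a response.
fn serve<H: ApiHandler>(handler: &H, path: &str) -> (StatusCode, String) {
    match handler.handle(path) {
        Ok(body) => (StatusCode::OK, body),
        Err(e) => (e.http_status_code(), e.http_body("garage", path)),
    }
}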
@@ -8,13 +8,13 @@ use garage_model::bucket_table::Bucket;
 use garage_model::garage::Garage;
 use garage_model::key_table::Key;
 use garage_model::permission::BucketKeyPerm;
-use garage_model::s3::object_table::ObjectFilter;
 use garage_table::util::*;
 use garage_util::crdt::*;
 use garage_util::data::*;
 use garage_util::time::*;

-use crate::error::*;
+use crate::common_error::CommonError;
+use crate::s3::error::*;
 use crate::s3::xml as s3_xml;
 use crate::signature::verify_signed_content;

@@ -130,7 +130,7 @@ pub async fn handle_create_bucket(

	if let Some(location_constraint) = cmd {
		if location_constraint != garage.config.s3_api.s3_region {
-			return Err(Error::BadRequest(format!(
+			return Err(Error::bad_request(format!(
				"Cannot satisfy location constraint `{}`: buckets can only be created in region `{}`",
				location_constraint,
				garage.config.s3_api.s3_region
@@ -158,12 +158,12 @@ pub async fn handle_create_bucket(
		// otherwise return a forbidden error.
		let kp = api_key.bucket_permissions(&bucket_id);
		if !(kp.allow_write || kp.allow_owner) {
-			return Err(Error::BucketAlreadyExists);
+			return Err(CommonError::BucketAlreadyExists.into());
		}
	} else {
		// Create the bucket!
		if !is_valid_bucket_name(&bucket_name) {
-			return Err(Error::BadRequest(format!(
+			return Err(Error::bad_request(format!(
				"{}: {}",
				bucket_name, INVALID_BUCKET_NAME_MESSAGE
			)));
@@ -228,18 +228,8 @@ pub async fn handle_delete_bucket(
	// Delete bucket

	// Check bucket is empty
-	let objects = garage
-		.object_table
-		.get_range(
-			&bucket_id,
-			None,
-			Some(ObjectFilter::IsData),
-			10,
-			EnumerationOrder::Forward,
-		)
-		.await?;
-	if !objects.is_empty() {
-		return Err(Error::BucketNotEmpty);
+	if !garage.bucket_helper().is_bucket_empty(bucket_id).await? {
+		return Err(CommonError::BucketNotEmpty.into());
	}

	// --- done checking, now commit ---
@@ -18,8 +18,8 @@ use garage_model::s3::block_ref_table::*;
 use garage_model::s3::object_table::*;
 use garage_model::s3::version_table::*;

-use crate::error::*;
-use crate::helpers::{parse_bucket_key, resolve_bucket};
+use crate::helpers::parse_bucket_key;
+use crate::s3::error::*;
 use crate::s3::put::{decode_upload_id, get_headers};
 use crate::s3::xml::{self as s3_xml, xmlns_tag};

@@ -201,8 +201,8 @@ pub async fn handle_upload_part_copy(
	let mut ranges = http_range::HttpRange::parse(range_str, source_version_meta.size)
		.map_err(|e| (e, source_version_meta.size))?;
	if ranges.len() != 1 {
-		return Err(Error::BadRequest(
-			"Invalid x-amz-copy-source-range header: exactly 1 range must be given".into(),
+		return Err(Error::bad_request(
+			"Invalid x-amz-copy-source-range header: exactly 1 range must be given",
		));
	} else {
		ranges.pop().unwrap()
@@ -230,8 +230,8 @@ pub async fn handle_upload_part_copy(
			// This is only for small files, we don't bother handling this.
			// (in AWS UploadPartCopy works for parts at least 5MB which
			// is never the case of an inline object)
-			return Err(Error::BadRequest(
-				"Source object is too small (minimum part size is 5Mb)".into(),
+			return Err(Error::bad_request(
+				"Source object is too small (minimum part size is 5Mb)",
			));
		}
		ObjectVersionData::FirstBlock(_meta, _first_block_hash) => (),
@@ -250,7 +250,7 @@ pub async fn handle_upload_part_copy(
	// Check this part number hasn't yet been uploaded
	if let Some(dv) = dest_version {
		if dv.has_part_number(part_number) {
-			return Err(Error::BadRequest(format!(
+			return Err(Error::bad_request(format!(
				"Part number {} has already been uploaded",
				part_number
			)));
@@ -413,10 +413,13 @@ async fn get_copy_source(
	let copy_source = percent_encoding::percent_decode_str(copy_source).decode_utf8()?;

	let (source_bucket, source_key) = parse_bucket_key(&copy_source, None)?;
-	let source_bucket_id = resolve_bucket(garage, &source_bucket.to_string(), api_key).await?;
+	let source_bucket_id = garage
+		.bucket_helper()
+		.resolve_bucket(&source_bucket.to_string(), api_key)
+		.await?;

	if !api_key.allow_read(&source_bucket_id) {
-		return Err(Error::Forbidden(format!(
+		return Err(Error::forbidden(format!(
			"Reading from bucket {} not allowed for this key",
			source_bucket
		)));
@@ -536,8 +539,8 @@ impl CopyPreconditionHeaders {
			(None, None, None, Some(ims)) => v_date > *ims,
			(None, None, None, None) => true,
			_ => {
-				return Err(Error::BadRequest(
-					"Invalid combination of x-amz-copy-source-if-xxxxx headers".into(),
+				return Err(Error::bad_request(
					"Invalid combination of x-amz-copy-source-if-xxxxx headers",
				))
			}
		};
@@ -9,13 +9,12 @@ use hyper::{header::HeaderName, Body, Method, Request, Response, StatusCode};

 use serde::{Deserialize, Serialize};

-use crate::error::*;
+use crate::s3::error::*;
 use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
 use crate::signature::verify_signed_content;

 use garage_model::bucket_table::{Bucket, CorsRule as GarageCorsRule};
 use garage_model::garage::Garage;
-use garage_table::*;
 use garage_util::data::*;

 pub async fn handle_get_cors(bucket: &Bucket) -> Result<Response<Body>, Error> {
@@ -48,14 +47,11 @@ pub async fn handle_delete_cors(
	bucket_id: Uuid,
 ) -> Result<Response<Body>, Error> {
	let mut bucket = garage
-		.bucket_table
-		.get(&EmptyKey, &bucket_id)
-		.await?
-		.ok_or(Error::NoSuchBucket)?;
+		.bucket_helper()
+		.get_existing_bucket(bucket_id)
+		.await?;

-	let param = bucket
-		.params_mut()
-		.ok_or_internal_error("Bucket should not be deleted at this point")?;
+	let param = bucket.params_mut().unwrap();

	param.cors_config.update(None);
	garage.bucket_table.insert(&bucket).await?;
@@ -78,14 +74,11 @@ pub async fn handle_put_cors(
	}

	let mut bucket = garage
-		.bucket_table
-		.get(&EmptyKey, &bucket_id)
-		.await?
-		.ok_or(Error::NoSuchBucket)?;
+		.bucket_helper()
+		.get_existing_bucket(bucket_id)
+		.await?;

-	let param = bucket
-		.params_mut()
-		.ok_or_internal_error("Bucket should not be deleted at this point")?;
+	let param = bucket.params_mut().unwrap();

	let conf: CorsConfiguration = from_reader(&body as &[u8])?;
	conf.validate()?;
@@ -119,12 +112,7 @@ pub async fn handle_options_s3api(
	let helper = garage.bucket_helper();
	let bucket_id = helper.resolve_global_bucket_name(&bn).await?;
	if let Some(id) = bucket_id {
-		let bucket = garage
-			.bucket_table
-			.get(&EmptyKey, &id)
-			.await?
-			.filter(|b| !b.state.is_deleted())
-			.ok_or(Error::NoSuchBucket)?;
+		let bucket = garage.bucket_helper().get_existing_bucket(id).await?;
		handle_options_for_bucket(req, &bucket)
	} else {
		// If there is a bucket name in the request, but that name
@@ -185,7 +173,7 @@ pub fn handle_options_for_bucket(
		}
	}

-	Err(Error::Forbidden("This CORS request is not allowed.".into()))
+	Err(Error::forbidden("This CORS request is not allowed."))
 }

 pub fn find_matching_cors_rule<'a>(
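handle_options_for_bucket and find_matching_cors_rule walk the bucket's CORS configuration and only let a request through when some rule covers its origin and method. A reduced sketch of that kind of rule matching; the rule struct below is a simplification, not Garage's CorsRule:

// Simplified CORS rule: which origins and methods a bucket allows.
struct CorsRule {
    allow_origins: Vec<String>,
    allow_methods: Vec<String>,
}

// Return the first rule that covers this (origin, method) pair, if any.
fn find_matching_rule<'a>(
    rules: &'a [CorsRule],
    origin: &str,
    method: &str,
) -> Option<&'a CorsRule> {
    rules.iter().find(|rule| {
        let origin_ok = rule
            .allow_origins
            .iter()
            .any(|o| o == "*" || o == origin);
        let method_ok = rule
            .allow_methods
            .iter()
            .any(|m| m == "*" || m.eq_ignore_ascii_case(method));
        origin_ok && method_ok
    })
}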
@@ -8,7 +8,7 @@ use garage_util::time::*;
 use garage_model::garage::Garage;
 use garage_model::s3::object_table::*;

-use crate::error::*;
+use crate::s3::error::*;
 use crate::s3::xml as s3_xml;
 use crate::signature::verify_signed_content;
@@ -2,34 +2,24 @@ use std::convert::TryInto;

 use err_derive::Error;
 use hyper::header::HeaderValue;
-use hyper::{HeaderMap, StatusCode};
+use hyper::{Body, HeaderMap, StatusCode};

 use garage_model::helper::error::Error as HelperError;
-use garage_util::error::Error as GarageError;

+use crate::common_error::CommonError;
+pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
+use crate::generic_server::ApiError;
 use crate::s3::xml as s3_xml;
+use crate::signature::error::Error as SignatureError;

 /// Errors of this crate
 #[derive(Debug, Error)]
 pub enum Error {
-	// Category: internal error
-	/// Error related to deeper parts of Garage
-	#[error(display = "Internal error: {}", _0)]
-	InternalError(#[error(source)] GarageError),
-
-	/// Error related to Hyper
-	#[error(display = "Internal error (Hyper error): {}", _0)]
-	Hyper(#[error(source)] hyper::Error),
-
-	/// Error related to HTTP
-	#[error(display = "Internal error (HTTP error): {}", _0)]
-	Http(#[error(source)] http::Error),
+	#[error(display = "{}", _0)]
+	/// Error from common error
+	Common(CommonError),

	// Category: cannot process
-	/// No proper api key was used, or the signature was invalid
-	#[error(display = "Forbidden: {}", _0)]
-	Forbidden(String),
-
	/// Authorization Header Malformed
	#[error(display = "Authorization header malformed, expected scope: {}", _0)]
	AuthorizationHeaderMalformed(String),
@@ -38,22 +28,10 @@ pub enum Error {
	#[error(display = "Key not found")]
	NoSuchKey,

-	/// The bucket requested don't exists
-	#[error(display = "Bucket not found")]
-	NoSuchBucket,
-
	/// The multipart upload requested don't exists
	#[error(display = "Upload not found")]
	NoSuchUpload,

-	/// Tried to create a bucket that already exist
-	#[error(display = "Bucket already exists")]
-	BucketAlreadyExists,
-
-	/// Tried to delete a non-empty bucket
-	#[error(display = "Tried to delete a non-empty bucket")]
-	BucketNotEmpty,
-
	/// Precondition failed (e.g. x-amz-copy-source-if-match)
	#[error(display = "At least one of the preconditions you specified did not hold")]
	PreconditionFailed,
@@ -80,10 +58,6 @@ pub enum Error {
	#[error(display = "Invalid UTF-8: {}", _0)]
	InvalidUtf8String(#[error(source)] std::string::FromUtf8Error),

-	/// Some base64 encoded data was badly encoded
-	#[error(display = "Invalid base64: {}", _0)]
-	InvalidBase64(#[error(source)] base64::DecodeError),
-
	/// The client sent invalid XML data
	#[error(display = "Invalid XML: {}", _0)]
	InvalidXml(String),
@@ -96,19 +70,34 @@ pub enum Error {
	#[error(display = "Invalid HTTP range: {:?}", _0)]
	InvalidRange(#[error(from)] (http_range::HttpRangeParseError, u64)),

-	/// The client sent an invalid request
-	#[error(display = "Bad request: {}", _0)]
-	BadRequest(String),
-
-	/// The client asked for an invalid return format (invalid Accept header)
-	#[error(display = "Not acceptable: {}", _0)]
-	NotAcceptable(String),
-
	/// The client sent a request for an action not supported by garage
	#[error(display = "Unimplemented action: {}", _0)]
	NotImplemented(String),
 }

+impl<T> From<T> for Error
+where
+	CommonError: From<T>,
+{
+	fn from(err: T) -> Self {
+		Error::Common(CommonError::from(err))
+	}
+}
+
+impl CommonErrorDerivative for Error {}
+
+impl From<HelperError> for Error {
+	fn from(err: HelperError) -> Self {
+		match err {
+			HelperError::Internal(i) => Self::Common(CommonError::InternalError(i)),
+			HelperError::BadRequest(b) => Self::Common(CommonError::BadRequest(b)),
+			HelperError::InvalidBucketName(n) => Self::Common(CommonError::InvalidBucketName(n)),
+			HelperError::NoSuchBucket(n) => Self::Common(CommonError::NoSuchBucket(n)),
+			e => Self::bad_request(format!("{}", e)),
+		}
+	}
+}
+
 impl From<roxmltree::Error> for Error {
	fn from(err: roxmltree::Error) -> Self {
		Self::InvalidXml(format!("{}", err))
@@ -121,88 +110,67 @@ impl From<quick_xml::de::DeError> for Error {
	}
 }

-impl From<HelperError> for Error {
-	fn from(err: HelperError) -> Self {
+impl From<SignatureError> for Error {
+	fn from(err: SignatureError) -> Self {
		match err {
-			HelperError::Internal(i) => Self::InternalError(i),
-			HelperError::BadRequest(b) => Self::BadRequest(b),
+			SignatureError::Common(c) => Self::Common(c),
+			SignatureError::AuthorizationHeaderMalformed(c) => {
+				Self::AuthorizationHeaderMalformed(c)
+			}
+			SignatureError::InvalidUtf8Str(i) => Self::InvalidUtf8Str(i),
+			SignatureError::InvalidHeader(h) => Self::InvalidHeader(h),
		}
	}
 }

 impl From<multer::Error> for Error {
	fn from(err: multer::Error) -> Self {
-		Self::BadRequest(err.to_string())
+		Self::bad_request(err)
	}
 }

 impl Error {
-	/// Get the HTTP status code that best represents the meaning of the error for the client
-	pub fn http_status_code(&self) -> StatusCode {
-		match self {
-			Error::NoSuchKey | Error::NoSuchBucket | Error::NoSuchUpload => StatusCode::NOT_FOUND,
-			Error::BucketNotEmpty | Error::BucketAlreadyExists => StatusCode::CONFLICT,
-			Error::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
-			Error::Forbidden(_) => StatusCode::FORBIDDEN,
-			Error::NotAcceptable(_) => StatusCode::NOT_ACCEPTABLE,
-			Error::InternalError(
-				GarageError::Timeout
-				| GarageError::RemoteError(_)
-				| GarageError::Quorum(_, _, _, _),
-			) => StatusCode::SERVICE_UNAVAILABLE,
-			Error::InternalError(_) | Error::Hyper(_) | Error::Http(_) => {
-				StatusCode::INTERNAL_SERVER_ERROR
-			}
-			Error::InvalidRange(_) => StatusCode::RANGE_NOT_SATISFIABLE,
-			Error::NotImplemented(_) => StatusCode::NOT_IMPLEMENTED,
-			_ => StatusCode::BAD_REQUEST,
-		}
-	}
-
	pub fn aws_code(&self) -> &'static str {
		match self {
+			Error::Common(c) => c.aws_code(),
			Error::NoSuchKey => "NoSuchKey",
-			Error::NoSuchBucket => "NoSuchBucket",
			Error::NoSuchUpload => "NoSuchUpload",
-			Error::BucketAlreadyExists => "BucketAlreadyExists",
-			Error::BucketNotEmpty => "BucketNotEmpty",
			Error::PreconditionFailed => "PreconditionFailed",
			Error::InvalidPart => "InvalidPart",
			Error::InvalidPartOrder => "InvalidPartOrder",
			Error::EntityTooSmall => "EntityTooSmall",
-			Error::Forbidden(_) => "AccessDenied",
			Error::AuthorizationHeaderMalformed(_) => "AuthorizationHeaderMalformed",
			Error::NotImplemented(_) => "NotImplemented",
-			Error::InternalError(
-				GarageError::Timeout
-				| GarageError::RemoteError(_)
-				| GarageError::Quorum(_, _, _, _),
-			) => "ServiceUnavailable",
-			Error::InternalError(_) | Error::Hyper(_) | Error::Http(_) => "InternalError",
-			_ => "InvalidRequest",
+			Error::InvalidXml(_) => "MalformedXML",
+			Error::InvalidRange(_) => "InvalidRange",
+			Error::InvalidUtf8Str(_) | Error::InvalidUtf8String(_) | Error::InvalidHeader(_) => {
+				"InvalidRequest"
+			}
		}
	}
+}

-	pub fn aws_xml(&self, garage_region: &str, path: &str) -> String {
-		let error = s3_xml::Error {
-			code: s3_xml::Value(self.aws_code().to_string()),
-			message: s3_xml::Value(format!("{}", self)),
-			resource: Some(s3_xml::Value(path.to_string())),
-			region: Some(s3_xml::Value(garage_region.to_string())),
-		};
-		s3_xml::to_xml_with_header(&error).unwrap_or_else(|_| {
-			r#"
-<?xml version="1.0" encoding="UTF-8"?>
-<Error>
-	<Code>InternalError</Code>
-	<Message>XML encoding of error failed</Message>
-</Error>
-			"#
-			.into()
-		})
-	}
+impl ApiError for Error {
+	/// Get the HTTP status code that best represents the meaning of the error for the client
+	fn http_status_code(&self) -> StatusCode {
+		match self {
+			Error::Common(c) => c.http_status_code(),
+			Error::NoSuchKey | Error::NoSuchUpload => StatusCode::NOT_FOUND,
+			Error::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
+			Error::InvalidRange(_) => StatusCode::RANGE_NOT_SATISFIABLE,
+			Error::NotImplemented(_) => StatusCode::NOT_IMPLEMENTED,
+			Error::AuthorizationHeaderMalformed(_)
+			| Error::InvalidPart
+			| Error::InvalidPartOrder
+			| Error::EntityTooSmall
+			| Error::InvalidXml(_)
+			| Error::InvalidUtf8Str(_)
+			| Error::InvalidUtf8String(_)
+			| Error::InvalidHeader(_) => StatusCode::BAD_REQUEST,
+		}
+	}

-	pub fn add_headers(&self, header_map: &mut HeaderMap<HeaderValue>) {
+	fn add_http_headers(&self, header_map: &mut HeaderMap<HeaderValue>) {
		use hyper::header;
		#[allow(clippy::single_match)]
		match self {
@@ -217,68 +185,23 @@ impl Error {
			_ => (),
		}
	}
-}

-/// Trait to map error to the Bad Request error code
-pub trait OkOrBadRequest {
-	type S;
-	fn ok_or_bad_request<M: AsRef<str>>(self, reason: M) -> Result<Self::S, Error>;
-}
-
-impl<T, E> OkOrBadRequest for Result<T, E>
-where
-	E: std::fmt::Display,
-{
-	type S = T;
-	fn ok_or_bad_request<M: AsRef<str>>(self, reason: M) -> Result<T, Error> {
-		match self {
-			Ok(x) => Ok(x),
-			Err(e) => Err(Error::BadRequest(format!("{}: {}", reason.as_ref(), e))),
-		}
-	}
-}
-
-impl<T> OkOrBadRequest for Option<T> {
-	type S = T;
-	fn ok_or_bad_request<M: AsRef<str>>(self, reason: M) -> Result<T, Error> {
-		match self {
-			Some(x) => Ok(x),
-			None => Err(Error::BadRequest(reason.as_ref().to_string())),
-		}
-	}
-}
-
-/// Trait to map an error to an Internal Error code
-pub trait OkOrInternalError {
-	type S;
-	fn ok_or_internal_error<M: AsRef<str>>(self, reason: M) -> Result<Self::S, Error>;
-}
-
-impl<T, E> OkOrInternalError for Result<T, E>
-where
-	E: std::fmt::Display,
-{
-	type S = T;
-	fn ok_or_internal_error<M: AsRef<str>>(self, reason: M) -> Result<T, Error> {
-		match self {
-			Ok(x) => Ok(x),
-			Err(e) => Err(Error::InternalError(GarageError::Message(format!(
-				"{}: {}",
-				reason.as_ref(),
-				e
-			)))),
-		}
-	}
-}
-
-impl<T> OkOrInternalError for Option<T> {
-	type S = T;
-	fn ok_or_internal_error<M: AsRef<str>>(self, reason: M) -> Result<T, Error> {
-		match self {
-			Some(x) => Ok(x),
-			None => Err(Error::InternalError(GarageError::Message(
-				reason.as_ref().to_string(),
-			))),
-		}
-	}
+	fn http_body(&self, garage_region: &str, path: &str) -> Body {
+		let error = s3_xml::Error {
+			code: s3_xml::Value(self.aws_code().to_string()),
+			message: s3_xml::Value(format!("{}", self)),
+			resource: Some(s3_xml::Value(path.to_string())),
+			region: Some(s3_xml::Value(garage_region.to_string())),
+		};
+		Body::from(s3_xml::to_xml_with_header(&error).unwrap_or_else(|_| {
+			r#"
+<?xml version="1.0" encoding="UTF-8"?>
+<Error>
+	<Code>InternalError</Code>
+	<Message>XML encoding of error failed</Message>
+</Error>
+			"#
+			.into()
+		}))
+	}
 }
@@ -17,7 +17,7 @@ use garage_model::garage::Garage;
 use garage_model::s3::object_table::*;
 use garage_model::s3::version_table::*;

-use crate::error::*;
+use crate::s3::error::*;

 const X_AMZ_MP_PARTS_COUNT: &str = "x-amz-mp-parts-count";

@@ -210,8 +210,8 @@ pub async fn handle_get(

	match (part_number, parse_range_header(req, last_v_meta.size)?) {
		(Some(_), Some(_)) => {
-			return Err(Error::BadRequest(
-				"Cannot specify both partNumber and Range header".into(),
+			return Err(Error::bad_request(
+				"Cannot specify both partNumber and Range header",
			));
		}
		(Some(pn), None) => {
@@ -302,9 +302,9 @@ async fn handle_get_range(
			let body: Body = Body::from(bytes[begin as usize..end as usize].to_vec());
			Ok(resp_builder.body(body)?)
		} else {
-			None.ok_or_internal_error(
+			Err(Error::internal_error(
				"Requested range not present in inline bytes when it should have been",
-			)
+			))
		}
	}
	ObjectVersionData::FirstBlock(_meta, _first_block_hash) => {
@@ -16,8 +16,8 @@ use garage_model::s3::version_table::Version;
 use garage_table::{EmptyKey, EnumerationOrder};

 use crate::encoding::*;
-use crate::error::*;
 use crate::helpers::key_after_prefix;
+use crate::s3::error::*;
 use crate::s3::put as s3_put;
 use crate::s3::xml as s3_xml;

@@ -582,13 +582,19 @@ impl ListObjectsQuery {
			// representing the key to start with.
			(Some(token), _) => match &token[..1] {
				"[" => Ok(RangeBegin::IncludingKey {
-					key: String::from_utf8(base64::decode(token[1..].as_bytes())?)?,
+					key: String::from_utf8(
+						base64::decode(token[1..].as_bytes())
+							.ok_or_bad_request("Invalid continuation token")?,
+					)?,
					fallback_key: None,
				}),
				"]" => Ok(RangeBegin::AfterKey {
-					key: String::from_utf8(base64::decode(token[1..].as_bytes())?)?,
+					key: String::from_utf8(
+						base64::decode(token[1..].as_bytes())
+							.ok_or_bad_request("Invalid continuation token")?,
+					)?,
				}),
-				_ => Err(Error::BadRequest("Invalid continuation token".to_string())),
+				_ => Err(Error::bad_request("Invalid continuation token")),
			},

			// StartAfter has defined semantics in the spec:
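ListObjects continuation tokens are opaque strings: a one-character prefix, '[' to resume including the key and ']' to resume after it, followed by the base64-encoded key, and a malformed token is now reported as a bad request instead of surfacing a decode error. A small sketch of encoding and decoding such a token; the helper names and error representation are illustrative:

// Build a continuation token: '[' means "resume including this key",
// ']' means "resume after this key".
fn encode_token(key: &str, including: bool) -> String {
    let prefix = if including { '[' } else { ']' };
    format!("{}{}", prefix, base64::encode(key))
}

// Decode a token back into (key, including), treating any malformed
// input as a client error rather than an internal one.
fn decode_token(token: &str) -> Result<(String, bool), String> {
    let including = match token.get(..1) {
        Some("[") => true,
        Some("]") => false,
        _ => return Err("Invalid continuation token".to_string()),
    };
    let raw = base64::decode(&token[1..]).map_err(|_| "Invalid continuation token".to_string())?;
    let key = String::from_utf8(raw).map_err(|_| "Invalid continuation token".to_string())?;
    Ok((key, including))
}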
@@ -1,4 +1,5 @@
 pub mod api_server;
+pub mod error;

 mod bucket;
 mod copy;
@@ -14,8 +14,7 @@ use serde::Deserialize;

 use garage_model::garage::Garage;

-use crate::error::*;
-use crate::helpers::resolve_bucket;
+use crate::s3::error::*;
 use crate::s3::put::{get_headers, save_stream};
 use crate::s3::xml as s3_xml;
 use crate::signature::payload::{parse_date, verify_v4};
@@ -48,9 +47,7 @@ pub async fn handle_post_object(
	let field = if let Some(field) = multipart.next_field().await? {
		field
	} else {
-		return Err(Error::BadRequest(
-			"Request did not contain a file".to_owned(),
-		));
+		return Err(Error::bad_request("Request did not contain a file"));
	};
	let name: HeaderName = if let Some(Ok(name)) = field.name().map(TryInto::try_into) {
		name
@@ -66,14 +63,14 @@ pub async fn handle_post_object(
		"tag" => (/* tag need to be reencoded, but we don't support them yet anyway */),
		"acl" => {
			if params.insert("x-amz-acl", content).is_some() {
-				return Err(Error::BadRequest(
-					"Field 'acl' provided more than one time".to_string(),
+				return Err(Error::bad_request(
+					"Field 'acl' provided more than one time",
				));
			}
		}
		_ => {
			if params.insert(&name, content).is_some() {
-				return Err(Error::BadRequest(format!(
+				return Err(Error::bad_request(format!(
					"Field '{}' provided more than one time",
					name
				)));
@@ -90,9 +87,7 @@ pub async fn handle_post_object(
		.to_str()?;
	let credential = params
		.get("x-amz-credential")
-		.ok_or_else(|| {
-			Error::Forbidden("Garage does not support anonymous access yet".to_string())
-		})?
+		.ok_or_else(|| Error::forbidden("Garage does not support anonymous access yet"))?
		.to_str()?;
	let policy = params
		.get("policy")
@@ -129,15 +124,16 @@ pub async fn handle_post_object(
	)
	.await?;

-	let bucket_id = resolve_bucket(&garage, &bucket, &api_key).await?;
+	let bucket_id = garage
+		.bucket_helper()
+		.resolve_bucket(&bucket, &api_key)
+		.await?;

	if !api_key.allow_write(&bucket_id) {
-		return Err(Error::Forbidden(
-			"Operation is not allowed for this key.".to_string(),
-		));
+		return Err(Error::forbidden("Operation is not allowed for this key."));
	}

-	let decoded_policy = base64::decode(&policy)?;
+	let decoded_policy = base64::decode(&policy).ok_or_bad_request("Invalid policy")?;
	let decoded_policy: Policy =
		serde_json::from_slice(&decoded_policy).ok_or_bad_request("Invalid policy")?;

@@ -145,9 +141,7 @@ pub async fn handle_post_object(
		.ok_or_bad_request("Invalid expiration date")?
		.into();
	if Utc::now() - expiration > Duration::zero() {
-		return Err(Error::BadRequest(
-			"Expiration date is in the paste".to_string(),
-		));
+		return Err(Error::bad_request("Expiration date is in the paste"));
	}

	let mut conditions = decoded_policy.into_conditions()?;
@@ -159,7 +153,7 @@ pub async fn handle_post_object(
			"policy" | "x-amz-signature" => (), // this is always accepted, as it's required to validate other fields
			"content-type" => {
				let conds = conditions.params.remove("content-type").ok_or_else(|| {
-					Error::BadRequest(format!("Key '{}' is not allowed in policy", param_key))
+					Error::bad_request(format!("Key '{}' is not allowed in policy", param_key))
				})?;
				for cond in conds {
					let ok = match cond {
@@ -169,7 +163,7 @@ pub async fn handle_post_object(
						}
					};
					if !ok {
-						return Err(Error::BadRequest(format!(
+						return Err(Error::bad_request(format!(
							"Key '{}' has value not allowed in policy",
							param_key
						)));
@@ -178,7 +172,7 @@ pub async fn handle_post_object(
			}
			"key" => {
				let conds = conditions.params.remove("key").ok_or_else(|| {
-					Error::BadRequest(format!("Key '{}' is not allowed in policy", param_key))
+					Error::bad_request(format!("Key '{}' is not allowed in policy", param_key))
				})?;
				for cond in conds {
					let ok = match cond {
@@ -186,7 +180,7 @@ pub async fn handle_post_object(
						Operation::StartsWith(s) => key.starts_with(&s),
					};
					if !ok {
-						return Err(Error::BadRequest(format!(
+						return Err(Error::bad_request(format!(
							"Key '{}' has value not allowed in policy",
							param_key
						)));
@@ -201,7 +195,7 @@ pub async fn handle_post_object(
					continue;
				}
				let conds = conditions.params.remove(&param_key).ok_or_else(|| {
-					Error::BadRequest(format!("Key '{}' is not allowed in policy", param_key))
+					Error::bad_request(format!("Key '{}' is not allowed in policy", param_key))
				})?;
				for cond in conds {
					let ok = match cond {
@@ -209,7 +203,7 @@ pub async fn handle_post_object(
						Operation::StartsWith(s) => value.to_str()?.starts_with(s.as_str()),
					};
					if !ok {
-						return Err(Error::BadRequest(format!(
+						return Err(Error::bad_request(format!(
							"Key '{}' has value not allowed in policy",
							param_key
						)));
@@ -220,7 +214,7 @@ pub async fn handle_post_object(
	}

	if let Some((param_key, _)) = conditions.params.iter().next() {
-		return Err(Error::BadRequest(format!(
+		return Err(Error::bad_request(format!(
			"Key '{}' is required in policy, but no value was provided",
			param_key
		)));
@@ -326,7 +320,7 @@ impl Policy {
			match condition {
				PolicyCondition::Equal(map) => {
					if map.len() != 1 {
-						return Err(Error::BadRequest("Invalid policy item".to_owned()));
+						return Err(Error::bad_request("Invalid policy item"));
					}
					let (mut k, v) = map.into_iter().next().expect("size was verified");
					k.make_ascii_lowercase();
@@ -334,7 +328,7 @@ impl Policy {
				}
				PolicyCondition::OtherOp([cond, mut key, value]) => {
					if key.remove(0) != '$' {
-						return Err(Error::BadRequest("Invalid policy item".to_owned()));
+						return Err(Error::bad_request("Invalid policy item"));
					}
					key.make_ascii_lowercase();
					match cond.as_str() {
@@ -347,7 +341,7 @@ impl Policy {
							.or_default()
							.push(Operation::StartsWith(value));
						}
-						_ => return Err(Error::BadRequest("Invalid policy item".to_owned())),
+						_ => return Err(Error::bad_request("Invalid policy item")),
					}
				}
				PolicyCondition::SizeRange(key, min, max) => {
@@ -355,7 +349,7 @@ impl Policy {
						length.0 = length.0.max(min);
						length.1 = length.1.min(max);
					} else {
-						return Err(Error::BadRequest("Invalid policy item".to_owned()));
+						return Err(Error::bad_request("Invalid policy item"));
					}
				}
			}
@@ -420,15 +414,15 @@ where
		self.read += bytes.len() as u64;
		// optimization to fail early when we know before the end it's too long
		if self.length.end() < &self.read {
|
||||||
return Poll::Ready(Some(Err(Error::BadRequest(
|
return Poll::Ready(Some(Err(Error::bad_request(
|
||||||
"File size does not match policy".to_owned(),
|
"File size does not match policy",
|
||||||
))));
|
))));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Poll::Ready(None) => {
|
Poll::Ready(None) => {
|
||||||
if !self.length.contains(&self.read) {
|
if !self.length.contains(&self.read) {
|
||||||
return Poll::Ready(Some(Err(Error::BadRequest(
|
return Poll::Ready(Some(Err(Error::bad_request(
|
||||||
"File size does not match policy".to_owned(),
|
"File size does not match policy",
|
||||||
))));
|
))));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -19,7 +19,7 @@ use garage_model::s3::block_ref_table::*;
 use garage_model::s3::object_table::*;
 use garage_model::s3::version_table::*;

-use crate::error::*;
+use crate::s3::error::*;
 use crate::s3::xml as s3_xml;
 use crate::signature::verify_signed_content;

@@ -183,8 +183,8 @@ fn ensure_checksum_matches(
 ) -> Result<(), Error> {
 if let Some(expected_sha256) = content_sha256 {
 if expected_sha256 != data_sha256sum {
-return Err(Error::BadRequest(
-"Unable to validate x-amz-content-sha256".to_string(),
+return Err(Error::bad_request(
+"Unable to validate x-amz-content-sha256",
 ));
 } else {
 trace!("Successfully validated x-amz-content-sha256");
@@ -192,9 +192,7 @@ fn ensure_checksum_matches(
 }
 if let Some(expected_md5) = content_md5 {
 if expected_md5.trim_matches('"') != base64::encode(data_md5sum) {
-return Err(Error::BadRequest(
-"Unable to validate content-md5".to_string(),
-));
+return Err(Error::bad_request("Unable to validate content-md5"));
 } else {
 trace!("Successfully validated content-md5");
 }
@@ -428,7 +426,7 @@ pub async fn handle_put_part(
 // Check part hasn't already been uploaded
 if let Some(v) = version {
 if v.has_part_number(part_number) {
-return Err(Error::BadRequest(format!(
+return Err(Error::bad_request(format!(
 "Part number {} has already been uploaded",
 part_number
 )));
@@ -513,7 +511,7 @@ pub async fn handle_complete_multipart_upload(

 let version = version.ok_or(Error::NoSuchKey)?;
 if version.blocks.is_empty() {
-return Err(Error::BadRequest("No data was uploaded".to_string()));
+return Err(Error::bad_request("No data was uploaded"));
 }

 let headers = match object_version.state {
@@ -574,8 +572,8 @@ pub async fn handle_complete_multipart_upload(
 .map(|x| x.part_number)
 .eq(block_parts.into_iter());
 if !same_parts {
-return Err(Error::BadRequest(
-"Part numbers in block list and part list do not match. This can happen if a part was partially uploaded. Please abort the multipart upload and try again.".into(),
+return Err(Error::bad_request(
+"Part numbers in block list and part list do not match. This can happen if a part was partially uploaded. Please abort the multipart upload and try again."
 ));
 }

@@ -1,5 +1,3 @@
-use crate::error::{Error, OkOrBadRequest};
-
 use std::borrow::Cow;

 use hyper::header::HeaderValue;
@@ -7,6 +5,7 @@ use hyper::{HeaderMap, Method, Request};

 use crate::helpers::Authorization;
 use crate::router_macros::{generateQueryParameters, router_match};
+use crate::s3::error::*;

 router_match! {@func

@@ -343,7 +342,7 @@ impl Endpoint {
 Method::POST => Self::from_post(key, &mut query)?,
 Method::PUT => Self::from_put(key, &mut query, req.headers())?,
 Method::DELETE => Self::from_delete(key, &mut query)?,
-_ => return Err(Error::BadRequest("Unknown method".to_owned())),
+_ => return Err(Error::bad_request("Unknown method")),
 };

 if let Some(message) = query.nonempty_message() {
@@ -4,13 +4,12 @@ use std::sync::Arc;
 use hyper::{Body, Request, Response, StatusCode};
 use serde::{Deserialize, Serialize};

-use crate::error::*;
+use crate::s3::error::*;
 use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
 use crate::signature::verify_signed_content;

 use garage_model::bucket_table::*;
 use garage_model::garage::Garage;
-use garage_table::*;
 use garage_util::data::*;

 pub async fn handle_get_website(bucket: &Bucket) -> Result<Response<Body>, Error> {
@@ -47,14 +46,11 @@ pub async fn handle_delete_website(
 bucket_id: Uuid,
 ) -> Result<Response<Body>, Error> {
 let mut bucket = garage
-.bucket_table
-.get(&EmptyKey, &bucket_id)
-.await?
-.ok_or(Error::NoSuchBucket)?;
+.bucket_helper()
+.get_existing_bucket(bucket_id)
+.await?;

-let param = bucket
-.params_mut()
-.ok_or_internal_error("Bucket should not be deleted at this point")?;
+let param = bucket.params_mut().unwrap();

 param.website_config.update(None);
 garage.bucket_table.insert(&bucket).await?;
@@ -77,14 +73,11 @@ pub async fn handle_put_website(
 }

 let mut bucket = garage
-.bucket_table
-.get(&EmptyKey, &bucket_id)
-.await?
-.ok_or(Error::NoSuchBucket)?;
+.bucket_helper()
+.get_existing_bucket(bucket_id)
+.await?;

-let param = bucket
-.params_mut()
-.ok_or_internal_error("Bucket should not be deleted at this point")?;
+let param = bucket.params_mut().unwrap();

 let conf: WebsiteConfiguration = from_reader(&body as &[u8])?;
 conf.validate()?;
@@ -176,8 +169,8 @@ impl WebsiteConfiguration {
 || self.index_document.is_some()
 || self.routing_rules.is_some())
 {
-return Err(Error::BadRequest(
-"Bad XML: can't have RedirectAllRequestsTo and other fields".to_owned(),
+return Err(Error::bad_request(
+"Bad XML: can't have RedirectAllRequestsTo and other fields",
 ));
 }
 if let Some(ref ed) = self.error_document {
@@ -222,8 +215,8 @@ impl WebsiteConfiguration {
 impl Key {
 pub fn validate(&self) -> Result<(), Error> {
 if self.key.0.is_empty() {
-Err(Error::BadRequest(
-"Bad XML: error document specified but empty".to_owned(),
+Err(Error::bad_request(
+"Bad XML: error document specified but empty",
 ))
 } else {
 Ok(())
@@ -234,8 +227,8 @@ impl Key {
 impl Suffix {
 pub fn validate(&self) -> Result<(), Error> {
 if self.suffix.0.is_empty() | self.suffix.0.contains('/') {
-Err(Error::BadRequest(
-"Bad XML: index document is empty or contains /".to_owned(),
+Err(Error::bad_request(
+"Bad XML: index document is empty or contains /",
 ))
 } else {
 Ok(())
@@ -247,7 +240,7 @@ impl Target {
 pub fn validate(&self) -> Result<(), Error> {
 if let Some(ref protocol) = self.protocol {
 if protocol.0 != "http" && protocol.0 != "https" {
-return Err(Error::BadRequest("Bad XML: invalid protocol".to_owned()));
+return Err(Error::bad_request("Bad XML: invalid protocol"));
 }
 }
 Ok(())
@@ -269,19 +262,19 @@ impl Redirect {
 pub fn validate(&self, has_prefix: bool) -> Result<(), Error> {
 if self.replace_prefix.is_some() {
 if self.replace_full.is_some() {
-return Err(Error::BadRequest(
-"Bad XML: both ReplaceKeyPrefixWith and ReplaceKeyWith are set".to_owned(),
+return Err(Error::bad_request(
+"Bad XML: both ReplaceKeyPrefixWith and ReplaceKeyWith are set",
 ));
 }
 if !has_prefix {
-return Err(Error::BadRequest(
-"Bad XML: ReplaceKeyPrefixWith is set, but KeyPrefixEquals isn't".to_owned(),
+return Err(Error::bad_request(
+"Bad XML: ReplaceKeyPrefixWith is set, but KeyPrefixEquals isn't",
 ));
 }
 }
 if let Some(ref protocol) = self.protocol {
 if protocol.0 != "http" && protocol.0 != "https" {
-return Err(Error::BadRequest("Bad XML: invalid protocol".to_owned()));
+return Err(Error::bad_request("Bad XML: invalid protocol"));
 }
 }
 // TODO there are probably more invalide cases, but which ones?
@@ -1,7 +1,7 @@
 use quick_xml::se::to_string;
 use serde::{Deserialize, Serialize, Serializer};

-use crate::Error as ApiError;
+use crate::s3::error::Error as ApiError;

 pub fn to_xml_with_header<T: Serialize>(x: &T) -> Result<String, ApiError> {
 let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string();
src/api/signature/error.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
+use err_derive::Error;
+
+use crate::common_error::CommonError;
+pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
+
+/// Errors of this crate
+#[derive(Debug, Error)]
+pub enum Error {
+#[error(display = "{}", _0)]
+/// Error from common error
+Common(CommonError),
+
+/// Authorization Header Malformed
+#[error(display = "Authorization header malformed, expected scope: {}", _0)]
+AuthorizationHeaderMalformed(String),
+
+// Category: bad request
+/// The request contained an invalid UTF-8 sequence in its path or in other parameters
+#[error(display = "Invalid UTF-8: {}", _0)]
+InvalidUtf8Str(#[error(source)] std::str::Utf8Error),
+
+/// The client sent a header with invalid value
+#[error(display = "Invalid header value: {}", _0)]
+InvalidHeader(#[error(source)] hyper::header::ToStrError),
+}
+
+impl<T> From<T> for Error
+where
+CommonError: From<T>,
+{
+fn from(err: T) -> Self {
+Error::Common(CommonError::from(err))
+}
+}
+
+impl CommonErrorDerivative for Error {}
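A note on the new `src/api/signature/error.rs` above: the blanket `impl<T> From<T> for Error where CommonError: From<T>` is what lets the signature code keep using `?` on anything that already converts into `CommonError`. The following standalone sketch (hypothetical stand-in types, not code from this commit) shows the same shape:

```rust
use err_derive::Error;

// Stand-in for garage_api's CommonError (hypothetical, for illustration only).
#[derive(Debug, Error)]
pub enum CommonError {
	#[error(display = "Internal error: {}", _0)]
	Internal(String),
}

impl From<std::io::Error> for CommonError {
	fn from(e: std::io::Error) -> Self {
		CommonError::Internal(e.to_string())
	}
}

#[derive(Debug, Error)]
pub enum Error {
	#[error(display = "{}", _0)]
	Common(CommonError),
}

// Blanket conversion: anything that converts into CommonError also converts
// into Error, so `?` works transparently for both error families.
impl<T> From<T> for Error
where
	CommonError: From<T>,
{
	fn from(err: T) -> Self {
		Error::Common(CommonError::from(err))
	}
}

fn read_config() -> Result<String, Error> {
	// std::io::Error -> CommonError -> Error, all through the single blanket impl.
	let data = std::fs::read_to_string("garage.toml")?;
	Ok(data)
}
```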
@@ -4,11 +4,12 @@ use sha2::Sha256;

 use garage_util::data::{sha256sum, Hash};

-use crate::error::*;
+pub mod error;

 pub mod payload;
 pub mod streaming;

+use error::*;

 pub const SHORT_DATE: &str = "%Y%m%d";
 pub const LONG_DATETIME: &str = "%Y%m%dT%H%M%SZ";

@@ -16,7 +17,7 @@ type HmacSha256 = Hmac<Sha256>;

 pub fn verify_signed_content(expected_sha256: Hash, body: &[u8]) -> Result<(), Error> {
 if expected_sha256 != sha256sum(body) {
-return Err(Error::BadRequest(
+return Err(Error::bad_request(
 "Request content hash does not match signed hash".to_string(),
 ));
 }
@@ -15,7 +15,7 @@ use super::LONG_DATETIME;
 use super::{compute_scope, signing_hmac};

 use crate::encoding::uri_encode;
-use crate::error::*;
+use crate::signature::error::*;

 pub async fn check_payload_signature(
 garage: &Garage,
@@ -105,7 +105,7 @@ fn parse_authorization(
 let (auth_kind, rest) = authorization.split_at(first_space);

 if auth_kind != "AWS4-HMAC-SHA256" {
-return Err(Error::BadRequest("Unsupported authorization method".into()));
+return Err(Error::bad_request("Unsupported authorization method"));
 }

 let mut auth_params = HashMap::new();
@@ -129,10 +129,11 @@ fn parse_authorization(
 let date = headers
 .get("x-amz-date")
 .ok_or_bad_request("Missing X-Amz-Date field")
+.map_err(Error::from)
 .and_then(|d| parse_date(d))?;

 if Utc::now() - date > Duration::hours(24) {
-return Err(Error::BadRequest("Date is too old".to_string()));
+return Err(Error::bad_request("Date is too old".to_string()));
 }

 let auth = Authorization {
@@ -156,7 +157,7 @@ fn parse_query_authorization(
 headers: &HashMap<String, String>,
 ) -> Result<Authorization, Error> {
 if algorithm != "AWS4-HMAC-SHA256" {
-return Err(Error::BadRequest(
+return Err(Error::bad_request(
 "Unsupported authorization method".to_string(),
 ));
 }
@@ -179,10 +180,10 @@ fn parse_query_authorization(
 .get("x-amz-expires")
 .ok_or_bad_request("X-Amz-Expires not found in query parameters")?
 .parse()
-.map_err(|_| Error::BadRequest("X-Amz-Expires is not a number".to_string()))?;
+.map_err(|_| Error::bad_request("X-Amz-Expires is not a number".to_string()))?;

 if duration > 7 * 24 * 3600 {
-return Err(Error::BadRequest(
+return Err(Error::bad_request(
 "X-Amz-Exprires may not exceed a week".to_string(),
 ));
 }
@@ -190,10 +191,11 @@ fn parse_query_authorization(
 let date = headers
 .get("x-amz-date")
 .ok_or_bad_request("Missing X-Amz-Date field")
+.map_err(Error::from)
 .and_then(|d| parse_date(d))?;

 if Utc::now() - date > Duration::seconds(duration) {
-return Err(Error::BadRequest("Date is too old".to_string()));
+return Err(Error::bad_request("Date is too old".to_string()));
 }

 Ok(Authorization {
@@ -301,7 +303,7 @@ pub async fn verify_v4(
 .get(&EmptyKey, &key_id)
 .await?
 .filter(|k| !k.state.is_deleted())
-.ok_or_else(|| Error::Forbidden(format!("No such key: {}", &key_id)))?;
+.ok_or_else(|| Error::forbidden(format!("No such key: {}", &key_id)))?;
 let key_p = key.params().unwrap();

 let mut hmac = signing_hmac(
@@ -314,7 +316,7 @@ pub async fn verify_v4(
 hmac.update(payload);
 let our_signature = hex::encode(hmac.finalize().into_bytes());
 if signature != our_signature {
-return Err(Error::Forbidden("Invalid signature".to_string()));
+return Err(Error::forbidden("Invalid signature".to_string()));
 }

 Ok(key)
@@ -12,7 +12,7 @@ use garage_util::data::Hash;

 use super::{compute_scope, sha256sum, HmacSha256, LONG_DATETIME};

-use crate::error::*;
+use crate::signature::error::*;

 pub fn parse_streaming_body(
 api_key: &Key,
@@ -87,7 +87,7 @@ fn compute_streaming_payload_signature(
 let mut hmac = signing_hmac.clone();
 hmac.update(string_to_sign.as_bytes());

-Hash::try_from(&hmac.finalize().into_bytes()).ok_or_internal_error("Invalid signature")
+Ok(Hash::try_from(&hmac.finalize().into_bytes()).ok_or_internal_error("Invalid signature")?)
 }

 mod payload {
@@ -163,10 +163,10 @@ impl From<SignedPayloadStreamError> for Error {
 match err {
 SignedPayloadStreamError::Stream(e) => e,
 SignedPayloadStreamError::InvalidSignature => {
-Error::BadRequest("Invalid payload signature".into())
+Error::bad_request("Invalid payload signature")
 }
 SignedPayloadStreamError::Message(e) => {
-Error::BadRequest(format!("Chunk format error: {}", e))
+Error::bad_request(format!("Chunk format error: {}", e))
 }
 }
 }
 }
@@ -27,10 +27,8 @@ garage_rpc = { version = "0.7.0", path = "../rpc" }
 garage_table = { version = "0.7.0", path = "../table" }
 garage_util = { version = "0.7.0", path = "../util" }
 garage_web = { version = "0.7.0", path = "../web" }
-garage_admin = { version = "0.7.0", path = "../admin" }

 bytes = "1.0"
-git-version = "0.3.4"
 hex = "0.4"
 tracing = { version = "0.1.30", features = ["log-always"] }
 pretty_env_logger = "0.4"
@@ -54,6 +52,11 @@ tokio = { version = "1.0", default-features = false, features = ["rt", "rt-multi
 #netapp = { version = "0.4", path = "../../../netapp" }
 netapp = "0.4"

+opentelemetry = { version = "0.17", features = [ "rt-tokio" ] }
+opentelemetry-prometheus = "0.10"
+opentelemetry-otlp = "0.10"
+prometheus = "0.13"
+
 [dev-dependencies]
 aws-sdk-s3 = "0.8"
 chrono = "0.4"
@@ -22,7 +22,6 @@ use garage_model::helper::error::{Error, OkOrBadRequest};
 use garage_model::key_table::*;
 use garage_model::migrate::Migrate;
 use garage_model::permission::*;
-use garage_model::s3::object_table::ObjectFilter;

 use crate::cli::*;
 use crate::repair::Repair;
@@ -213,18 +212,7 @@ impl AdminRpcHandler {
 }

 // Check bucket is empty
-let objects = self
-.garage
-.object_table
-.get_range(
-&bucket_id,
-None,
-Some(ObjectFilter::IsData),
-10,
-EnumerationOrder::Forward,
-)
-.await?;
-if !objects.is_empty() {
+if !helper.is_bucket_empty(bucket_id).await? {
 return Err(Error::BadRequest(format!(
 "Bucket {} is not empty",
 query.name
@@ -261,6 +249,7 @@ impl AdminRpcHandler {

 async fn handle_alias_bucket(&self, query: &AliasBucketOpt) -> Result<AdminRpc, Error> {
 let helper = self.garage.bucket_helper();
+let key_helper = self.garage.key_helper();

 let bucket_id = helper
 .resolve_global_bucket_name(&query.existing_bucket)
@@ -268,7 +257,7 @@ impl AdminRpcHandler {
 .ok_or_bad_request("Bucket not found")?;

 if let Some(key_pattern) = &query.local {
-let key = helper.get_existing_matching_key(key_pattern).await?;
+let key = key_helper.get_existing_matching_key(key_pattern).await?;

 helper
 .set_local_bucket_alias(bucket_id, &key.key_id, &query.new_name)
@@ -290,9 +279,10 @@ impl AdminRpcHandler {

 async fn handle_unalias_bucket(&self, query: &UnaliasBucketOpt) -> Result<AdminRpc, Error> {
 let helper = self.garage.bucket_helper();
+let key_helper = self.garage.key_helper();

 if let Some(key_pattern) = &query.local {
-let key = helper.get_existing_matching_key(key_pattern).await?;
+let key = key_helper.get_existing_matching_key(key_pattern).await?;

 let bucket_id = key
 .state
@@ -331,12 +321,15 @@ impl AdminRpcHandler {

 async fn handle_bucket_allow(&self, query: &PermBucketOpt) -> Result<AdminRpc, Error> {
 let helper = self.garage.bucket_helper();
+let key_helper = self.garage.key_helper();

 let bucket_id = helper
 .resolve_global_bucket_name(&query.bucket)
 .await?
 .ok_or_bad_request("Bucket not found")?;
-let key = helper.get_existing_matching_key(&query.key_pattern).await?;
+let key = key_helper
+.get_existing_matching_key(&query.key_pattern)
+.await?;

 let allow_read = query.read || key.allow_read(&bucket_id);
 let allow_write = query.write || key.allow_write(&bucket_id);
@@ -363,12 +356,15 @@ impl AdminRpcHandler {

 async fn handle_bucket_deny(&self, query: &PermBucketOpt) -> Result<AdminRpc, Error> {
 let helper = self.garage.bucket_helper();
+let key_helper = self.garage.key_helper();

 let bucket_id = helper
 .resolve_global_bucket_name(&query.bucket)
 .await?
 .ok_or_bad_request("Bucket not found")?;
-let key = helper.get_existing_matching_key(&query.key_pattern).await?;
+let key = key_helper
+.get_existing_matching_key(&query.key_pattern)
+.await?;

 let allow_read = !query.read && key.allow_read(&bucket_id);
 let allow_write = !query.write && key.allow_write(&bucket_id);
@@ -469,7 +465,7 @@ impl AdminRpcHandler {
 async fn handle_key_info(&self, query: &KeyOpt) -> Result<AdminRpc, Error> {
 let key = self
 .garage
-.bucket_helper()
+.key_helper()
 .get_existing_matching_key(&query.key_pattern)
 .await?;
 self.key_info_result(key).await
@@ -484,7 +480,7 @@ impl AdminRpcHandler {
 async fn handle_rename_key(&self, query: &KeyRenameOpt) -> Result<AdminRpc, Error> {
 let mut key = self
 .garage
-.bucket_helper()
+.key_helper()
 .get_existing_matching_key(&query.key_pattern)
 .await?;
 key.params_mut()
@@ -496,9 +492,11 @@ impl AdminRpcHandler {
 }

 async fn handle_delete_key(&self, query: &KeyDeleteOpt) -> Result<AdminRpc, Error> {
-let helper = self.garage.bucket_helper();
+let key_helper = self.garage.key_helper();

-let mut key = helper.get_existing_matching_key(&query.key_pattern).await?;
+let mut key = key_helper
+.get_existing_matching_key(&query.key_pattern)
+.await?;

 if !query.yes {
 return Err(Error::BadRequest(
@@ -506,32 +504,7 @@ impl AdminRpcHandler {
 ));
 }

-let state = key.state.as_option_mut().unwrap();
-
-// --- done checking, now commit ---
-// (the step at unset_local_bucket_alias will fail if a bucket
-// does not have another alias, the deletion will be
-// interrupted in the middle if that happens)
-
-// 1. Delete local aliases
-for (alias, _, to) in state.local_aliases.items().iter() {
-if let Some(bucket_id) = to {
-helper
-.unset_local_bucket_alias(*bucket_id, &key.key_id, alias)
-.await?;
-}
-}
-
-// 2. Remove permissions on all authorized buckets
-for (ab_id, _auth) in state.authorized_buckets.items().iter() {
-helper
-.set_bucket_key_permissions(*ab_id, &key.key_id, BucketKeyPerm::NO_PERMISSIONS)
-.await?;
-}
-
-// 3. Actually delete key
-key.state = Deletable::delete();
-self.garage.key_table.insert(&key).await?;
+key_helper.delete_key(&mut key).await?;

 Ok(AdminRpc::Ok(format!(
 "Key {} was deleted successfully.",
@@ -542,7 +515,7 @@ impl AdminRpcHandler {
 async fn handle_allow_key(&self, query: &KeyPermOpt) -> Result<AdminRpc, Error> {
 let mut key = self
 .garage
-.bucket_helper()
+.key_helper()
 .get_existing_matching_key(&query.key_pattern)
 .await?;
 if query.create_bucket {
@@ -555,7 +528,7 @@ impl AdminRpcHandler {
 async fn handle_deny_key(&self, query: &KeyPermOpt) -> Result<AdminRpc, Error> {
 let mut key = self
 .garage
-.bucket_helper()
+.key_helper()
 .get_existing_matching_key(&query.key_pattern)
 .await?;
 if query.create_bucket {
@@ -696,11 +669,7 @@ impl AdminRpcHandler {
 writeln!(
 &mut ret,
 "\nGarage version: {}",
-option_env!("GIT_VERSION").unwrap_or(git_version::git_version!(
-prefix = "git:",
-cargo_prefix = "cargo:",
-fallback = "unknown"
-))
+self.garage.system.garage_version(),
 )
 .unwrap();

@@ -1,5 +1,4 @@
 use garage_util::crdt::Crdt;
-use garage_util::data::*;
 use garage_util::error::*;
 use garage_util::formater::format_table;

@@ -212,31 +211,9 @@ pub async fn cmd_apply_layout(
 rpc_host: NodeID,
 apply_opt: ApplyLayoutOpt,
 ) -> Result<(), Error> {
-let mut layout = fetch_layout(rpc_cli, rpc_host).await?;
+let layout = fetch_layout(rpc_cli, rpc_host).await?;

-match apply_opt.version {
-None => {
-println!("Please pass the --version flag to ensure that you are writing the correct version of the cluster layout.");
-println!("To know the correct value of the --version flag, invoke `garage layout show` and review the proposed changes.");
-return Err(Error::Message("--version flag is missing".into()));
-}
-Some(v) => {
-if v != layout.version + 1 {
-return Err(Error::Message("Invalid value of --version flag".into()));
-}
-}
-}
-
-layout.roles.merge(&layout.staging);
-
-if !layout.calculate_partition_assignation() {
-return Err(Error::Message("Could not calculate new assignation of partitions to nodes. This can happen if there are less nodes than the desired number of copies of your data (see the replication_mode configuration parameter).".into()));
-}
-
-layout.staging.clear();
-layout.staging_hash = blake2sum(&rmp_to_vec_all_named(&layout.staging).unwrap()[..]);
-
-layout.version += 1;
+let layout = layout.apply_staged_changes(apply_opt.version)?;

 send_layout(rpc_cli, rpc_host, layout).await?;

@@ -251,25 +228,9 @@ pub async fn cmd_revert_layout(
 rpc_host: NodeID,
 revert_opt: RevertLayoutOpt,
 ) -> Result<(), Error> {
-let mut layout = fetch_layout(rpc_cli, rpc_host).await?;
+let layout = fetch_layout(rpc_cli, rpc_host).await?;

-match revert_opt.version {
-None => {
-println!("Please pass the --version flag to ensure that you are writing the correct version of the cluster layout.");
-println!("To know the correct value of the --version flag, invoke `garage layout show` and review the proposed changes.");
-return Err(Error::Message("--version flag is missing".into()));
-}
-Some(v) => {
-if v != layout.version + 1 {
-return Err(Error::Message("Invalid value of --version flag".into()));
-}
-}
-}
-
-layout.staging.clear();
-layout.staging_hash = blake2sum(&rmp_to_vec_all_named(&layout.staging).unwrap()[..]);
-
-layout.version += 1;
+let layout = layout.revert_staged_changes(revert_opt.version)?;

 send_layout(rpc_cli, rpc_host, layout).await?;

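The `--version` safety check that the two CLI commands above used to perform inline now lives in `apply_staged_changes` / `revert_staged_changes` on the layout type. A simplified standalone sketch of that guard, assuming the method keeps the semantics of the removed CLI code (the flag must name exactly `current_version + 1`):

```rust
struct ClusterLayout {
	version: u64,
	// staging, roles, ... elided
}

impl ClusterLayout {
	// Simplified stand-in for the real apply_staged_changes; only the version guard is shown.
	fn apply_staged_changes(mut self, version: Option<u64>) -> Result<Self, String> {
		match version {
			None => Err(
				"Please pass the new layout version to ensure you are writing the correct \
				 version of the cluster layout; run `garage layout show` to review the \
				 proposed changes."
					.to_string(),
			),
			Some(v) if v != self.version + 1 => {
				Err("Invalid value of --version flag".to_string())
			}
			Some(v) => {
				// ... merge staged roles and recompute the partition assignation here ...
				self.version = v;
				Ok(self)
			}
		}
	}
}
```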
@@ -8,6 +8,7 @@ mod admin;
 mod cli;
 mod repair;
 mod server;
+mod tracing_setup;

 use std::net::SocketAddr;
 use std::path::PathBuf;
@@ -141,6 +142,7 @@ async fn cli_command(opt: Opt) -> Result<(), Error> {
 match cli_command_dispatch(opt.cmd, &system_rpc_endpoint, &admin_rpc_endpoint, id).await {
 Err(HelperError::Internal(i)) => Err(Error::Message(format!("Internal error: {}", i))),
 Err(HelperError::BadRequest(b)) => Err(Error::Message(b)),
+Err(e) => Err(Error::Message(format!("{}", e))),
 Ok(x) => Ok(x),
 }
 }
@@ -6,8 +6,7 @@ use garage_util::background::*;
 use garage_util::config::*;
 use garage_util::error::Error;

-use garage_admin::metrics::*;
-use garage_admin::tracing_setup::*;
+use garage_api::admin::api_server::AdminApiServer;
 use garage_api::s3::api_server::S3ApiServer;
 use garage_model::garage::Garage;
 use garage_web::run_web_server;
@@ -16,6 +15,7 @@ use garage_web::run_web_server;
 use garage_api::k2v::api_server::K2VApiServer;

 use crate::admin::*;
+use crate::tracing_setup::*;

 async fn wait_from(mut chan: watch::Receiver<bool>) {
 while !*chan.borrow() {
@@ -39,9 +39,6 @@ pub async fn run_server(config_file: PathBuf) -> Result<(), Error> {
 .open()
 .expect("Unable to open sled DB");

-info!("Initialize admin web server and metric backend...");
-let admin_server_init = AdminServer::init();
-
 info!("Initializing background runner...");
 let watch_cancel = netapp::util::watch_ctrl_c();
 let (background, await_background_done) = BackgroundRunner::new(16, watch_cancel.clone());
@@ -54,6 +51,9 @@ pub async fn run_server(config_file: PathBuf) -> Result<(), Error> {
 init_tracing(&export_to, garage.system.id)?;
 }

+info!("Initialize Admin API server and metrics collector...");
+let admin_server = AdminApiServer::new(garage.clone());
+
 let run_system = tokio::spawn(garage.system.clone().run(watch_cancel.clone()));

 info!("Create admin RPC handler...");
@@ -80,39 +80,41 @@ pub async fn run_server(config_file: PathBuf) -> Result<(), Error> {
 wait_from(watch_cancel.clone()),
 ));

-let admin_server = if let Some(admin_bind_addr) = config.admin.api_bind_addr {
-info!("Configure and run admin web server...");
-Some(tokio::spawn(
-admin_server_init.run(admin_bind_addr, wait_from(watch_cancel.clone())),
-))
-} else {
-None
-};
+info!("Launching Admin API server...");
+let admin_server = tokio::spawn(admin_server.run(wait_from(watch_cancel.clone())));

 // Stuff runs

 // When a cancel signal is sent, stuff stops
 if let Err(e) = s3_api_server.await? {
 warn!("S3 API server exited with error: {}", e);
+} else {
+info!("S3 API server exited without error.");
 }
 #[cfg(feature = "k2v")]
 if let Err(e) = k2v_api_server.await? {
 warn!("K2V API server exited with error: {}", e);
+} else {
+info!("K2V API server exited without error.");
 }
 if let Err(e) = web_server.await? {
 warn!("Web server exited with error: {}", e);
+} else {
+info!("Web server exited without error.");
 }
-if let Some(a) = admin_server {
-if let Err(e) = a.await? {
-warn!("Admin web server exited with error: {}", e);
-}
+if let Err(e) = admin_server.await? {
+warn!("Admin web server exited with error: {}", e);
+} else {
+info!("Admin API server exited without error.");
 }

 // Remove RPC handlers for system to break reference cycles
 garage.system.netapp.drop_all_handlers();
+opentelemetry::global::shutdown_tracer_provider();

 // Await for netapp RPC system to end
 run_system.await?;
+info!("Netapp exited");

 // Drop all references so that stuff can terminate properly
 drop(garage);
@@ -191,6 +191,10 @@ impl Garage {
 pub fn bucket_helper(&self) -> helper::bucket::BucketHelper {
 helper::bucket::BucketHelper(self)
 }

+pub fn key_helper(&self) -> helper::key::KeyHelper {
+helper::key::KeyHelper(self)
+}
 }

 #[cfg(feature = "k2v")]
@@ -1,15 +1,18 @@
-use garage_table::util::*;
 use garage_util::crdt::*;
 use garage_util::data::*;
 use garage_util::error::{Error as GarageError, OkOrMessage};
 use garage_util::time::*;

+use garage_table::util::*;
+
 use crate::bucket_alias_table::*;
 use crate::bucket_table::*;
 use crate::garage::Garage;
 use crate::helper::error::*;
-use crate::key_table::{Key, KeyFilter};
+use crate::helper::key::KeyHelper;
+use crate::key_table::*;
 use crate::permission::BucketKeyPerm;
+use crate::s3::object_table::ObjectFilter;

 pub struct BucketHelper<'a>(pub(crate) &'a Garage);

@@ -49,6 +52,23 @@ impl<'a> BucketHelper<'a> {
 }
 }

+#[allow(clippy::ptr_arg)]
+pub async fn resolve_bucket(&self, bucket_name: &String, api_key: &Key) -> Result<Uuid, Error> {
+let api_key_params = api_key
+.state
+.as_option()
+.ok_or_message("Key should not be deleted at this point")?;
+
+if let Some(Some(bucket_id)) = api_key_params.local_aliases.get(bucket_name) {
+Ok(*bucket_id)
+} else {
+Ok(self
+.resolve_global_bucket_name(bucket_name)
+.await?
+.ok_or_else(|| Error::NoSuchBucket(bucket_name.to_string()))?)
+}
+}
+
 /// Returns a Bucket if it is present in bucket table,
 /// even if it is in deleted state. Querying a non-existing
 /// bucket ID returns an internal error.
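For readers skimming the hunk above: `resolve_bucket` is the shared lookup the API handlers now use. It prefers an alias local to the requesting key over the global namespace, and only errors if neither resolves. A standalone sketch of that lookup order (simplified types, not the actual Garage structs):

```rust
use std::collections::HashMap;

type Uuid = [u8; 32]; // placeholder for garage_util::data::Uuid

// Simplified stand-ins for the key's local aliases and the global alias table.
fn resolve_bucket(
	bucket_name: &str,
	local_aliases: &HashMap<String, Option<Uuid>>,
	global: impl Fn(&str) -> Option<Uuid>,
) -> Result<Uuid, String> {
	// 1. A local alias attached to the requesting key wins.
	if let Some(Some(bucket_id)) = local_aliases.get(bucket_name) {
		return Ok(*bucket_id);
	}
	// 2. Otherwise fall back to the global bucket namespace.
	global(bucket_name).ok_or_else(|| format!("No such bucket: {}", bucket_name))
}
```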
@@ -71,64 +91,7 @@ impl<'a> BucketHelper<'a> {
 .get(&EmptyKey, &bucket_id)
 .await?
 .filter(|b| !b.is_deleted())
-.ok_or_bad_request(format!(
-"Bucket {:?} does not exist or has been deleted",
-bucket_id
-))
-}
-
-/// Returns a Key if it is present in key table,
-/// even if it is in deleted state. Querying a non-existing
-/// key ID returns an internal error.
-pub async fn get_internal_key(&self, key_id: &String) -> Result<Key, Error> {
-Ok(self
-.0
-.key_table
-.get(&EmptyKey, key_id)
-.await?
-.ok_or_message(format!("Key {} does not exist", key_id))?)
-}
-
-/// Returns a Key if it is present in key table,
-/// only if it is in non-deleted state.
-/// Querying a non-existing key ID or a deleted key
-/// returns a bad request error.
-pub async fn get_existing_key(&self, key_id: &String) -> Result<Key, Error> {
-self.0
-.key_table
-.get(&EmptyKey, key_id)
-.await?
-.filter(|b| !b.state.is_deleted())
-.ok_or_bad_request(format!("Key {} does not exist or has been deleted", key_id))
-}
-
-/// Returns a Key if it is present in key table,
-/// looking it up by key ID or by a match on its name,
-/// only if it is in non-deleted state.
-/// Querying a non-existing key ID or a deleted key
-/// returns a bad request error.
-pub async fn get_existing_matching_key(&self, pattern: &str) -> Result<Key, Error> {
-let candidates = self
-.0
-.key_table
-.get_range(
-&EmptyKey,
-None,
-Some(KeyFilter::MatchesAndNotDeleted(pattern.to_string())),
-10,
-EnumerationOrder::Forward,
-)
-.await?
-.into_iter()
-.collect::<Vec<_>>();
-if candidates.len() != 1 {
-Err(Error::BadRequest(format!(
-"{} matching keys",
-candidates.len()
-)))
-} else {
-Ok(candidates.into_iter().next().unwrap())
-}
+.ok_or_else(|| Error::NoSuchBucket(hex::encode(bucket_id)))
 }

 /// Sets a new alias for a bucket in global namespace.
@@ -142,10 +105,7 @@ impl<'a> BucketHelper<'a> {
 alias_name: &String,
 ) -> Result<(), Error> {
 if !is_valid_bucket_name(alias_name) {
-return Err(Error::BadRequest(format!(
-"{}: {}",
-alias_name, INVALID_BUCKET_NAME_MESSAGE
-)));
+return Err(Error::InvalidBucketName(alias_name.to_string()));
 }

 let mut bucket = self.get_existing_bucket(bucket_id).await?;
@@ -176,7 +136,7 @@ impl<'a> BucketHelper<'a> {

 let alias = match alias {
 None => BucketAlias::new(alias_name.clone(), alias_ts, Some(bucket_id))
-.ok_or_bad_request(format!("{}: {}", alias_name, INVALID_BUCKET_NAME_MESSAGE))?,
+.ok_or_else(|| Error::InvalidBucketName(alias_name.clone()))?,
 Some(mut a) => {
 a.state = Lww::raw(alias_ts, Some(bucket_id));
 a
@@ -264,7 +224,7 @@ impl<'a> BucketHelper<'a> {
 .bucket_alias_table
 .get(&EmptyKey, alias_name)
 .await?
-.ok_or_message(format!("Alias {} not found", alias_name))?;
+.ok_or_else(|| Error::NoSuchBucket(alias_name.to_string()))?;

 // Checks ok, remove alias
 let alias_ts = match bucket.state.as_option() {
@@ -303,15 +263,14 @@ impl<'a> BucketHelper<'a> {
 key_id: &String,
 alias_name: &String,
 ) -> Result<(), Error> {
+let key_helper = KeyHelper(self.0);
+
 if !is_valid_bucket_name(alias_name) {
-return Err(Error::BadRequest(format!(
-"{}: {}",
-alias_name, INVALID_BUCKET_NAME_MESSAGE
-)));
+return Err(Error::InvalidBucketName(alias_name.to_string()));
 }

 let mut bucket = self.get_existing_bucket(bucket_id).await?;
-let mut key = self.get_existing_key(key_id).await?;
+let mut key = key_helper.get_existing_key(key_id).await?;

 let mut key_param = key.state.as_option_mut().unwrap();

@@ -360,8 +319,10 @@ impl<'a> BucketHelper<'a> {
 key_id: &String,
 alias_name: &String,
 ) -> Result<(), Error> {
+let key_helper = KeyHelper(self.0);
+
 let mut bucket = self.get_existing_bucket(bucket_id).await?;
-let mut key = self.get_existing_key(key_id).await?;
+let mut key = key_helper.get_existing_key(key_id).await?;

 let mut bucket_p = bucket.state.as_option_mut().unwrap();

@@ -429,8 +390,10 @@ impl<'a> BucketHelper<'a> {
 key_id: &String,
 mut perm: BucketKeyPerm,
 ) -> Result<(), Error> {
+let key_helper = KeyHelper(self.0);
+
 let mut bucket = self.get_internal_bucket(bucket_id).await?;
-let mut key = self.get_internal_key(key_id).await?;
+let mut key = key_helper.get_internal_key(key_id).await?;

 if let Some(bstate) = bucket.state.as_option() {
 if let Some(kp) = bstate.authorized_keys.get(key_id) {
@@ -466,4 +429,47 @@ impl<'a> BucketHelper<'a> {

 Ok(())
 }
+
+pub async fn is_bucket_empty(&self, bucket_id: Uuid) -> Result<bool, Error> {
+let objects = self
+.0
+.object_table
+.get_range(
+&bucket_id,
+None,
+Some(ObjectFilter::IsData),
+10,
+EnumerationOrder::Forward,
+)
+.await?;
+if !objects.is_empty() {
+return Ok(false);
+}
+
+#[cfg(feature = "k2v")]
+{
+use garage_rpc::ring::Ring;
+use std::sync::Arc;
+
+let ring: Arc<Ring> = self.0.system.ring.borrow().clone();
+let k2vindexes = self
+.0
+.k2v
+.counter_table
+.table
+.get_range(
+&bucket_id,
+None,
+Some((DeletedFilter::NotDeleted, ring.layout.node_id_vec.clone())),
+10,
+EnumerationOrder::Forward,
+)
+.await?;
+if !k2vindexes.is_empty() {
+return Ok(false);
+}
+}
+
+Ok(true)
+}
 }
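One detail worth noting in the new `is_bucket_empty` above: the K2V check is compiled in only when the `k2v` cargo feature is enabled, via an inner `#[cfg]` block rather than a second function. A minimal standalone illustration of that pattern (placeholder counts instead of real table queries):

```rust
fn is_empty(object_count: usize) -> bool {
	if object_count != 0 {
		return false;
	}

	// This block only exists in builds with `--features k2v`;
	// without the feature it compiles away entirely.
	#[cfg(feature = "k2v")]
	{
		let k2v_index_count = 0; // stand-in for the counter-table query
		if k2v_index_count != 0 {
			return false;
		}
	}

	true
}
```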
@@ -10,6 +10,16 @@ pub enum Error {
 
 	#[error(display = "Bad request: {}", _0)]
 	BadRequest(String),
 
+	/// Bucket name is not valid according to AWS S3 specs
+	#[error(display = "Invalid bucket name: {}", _0)]
+	InvalidBucketName(String),
+
+	#[error(display = "Access key not found: {}", _0)]
+	NoSuchAccessKey(String),
+
+	#[error(display = "Bucket not found: {}", _0)]
+	NoSuchBucket(String),
 }
 
 impl From<netapp::error::Error> for Error {
src/model/helper/key.rs (new file, 102 lines)
@@ -0,0 +1,102 @@
+use garage_table::util::*;
+use garage_util::crdt::*;
+use garage_util::error::OkOrMessage;
+
+use crate::garage::Garage;
+use crate::helper::bucket::BucketHelper;
+use crate::helper::error::*;
+use crate::key_table::{Key, KeyFilter};
+use crate::permission::BucketKeyPerm;
+
+pub struct KeyHelper<'a>(pub(crate) &'a Garage);
+
+#[allow(clippy::ptr_arg)]
+impl<'a> KeyHelper<'a> {
+	/// Returns a Key if it is present in key table,
+	/// even if it is in deleted state. Querying a non-existing
+	/// key ID returns an internal error.
+	pub async fn get_internal_key(&self, key_id: &String) -> Result<Key, Error> {
+		Ok(self
+			.0
+			.key_table
+			.get(&EmptyKey, key_id)
+			.await?
+			.ok_or_message(format!("Key {} does not exist", key_id))?)
+	}
+
+	/// Returns a Key if it is present in key table,
+	/// only if it is in non-deleted state.
+	/// Querying a non-existing key ID or a deleted key
+	/// returns a bad request error.
+	pub async fn get_existing_key(&self, key_id: &String) -> Result<Key, Error> {
+		self.0
+			.key_table
+			.get(&EmptyKey, key_id)
+			.await?
+			.filter(|b| !b.state.is_deleted())
+			.ok_or_else(|| Error::NoSuchAccessKey(key_id.to_string()))
+	}
+
+	/// Returns a Key if it is present in key table,
+	/// looking it up by key ID or by a match on its name,
+	/// only if it is in non-deleted state.
+	/// Querying a non-existing key ID or a deleted key
+	/// returns a bad request error.
+	pub async fn get_existing_matching_key(&self, pattern: &str) -> Result<Key, Error> {
+		let candidates = self
+			.0
+			.key_table
+			.get_range(
+				&EmptyKey,
+				None,
+				Some(KeyFilter::MatchesAndNotDeleted(pattern.to_string())),
+				10,
+				EnumerationOrder::Forward,
+			)
+			.await?
+			.into_iter()
+			.collect::<Vec<_>>();
+		if candidates.len() != 1 {
+			Err(Error::BadRequest(format!(
+				"{} matching keys",
+				candidates.len()
+			)))
+		} else {
+			Ok(candidates.into_iter().next().unwrap())
+		}
+	}
+
+	/// Deletes an API access key
+	pub async fn delete_key(&self, key: &mut Key) -> Result<(), Error> {
+		let bucket_helper = BucketHelper(self.0);
+
+		let state = key.state.as_option_mut().unwrap();
+
+		// --- done checking, now commit ---
+		// (the step at unset_local_bucket_alias will fail if a bucket
+		// does not have another alias, the deletion will be
+		// interrupted in the middle if that happens)
+
+		// 1. Delete local aliases
+		for (alias, _, to) in state.local_aliases.items().iter() {
+			if let Some(bucket_id) = to {
+				bucket_helper
+					.unset_local_bucket_alias(*bucket_id, &key.key_id, alias)
+					.await?;
+			}
+		}
+
+		// 2. Remove permissions on all authorized buckets
+		for (ab_id, _auth) in state.authorized_buckets.items().iter() {
+			bucket_helper
+				.set_bucket_key_permissions(*ab_id, &key.key_id, BucketKeyPerm::NO_PERMISSIONS)
+				.await?;
+		}
+
+		// 3. Actually delete key
+		key.state = Deletable::delete();
+		self.0.key_table.insert(key).await?;
+
+		Ok(())
+	}
+}
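For orientation, this helper is driven by the key-related admin endpoints and the CLI. A hedged sketch of a typical call sequence from elsewhere in the model crate, assuming a `garage: &Garage` in scope (the pattern string is made up, and this is not code from the PR itself):

```rust
// Illustrative sketch only: look a key up by ID prefix or name, then delete it.
async fn remove_key_by_pattern(garage: &Garage, pattern: &str) -> Result<(), Error> {
    let key_helper = KeyHelper(garage);
    // Errors out unless exactly one non-deleted key matches the pattern.
    let mut key = key_helper.get_existing_matching_key(pattern).await?;
    // Drops its local aliases and bucket permissions, then marks it deleted.
    key_helper.delete_key(&mut key).await?;
    Ok(())
}
```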
@@ -1,2 +1,3 @@
 pub mod bucket;
 pub mod error;
+pub mod key;
@@ -15,11 +15,11 @@ path = "lib.rs"
 
 [dependencies]
 garage_util = { version = "0.7.0", path = "../util" }
-garage_admin = { version = "0.7.0", path = "../admin" }
 
 arc-swap = "1.0"
 bytes = "1.0"
 gethostname = "0.2"
+git-version = "0.3.4"
 hex = "0.4"
 tracing = "0.1.30"
 rand = "0.8"
@@ -5,6 +5,7 @@ use serde::{Deserialize, Serialize};
 
 use garage_util::crdt::{AutoCrdt, Crdt, LwwMap};
 use garage_util::data::*;
+use garage_util::error::*;
 
 use crate::ring::*;
 
@@ -100,6 +101,61 @@ impl ClusterLayout {
 		}
 	}
 
+	pub fn apply_staged_changes(mut self, version: Option<u64>) -> Result<Self, Error> {
+		match version {
+			None => {
+				let error = r#"
+Please pass the new layout version number to ensure that you are writing the correct version of the cluster layout.
+To know the correct value of the new layout version, invoke `garage layout show` and review the proposed changes.
+				"#;
+				return Err(Error::Message(error.into()));
+			}
+			Some(v) => {
+				if v != self.version + 1 {
+					return Err(Error::Message("Invalid new layout version".into()));
+				}
+			}
+		}
+
+		self.roles.merge(&self.staging);
+		self.roles.retain(|(_, _, v)| v.0.is_some());
+
+		if !self.calculate_partition_assignation() {
+			return Err(Error::Message("Could not calculate new assignation of partitions to nodes. This can happen if there are less nodes than the desired number of copies of your data (see the replication_mode configuration parameter).".into()));
+		}
+
+		self.staging.clear();
+		self.staging_hash = blake2sum(&rmp_to_vec_all_named(&self.staging).unwrap()[..]);
+
+		self.version += 1;
+
+		Ok(self)
+	}
+
+	pub fn revert_staged_changes(mut self, version: Option<u64>) -> Result<Self, Error> {
+		match version {
+			None => {
+				let error = r#"
+Please pass the new layout version number to ensure that you are writing the correct version of the cluster layout.
+To know the correct value of the new layout version, invoke `garage layout show` and review the proposed changes.
+				"#;
+				return Err(Error::Message(error.into()));
+			}
+			Some(v) => {
+				if v != self.version + 1 {
+					return Err(Error::Message("Invalid new layout version".into()));
+				}
+			}
+		}
+
+		self.staging.clear();
+		self.staging_hash = blake2sum(&rmp_to_vec_all_named(&self.staging).unwrap()[..]);
+
+		self.version += 1;
+
+		Ok(self)
+	}
+
 	/// Returns a list of IDs of nodes that currently have
 	/// a role in the cluster
 	pub fn node_ids(&self) -> &[Uuid] {
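The version argument acts as a safety interlock for ApplyClusterLayout and RevertClusterLayout: both methods accept only exactly `current version + 1`, so a stale draft cannot be applied by accident. A small sketch of the expected calling sequence, assuming a `ClusterLayout` value with staged role changes (illustrative, not code from this PR):

```rust
// Illustrative sketch only: the caller must echo back the next version number.
fn apply_next(layout: ClusterLayout) -> Result<ClusterLayout, Error> {
    let next_version = layout.version + 1;
    // Passing None, or any value other than current + 1, returns an error instead.
    let layout = layout.apply_staged_changes(Some(next_version))?;
    assert_eq!(layout.version, next_version);
    Ok(layout)
}
```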
@@ -312,6 +312,84 @@ impl System {
 		);
 	}
 
+	// ---- Administrative operations (directly available and
+	// also available through RPC) ----
+
+	pub fn garage_version(&self) -> &'static str {
+		option_env!("GIT_VERSION").unwrap_or(git_version::git_version!(
+			prefix = "git:",
+			cargo_prefix = "cargo:",
+			fallback = "unknown"
+		))
+	}
+
+	pub fn get_known_nodes(&self) -> Vec<KnownNodeInfo> {
+		let node_status = self.node_status.read().unwrap();
+		let known_nodes = self
+			.fullmesh
+			.get_peer_list()
+			.iter()
+			.map(|n| KnownNodeInfo {
+				id: n.id.into(),
+				addr: n.addr,
+				is_up: n.is_up(),
+				last_seen_secs_ago: n.last_seen.map(|t| (Instant::now() - t).as_secs()),
+				status: node_status
+					.get(&n.id.into())
+					.cloned()
+					.map(|(_, st)| st)
+					.unwrap_or(NodeStatus {
+						hostname: "?".to_string(),
+						replication_factor: 0,
+						cluster_layout_version: 0,
+						cluster_layout_staging_hash: Hash::from([0u8; 32]),
+					}),
+			})
+			.collect::<Vec<_>>();
+		known_nodes
+	}
+
+	pub fn get_cluster_layout(&self) -> ClusterLayout {
+		self.ring.borrow().layout.clone()
+	}
+
+	pub async fn update_cluster_layout(
+		self: &Arc<Self>,
+		layout: &ClusterLayout,
+	) -> Result<(), Error> {
+		self.handle_advertise_cluster_layout(layout).await?;
+		Ok(())
+	}
+
+	pub async fn connect(&self, node: &str) -> Result<(), Error> {
+		let (pubkey, addrs) = parse_and_resolve_peer_addr(node).ok_or_else(|| {
+			Error::Message(format!(
+				"Unable to parse or resolve node specification: {}",
+				node
+			))
+		})?;
+		let mut errors = vec![];
+		for ip in addrs.iter() {
+			match self
+				.netapp
+				.clone()
+				.try_connect(*ip, pubkey)
+				.await
+				.err_context(CONNECT_ERROR_MESSAGE)
+			{
+				Ok(()) => return Ok(()),
+				Err(e) => {
+					errors.push((*ip, e));
+				}
+			}
+		}
+		if errors.len() == 1 {
+			Err(Error::Message(errors[0].1.to_string()))
+		} else {
+			Err(Error::Message(format!("{:?}", errors)))
+		}
+	}
+
 	// ---- INTERNALS ----
 
 	async fn advertise_to_consul(self: Arc<Self>) -> Result<(), Error> {
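These public methods are the building blocks that the general admin endpoints (GetClusterStatus, ConnectClusterNodes, GetClusterLayout, UpdateClusterLayout) and the RPC handlers below both call into. A hedged sketch of how code holding an `Arc<System>` might use them; the handler shape, the error type alias and the node specification string are assumptions for illustration, not the actual admin handlers from this PR:

```rust
// Illustrative sketch only; `Error` here stands for garage_util's error type.
async fn demo(system: Arc<System>) -> Result<(), Error> {
    // GetClusterStatus-style data:
    let nodes = system.get_known_nodes();
    println!("{} known nodes, {}", nodes.len(), system.garage_version());

    // ConnectClusterNodes-style call; "<node_id>@10.0.0.2:3901" is a placeholder
    // for the `<public key>@<ip>:<port>` form parse_and_resolve_peer_addr expects.
    system.connect("<node_id>@10.0.0.2:3901").await?;

    // GetClusterLayout / UpdateClusterLayout round trip:
    let layout = system.get_cluster_layout();
    system.update_cluster_layout(&layout).await?;
    Ok(())
}
```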
@@ -384,32 +462,11 @@ impl System {
 		self.local_status.swap(Arc::new(new_si));
 	}
 
+	// --- RPC HANDLERS ---
+
 	async fn handle_connect(&self, node: &str) -> Result<SystemRpc, Error> {
-		let (pubkey, addrs) = parse_and_resolve_peer_addr(node).ok_or_else(|| {
-			Error::Message(format!(
-				"Unable to parse or resolve node specification: {}",
-				node
-			))
-		})?;
-		let mut errors = vec![];
-		for ip in addrs.iter() {
-			match self
-				.netapp
-				.clone()
-				.try_connect(*ip, pubkey)
-				.await
-				.err_context(CONNECT_ERROR_MESSAGE)
-			{
-				Ok(()) => return Ok(SystemRpc::Ok),
-				Err(e) => {
-					errors.push((*ip, e));
-				}
-			}
-		}
-		return Err(Error::Message(format!(
-			"Could not connect to specified peers. Errors: {:?}",
-			errors
-		)));
+		self.connect(node).await?;
+		Ok(SystemRpc::Ok)
 	}
 
 	fn handle_pull_cluster_layout(&self) -> SystemRpc {
@@ -418,28 +475,7 @@ impl System {
 	}
 
 	fn handle_get_known_nodes(&self) -> SystemRpc {
-		let node_status = self.node_status.read().unwrap();
-		let known_nodes = self
-			.fullmesh
-			.get_peer_list()
-			.iter()
-			.map(|n| KnownNodeInfo {
-				id: n.id.into(),
-				addr: n.addr,
-				is_up: n.is_up(),
-				last_seen_secs_ago: n.last_seen.map(|t| (Instant::now() - t).as_secs()),
-				status: node_status
-					.get(&n.id.into())
-					.cloned()
-					.map(|(_, st)| st)
-					.unwrap_or(NodeStatus {
-						hostname: "?".to_string(),
-						replication_factor: 0,
-						cluster_layout_version: 0,
-						cluster_layout_staging_hash: Hash::from([0u8; 32]),
-					}),
-			})
-			.collect::<Vec<_>>();
+		let known_nodes = self.get_known_nodes();
 		SystemRpc::ReturnKnownNodes(known_nodes)
 	}
 
@@ -476,7 +512,7 @@ impl System {
 	}
 
 	async fn handle_advertise_cluster_layout(
-		self: Arc<Self>,
+		self: &Arc<Self>,
 		adv: &ClusterLayout,
 	) -> Result<SystemRpc, Error> {
 		let update_ring = self.update_ring.lock().await;
@@ -121,6 +121,10 @@ pub struct WebConfig {
 pub struct AdminConfig {
 	/// Address and port to bind for admin API serving
 	pub api_bind_addr: Option<SocketAddr>,
+	/// Bearer token to use to scrape metrics
+	pub metrics_token: Option<String>,
+	/// Bearer token to use to access Admin API endpoints
+	pub admin_token: Option<String>,
 	/// OTLP server to where to export traces
 	pub trace_sink: Option<String>,
 }
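Clients of the metrics and admin endpoints are expected to present these tokens as HTTP bearer tokens. A minimal sketch of the kind of check a handler could perform against a configured token; this is an assumption about usage, not the verification code shipped in this PR:

```rust
// Illustrative sketch only: compare the Authorization header to a configured token.
// If no token is configured, the endpoint is treated as open.
fn bearer_token_ok(req: &hyper::Request<hyper::Body>, expected: &Option<String>) -> bool {
    match expected {
        None => true,
        Some(token) => req
            .headers()
            .get(hyper::header::AUTHORIZATION)
            .and_then(|v| v.to_str().ok())
            .map(|v| v == format!("Bearer {}", token))
            .unwrap_or(false),
    }
}
```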
@@ -140,6 +140,11 @@ where
 		self.vals.clear();
 	}
 
+	/// Retain only values that match a certain predicate
+	pub fn retain(&mut self, pred: impl FnMut(&(K, u64, V)) -> bool) {
+		self.vals.retain(pred);
+	}
+
 	/// Get a reference to the value assigned to a key
 	pub fn get(&self, k: &K) -> Option<&V> {
 		match self.vals.binary_search_by(|(k2, _, _)| k2.cmp(k)) {
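This is the primitive that `apply_staged_changes` above relies on to drop role entries whose staged value is a removal. A tiny illustrative fragment mirroring that call site (the `ClusterLayout` and `Error`-free wrapper are assumed for context only):

```rust
// Illustrative: after merging staged entries, keep only (key, timestamp, value)
// triples whose value still carries a role; tombstoned roles are dropped.
fn merge_and_prune(layout: &mut ClusterLayout) {
    layout.roles.merge(&layout.staging);
    layout.roles.retain(|(_, _, v)| v.0.is_some());
}
```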
@@ -2,57 +2,47 @@ use err_derive::Error;
 use hyper::header::HeaderValue;
 use hyper::{HeaderMap, StatusCode};
 
-use garage_util::error::Error as GarageError;
+use garage_api::generic_server::ApiError;
 
 /// Errors of this crate
 #[derive(Debug, Error)]
 pub enum Error {
 	/// An error received from the API crate
 	#[error(display = "API error: {}", _0)]
-	ApiError(#[error(source)] garage_api::Error),
+	ApiError(garage_api::s3::error::Error),
 
-	// Category: internal error
-	/// Error internal to garage
-	#[error(display = "Internal error: {}", _0)]
-	InternalError(#[error(source)] GarageError),
-
 	/// The file does not exist
 	#[error(display = "Not found")]
 	NotFound,
 
-	/// The request contained an invalid UTF-8 sequence in its path or in other parameters
-	#[error(display = "Invalid UTF-8: {}", _0)]
-	InvalidUtf8(#[error(source)] std::str::Utf8Error),
-
-	/// The client send a header with invalid value
-	#[error(display = "Invalid header value: {}", _0)]
-	InvalidHeader(#[error(source)] hyper::header::ToStrError),
-
 	/// The client sent a request without host, or with unsupported method
 	#[error(display = "Bad request: {}", _0)]
 	BadRequest(String),
 }
 
+impl<T> From<T> for Error
+where
+	garage_api::s3::error::Error: From<T>,
+{
+	fn from(err: T) -> Self {
+		Error::ApiError(garage_api::s3::error::Error::from(err))
+	}
+}
+
 impl Error {
 	/// Transform errors into http status code
 	pub fn http_status_code(&self) -> StatusCode {
 		match self {
 			Error::NotFound => StatusCode::NOT_FOUND,
 			Error::ApiError(e) => e.http_status_code(),
-			Error::InternalError(
-				GarageError::Timeout
-				| GarageError::RemoteError(_)
-				| GarageError::Quorum(_, _, _, _),
-			) => StatusCode::SERVICE_UNAVAILABLE,
-			Error::InternalError(_) => StatusCode::INTERNAL_SERVER_ERROR,
-			_ => StatusCode::BAD_REQUEST,
+			Error::BadRequest(_) => StatusCode::BAD_REQUEST,
 		}
 	}
 
 	pub fn add_headers(&self, header_map: &mut HeaderMap<HeaderValue>) {
 		#[allow(clippy::single_match)]
 		match self {
-			Error::ApiError(e) => e.add_headers(header_map),
+			Error::ApiError(e) => e.add_http_headers(header_map),
 			_ => (),
 		}
 	}
 }
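The removed `InternalError`, `InvalidUtf8` and `InvalidHeader` variants are no longer needed because the new blanket `From` impl funnels any error convertible into `garage_api::s3::error::Error` through the `ApiError` variant. A hedged sketch of what that buys call sites, mirroring the unchanged `?` on `decode_utf8()` in `path_to_key` below (the wrapper function itself is illustrative):

```rust
// Illustrative sketch only: a UTF-8 decoding failure converts into Error::ApiError
// automatically via the blanket impl, so `?` keeps working without a local variant.
fn decode_path(path: &str) -> Result<std::borrow::Cow<'_, str>, Error> {
    Ok(percent_encoding::percent_decode_str(path).decode_utf8()?)
}
```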
@@ -18,9 +18,11 @@ use opentelemetry::{
 
 use crate::error::*;
 
-use garage_api::error::{Error as ApiError, OkOrBadRequest, OkOrInternalError};
 use garage_api::helpers::{authority_to_host, host_to_bucket};
 use garage_api::s3::cors::{add_cors_headers, find_matching_cors_rule, handle_options_for_bucket};
+use garage_api::s3::error::{
+	CommonErrorDerivative, Error as ApiError, OkOrBadRequest, OkOrInternalError,
+};
 use garage_api::s3::get::{handle_get, handle_head};
 
 use garage_model::garage::Garage;
@@ -207,7 +209,7 @@ async fn serve_file(garage: Arc<Garage>, req: &Request<Body>) -> Result<Response
 		Method::OPTIONS => handle_options_for_bucket(req, &bucket),
 		Method::HEAD => handle_head(garage.clone(), req, bucket_id, &key, None).await,
 		Method::GET => handle_get(garage.clone(), req, bucket_id, &key, None).await,
-		_ => Err(ApiError::BadRequest("HTTP method not supported".into())),
+		_ => Err(ApiError::bad_request("HTTP method not supported")),
 	}
 	.map_err(Error::from);
 
@@ -290,9 +292,7 @@ fn path_to_key<'a>(path: &'a str, index: &str) -> Result<Cow<'a, str>, Error> {
 	let path_utf8 = percent_encoding::percent_decode_str(path).decode_utf8()?;
 
 	if !path_utf8.starts_with('/') {
-		return Err(Error::BadRequest(
-			"Path must start with a / (slash)".to_string(),
-		));
+		return Err(Error::BadRequest("Path must start with a / (slash)".into()));
 	}
 
 	match path_utf8.chars().last() {