[dep-upgrade-202402] wip: port to http/hyper crates v1

This commit is contained in:
Alex 2024-02-05 18:49:54 +01:00
parent 6e69a1fffc
commit 0bb5b77530
Signed by untrusted user: lx
GPG key ID: 0E496D15096376BE
28 changed files with 765 additions and 477 deletions

153
Cargo.lock generated
View file

@ -211,7 +211,7 @@ dependencies = [
"fastrand", "fastrand",
"hex", "hex",
"http 0.2.11", "http 0.2.11",
"hyper", "hyper 0.14.28",
"ring 0.17.7", "ring 0.17.7",
"time", "time",
"tokio", "tokio",
@ -248,7 +248,7 @@ dependencies = [
"bytes", "bytes",
"fastrand", "fastrand",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"tracing", "tracing",
@ -298,7 +298,7 @@ dependencies = [
"aws-types", "aws-types",
"bytes", "bytes",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"once_cell", "once_cell",
"percent-encoding", "percent-encoding",
"regex-lite", "regex-lite",
@ -426,7 +426,7 @@ dependencies = [
"crc32fast", "crc32fast",
"hex", "hex",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"md-5", "md-5",
"pin-project-lite", "pin-project-lite",
"sha1", "sha1",
@ -458,7 +458,7 @@ dependencies = [
"bytes-utils", "bytes-utils",
"futures-core", "futures-core",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"once_cell", "once_cell",
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
@ -497,10 +497,10 @@ dependencies = [
"aws-smithy-types", "aws-smithy-types",
"bytes", "bytes",
"fastrand", "fastrand",
"h2", "h2 0.3.24",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"hyper", "hyper 0.14.28",
"hyper-rustls 0.24.2", "hyper-rustls 0.24.2",
"once_cell", "once_cell",
"pin-project-lite", "pin-project-lite",
@ -537,7 +537,7 @@ dependencies = [
"bytes-utils", "bytes-utils",
"futures-core", "futures-core",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"itoa", "itoa",
"num-integer", "num-integer",
"pin-project-lite", "pin-project-lite",
@ -1298,8 +1298,8 @@ dependencies = [
"git-version", "git-version",
"hex", "hex",
"hmac", "hmac",
"http 0.2.11", "http 1.0.0",
"hyper", "hyper 1.1.0",
"k2v-client", "k2v-client",
"kuska-sodiumoxide", "kuska-sodiumoxide",
"mktemp", "mktemp",
@ -1343,11 +1343,12 @@ dependencies = [
"garage_util", "garage_util",
"hex", "hex",
"hmac", "hmac",
"http 0.2.11", "http 1.0.0",
"http-body-util",
"http-range", "http-range",
"httpdate", "httpdate",
"hyper", "hyper 1.1.0",
"hyperlocal", "hyper-util",
"idna", "idna",
"md-5", "md-5",
"multer", "multer",
@ -1509,8 +1510,8 @@ dependencies = [
"garage_db", "garage_db",
"hex", "hex",
"hexdump", "hexdump",
"http 0.2.11", "http 1.0.0",
"hyper", "hyper 1.1.0",
"lazy_static", "lazy_static",
"mktemp", "mktemp",
"netapp", "netapp",
@ -1537,9 +1538,10 @@ dependencies = [
"garage_model", "garage_model",
"garage_table", "garage_table",
"garage_util", "garage_util",
"http 0.2.11", "http 1.0.0",
"hyper", "http-body-util",
"hyperlocal", "hyper 1.1.0",
"hyper-util",
"opentelemetry", "opentelemetry",
"percent-encoding", "percent-encoding",
"tokio", "tokio",
@ -1633,6 +1635,25 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "h2"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943"
dependencies = [
"bytes",
"fnv",
"futures-core",
"futures-sink",
"futures-util",
"http 1.0.0",
"indexmap 2.2.2",
"slab",
"tokio",
"tokio-util 0.7.10",
"tracing",
]
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.12.3" version = "0.12.3"
@ -1784,6 +1805,29 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "http-body"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643"
dependencies = [
"bytes",
"http 1.0.0",
]
[[package]]
name = "http-body-util"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840"
dependencies = [
"bytes",
"futures-util",
"http 1.0.0",
"http-body 1.0.0",
"pin-project-lite",
]
[[package]] [[package]]
name = "http-range" name = "http-range"
version = "0.1.5" version = "0.1.5"
@ -1818,9 +1862,9 @@ dependencies = [
"futures-channel", "futures-channel",
"futures-core", "futures-core",
"futures-util", "futures-util",
"h2", "h2 0.3.24",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"httparse", "httparse",
"httpdate", "httpdate",
"itoa", "itoa",
@ -1832,6 +1876,26 @@ dependencies = [
"want", "want",
] ]
[[package]]
name = "hyper"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5aa53871fc917b1a9ed87b683a5d86db645e23acb32c2e0785a353e522fb75"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"h2 0.4.2",
"http 1.0.0",
"http-body 1.0.0",
"httparse",
"httpdate",
"itoa",
"pin-project-lite",
"tokio",
"want",
]
[[package]] [[package]]
name = "hyper-rustls" name = "hyper-rustls"
version = "0.23.2" version = "0.23.2"
@ -1839,7 +1903,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c"
dependencies = [ dependencies = [
"http 0.2.11", "http 0.2.11",
"hyper", "hyper 0.14.28",
"log", "log",
"rustls 0.20.9", "rustls 0.20.9",
"rustls-native-certs", "rustls-native-certs",
@ -1855,7 +1919,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
dependencies = [ dependencies = [
"futures-util", "futures-util",
"http 0.2.11", "http 0.2.11",
"hyper", "hyper 0.14.28",
"log", "log",
"rustls 0.21.10", "rustls 0.21.10",
"rustls-native-certs", "rustls-native-certs",
@ -1869,23 +1933,30 @@ version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"
dependencies = [ dependencies = [
"hyper", "hyper 0.14.28",
"pin-project-lite", "pin-project-lite",
"tokio", "tokio",
"tokio-io-timeout", "tokio-io-timeout",
] ]
[[package]] [[package]]
name = "hyperlocal" name = "hyper-util"
version = "0.8.0" version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fafdf7b2b2de7c9784f76e02c0935e65a8117ec3b768644379983ab333ac98c" checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa"
dependencies = [ dependencies = [
"bytes",
"futures-channel",
"futures-util", "futures-util",
"hex", "http 1.0.0",
"hyper", "http-body 1.0.0",
"pin-project", "hyper 1.1.0",
"pin-project-lite",
"socket2",
"tokio", "tokio",
"tower",
"tower-service",
"tracing",
] ]
[[package]] [[package]]
@ -2051,8 +2122,8 @@ dependencies = [
"clap 4.4.18", "clap 4.4.18",
"format_table", "format_table",
"hex", "hex",
"http 0.2.11", "http 1.0.0",
"hyper", "hyper 1.1.0",
"hyper-rustls 0.24.2", "hyper-rustls 0.24.2",
"log", "log",
"percent-encoding", "percent-encoding",
@ -2108,8 +2179,8 @@ dependencies = [
"either", "either",
"futures", "futures",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"hyper", "hyper 0.14.28",
"hyper-rustls 0.23.2", "hyper-rustls 0.23.2",
"hyper-timeout", "hyper-timeout",
"jsonpath_lib", "jsonpath_lib",
@ -3104,10 +3175,10 @@ dependencies = [
"encoding_rs", "encoding_rs",
"futures-core", "futures-core",
"futures-util", "futures-util",
"h2", "h2 0.3.24",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"hyper", "hyper 0.14.28",
"hyper-rustls 0.24.2", "hyper-rustls 0.24.2",
"ipnet", "ipnet",
"js-sys", "js-sys",
@ -4014,10 +4085,10 @@ dependencies = [
"bytes", "bytes",
"futures-core", "futures-core",
"futures-util", "futures-util",
"h2", "h2 0.3.24",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"hyper", "hyper 0.14.28",
"hyper-timeout", "hyper-timeout",
"percent-encoding", "percent-encoding",
"pin-project", "pin-project",
@ -4077,7 +4148,7 @@ dependencies = [
"futures-core", "futures-core",
"futures-util", "futures-util",
"http 0.2.11", "http 0.2.11",
"http-body", "http-body 0.4.6",
"http-range-header", "http-range-header",
"pin-project-lite", "pin-project-lite",
"tower-layer", "tower-layer",

179
Cargo.nix
View file

@ -33,7 +33,7 @@ args@{
ignoreLockHash, ignoreLockHash,
}: }:
let let
nixifiedLockHash = "8cb28c3a1c83ae5f6a9285b72c92830344521b18d10973434e2706b4033b8907"; nixifiedLockHash = "b09e8e1592cb6ec8175708b13ee4a2578aa697c18a94d5a545328078ab263b2f";
workspaceSrc = if args.workspaceSrc == null then ./. else args.workspaceSrc; workspaceSrc = if args.workspaceSrc == null then ./. else args.workspaceSrc;
currentLockHash = builtins.hashFile "sha256" (workspaceSrc + /Cargo.lock); currentLockHash = builtins.hashFile "sha256" (workspaceSrc + /Cargo.lock);
lockHashIgnored = if ignoreLockHash lockHashIgnored = if ignoreLockHash
@ -1916,8 +1916,8 @@ in
base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out;
chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.33" { inherit profileName; }).out; chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.33" { inherit profileName; }).out;
hmac = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hmac."0.12.1" { inherit profileName; }).out; hmac = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hmac."0.12.1" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.1.0" { inherit profileName; }).out;
k2v_client = (rustPackages."unknown".k2v-client."0.0.4" { inherit profileName; }).out; k2v_client = (rustPackages."unknown".k2v-client."0.0.4" { inherit profileName; }).out;
mktemp = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".mktemp."0.5.1" { inherit profileName; }).out; mktemp = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".mktemp."0.5.1" { inherit profileName; }).out;
serde_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.113" { inherit profileName; }).out; serde_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.113" { inherit profileName; }).out;
@ -1954,11 +1954,12 @@ in
garage_util = (rustPackages."unknown".garage_util."0.9.1" { inherit profileName; }).out; garage_util = (rustPackages."unknown".garage_util."0.9.1" { inherit profileName; }).out;
hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out;
hmac = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hmac."0.12.1" { inherit profileName; }).out; hmac = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hmac."0.12.1" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
http_body_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.0" { inherit profileName; }).out;
http_range = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-range."0.1.5" { inherit profileName; }).out; http_range = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-range."0.1.5" { inherit profileName; }).out;
httpdate = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httpdate."1.0.3" { inherit profileName; }).out; httpdate = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httpdate."1.0.3" { inherit profileName; }).out;
hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.1.0" { inherit profileName; }).out;
hyperlocal = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyperlocal."0.8.0" { inherit profileName; }).out; hyper_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out;
idna = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".idna."0.5.0" { inherit profileName; }).out; idna = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".idna."0.5.0" { inherit profileName; }).out;
md5 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".md-5."0.10.6" { inherit profileName; }).out; md5 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".md-5."0.10.6" { inherit profileName; }).out;
multer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".multer."3.0.0" { inherit profileName; }).out; multer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".multer."3.0.0" { inherit profileName; }).out;
@ -2172,8 +2173,8 @@ in
garage_db = (rustPackages."unknown".garage_db."0.9.1" { inherit profileName; }).out; garage_db = (rustPackages."unknown".garage_db."0.9.1" { inherit profileName; }).out;
hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out;
hexdump = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hexdump."0.1.1" { inherit profileName; }).out; hexdump = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hexdump."0.1.1" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.1.0" { inherit profileName; }).out;
lazy_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".lazy_static."1.4.0" { inherit profileName; }).out; lazy_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".lazy_static."1.4.0" { inherit profileName; }).out;
netapp = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".netapp."0.10.0" { inherit profileName; }).out; netapp = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".netapp."0.10.0" { inherit profileName; }).out;
opentelemetry = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry."0.17.0" { inherit profileName; }).out; opentelemetry = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry."0.17.0" { inherit profileName; }).out;
@ -2207,9 +2208,10 @@ in
garage_model = (rustPackages."unknown".garage_model."0.9.1" { inherit profileName; }).out; garage_model = (rustPackages."unknown".garage_model."0.9.1" { inherit profileName; }).out;
garage_table = (rustPackages."unknown".garage_table."0.9.1" { inherit profileName; }).out; garage_table = (rustPackages."unknown".garage_table."0.9.1" { inherit profileName; }).out;
garage_util = (rustPackages."unknown".garage_util."0.9.1" { inherit profileName; }).out; garage_util = (rustPackages."unknown".garage_util."0.9.1" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; http_body_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.0" { inherit profileName; }).out;
hyperlocal = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyperlocal."0.8.0" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.1.0" { inherit profileName; }).out;
hyper_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out;
opentelemetry = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry."0.17.0" { inherit profileName; }).out; opentelemetry = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".opentelemetry."0.17.0" { inherit profileName; }).out;
percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out;
tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out;
@ -2324,6 +2326,26 @@ in
}; };
}); });
"registry+https://github.com/rust-lang/crates.io-index".h2."0.4.2" = overridableMkRustCrate (profileName: rec {
name = "h2";
version = "0.4.2";
registry = "registry+https://github.com/rust-lang/crates.io-index";
src = fetchCratesIo { inherit name version; sha256 = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943"; };
dependencies = {
bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out;
fnv = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fnv."1.0.7" { inherit profileName; }).out;
futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out;
futures_sink = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-sink."0.3.30" { inherit profileName; }).out;
futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
indexmap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."2.2.2" { inherit profileName; }).out;
slab = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out;
tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out;
tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out;
tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out;
};
});
"registry+https://github.com/rust-lang/crates.io-index".hashbrown."0.12.3" = overridableMkRustCrate (profileName: rec { "registry+https://github.com/rust-lang/crates.io-index".hashbrown."0.12.3" = overridableMkRustCrate (profileName: rec {
name = "hashbrown"; name = "hashbrown";
version = "0.12.3"; version = "0.12.3";
@ -2531,6 +2553,31 @@ in
}; };
}); });
"registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" = overridableMkRustCrate (profileName: rec {
name = "http-body";
version = "1.0.0";
registry = "registry+https://github.com/rust-lang/crates.io-index";
src = fetchCratesIo { inherit name version; sha256 = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643"; };
dependencies = {
bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
};
});
"registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.0" = overridableMkRustCrate (profileName: rec {
name = "http-body-util";
version = "0.1.0";
registry = "registry+https://github.com/rust-lang/crates.io-index";
src = fetchCratesIo { inherit name version; sha256 = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840"; };
dependencies = {
bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out;
futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" { inherit profileName; }).out;
pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out;
};
});
"registry+https://github.com/rust-lang/crates.io-index".http-range."0.1.5" = overridableMkRustCrate (profileName: rec { "registry+https://github.com/rust-lang/crates.io-index".http-range."0.1.5" = overridableMkRustCrate (profileName: rec {
name = "http-range"; name = "http-range";
version = "0.1.5"; version = "0.1.5";
@ -2569,10 +2616,8 @@ in
registry = "registry+https://github.com/rust-lang/crates.io-index"; registry = "registry+https://github.com/rust-lang/crates.io-index";
src = fetchCratesIo { inherit name version; sha256 = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80"; }; src = fetchCratesIo { inherit name version; sha256 = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80"; };
features = builtins.concatLists [ features = builtins.concatLists [
[ "backports" ]
[ "client" ] [ "client" ]
[ "default" ] (lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "default")
[ "deprecated" ]
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "full") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "full")
[ "h2" ] [ "h2" ]
[ "http1" ] [ "http1" ]
@ -2603,6 +2648,34 @@ in
}; };
}); });
"registry+https://github.com/rust-lang/crates.io-index".hyper."1.1.0" = overridableMkRustCrate (profileName: rec {
name = "hyper";
version = "1.1.0";
registry = "registry+https://github.com/rust-lang/crates.io-index";
src = fetchCratesIo { inherit name version; sha256 = "fb5aa53871fc917b1a9ed87b683a5d86db645e23acb32c2e0785a353e522fb75"; };
features = builtins.concatLists [
[ "client" ]
[ "default" ]
[ "http1" ]
[ "http2" ]
[ "server" ]
];
dependencies = {
bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out;
futures_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-channel."0.3.30" { inherit profileName; }).out;
futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out;
h2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.4.2" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" { inherit profileName; }).out;
httparse = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httparse."1.8.0" { inherit profileName; }).out;
httpdate = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httpdate."1.0.3" { inherit profileName; }).out;
itoa = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.10" { inherit profileName; }).out;
pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out;
tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out;
want = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".want."0.3.1" { inherit profileName; }).out;
};
});
"registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.23.2" = overridableMkRustCrate (profileName: rec { "registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.23.2" = overridableMkRustCrate (profileName: rec {
name = "hyper-rustls"; name = "hyper-rustls";
version = "0.23.2"; version = "0.23.2";
@ -2671,20 +2744,36 @@ in
}; };
}); });
"registry+https://github.com/rust-lang/crates.io-index".hyperlocal."0.8.0" = overridableMkRustCrate (profileName: rec { "registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" = overridableMkRustCrate (profileName: rec {
name = "hyperlocal"; name = "hyper-util";
version = "0.8.0"; version = "0.1.3";
registry = "registry+https://github.com/rust-lang/crates.io-index"; registry = "registry+https://github.com/rust-lang/crates.io-index";
src = fetchCratesIo { inherit name version; sha256 = "0fafdf7b2b2de7c9784f76e02c0935e65a8117ec3b768644379983ab333ac98c"; }; src = fetchCratesIo { inherit name version; sha256 = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa"; };
features = builtins.concatLists [ features = builtins.concatLists [
[ "client" ]
[ "client-legacy" ]
[ "default" ]
[ "full" ]
[ "http1" ]
[ "http2" ]
[ "server" ] [ "server" ]
[ "server-auto" ]
[ "service" ]
[ "tokio" ]
]; ];
dependencies = { dependencies = {
bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out;
futures_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-channel."0.3.30" { inherit profileName; }).out;
futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out;
hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" { inherit profileName; }).out;
pin_project = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.4" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.1.0" { inherit profileName; }).out;
pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out;
socket2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".socket2."0.5.5" { inherit profileName; }).out;
tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out;
tower = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower."0.4.13" { inherit profileName; }).out;
tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out;
tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out;
}; };
}); });
@ -2917,8 +3006,8 @@ in
${ if rootFeatures' ? "k2v-client/clap" || rootFeatures' ? "k2v-client/cli" then "clap" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".clap."4.4.18" { inherit profileName; }).out; ${ if rootFeatures' ? "k2v-client/clap" || rootFeatures' ? "k2v-client/cli" then "clap" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".clap."4.4.18" { inherit profileName; }).out;
${ if rootFeatures' ? "k2v-client/cli" || rootFeatures' ? "k2v-client/format_table" then "format_table" else null } = (rustPackages."unknown".format_table."0.1.1" { inherit profileName; }).out; ${ if rootFeatures' ? "k2v-client/cli" || rootFeatures' ? "k2v-client/format_table" then "format_table" else null } = (rustPackages."unknown".format_table."0.1.1" { inherit profileName; }).out;
hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out;
http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out;
hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.1.0" { inherit profileName; }).out;
hyper_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.24.2" { inherit profileName; }).out; hyper_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.24.2" { inherit profileName; }).out;
log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out;
percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out;
@ -5523,7 +5612,7 @@ in
[ "default" ] [ "default" ]
[ "fs" ] [ "fs" ]
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "full") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "full")
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "k2v-client/cli") "io-std") [ "io-std" ]
[ "io-util" ] [ "io-util" ]
[ "libc" ] [ "libc" ]
[ "macros" ] [ "macros" ]
@ -5795,43 +5884,43 @@ in
registry = "registry+https://github.com/rust-lang/crates.io-index"; registry = "registry+https://github.com/rust-lang/crates.io-index";
src = fetchCratesIo { inherit name version; sha256 = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"; }; src = fetchCratesIo { inherit name version; sha256 = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"; };
features = builtins.concatLists [ features = builtins.concatLists [
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "__common") [ "__common" ]
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "balance") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "balance")
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "buffer") (lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "buffer")
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "default") [ "default" ]
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "discover") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "discover")
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "filter") (lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "filter")
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "futures-core") [ "futures-core" ]
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "futures-util") [ "futures-util" ]
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "indexmap") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "indexmap")
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "limit") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "limit")
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "load") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "load")
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "log") [ "log" ]
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "make") [ "make" ]
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "pin-project") [ "pin-project" ]
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "pin-project-lite") [ "pin-project-lite" ]
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "rand") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "rand")
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "ready-cache") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "ready-cache")
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "slab") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "slab")
(lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "timeout") (lib.optional (rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp") "timeout")
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "tokio") [ "tokio" ]
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "tokio-util") (lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "tokio-util")
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "tracing") [ "tracing" ]
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "util") [ "util" ]
]; ];
dependencies = { dependencies = {
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "futures_core" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "futures_util" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" then "indexmap" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."1.9.3" { inherit profileName; }).out; ${ if rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" then "indexmap" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."1.9.3" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "pin_project" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.4" { inherit profileName; }).out; pin_project = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.4" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "pin_project_lite" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" then "rand" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; }).out; ${ if rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" then "rand" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" then "slab" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out; ${ if rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" then "slab" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "tokio" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "tokio_util" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; ${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "tokio_util" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "tower_layer" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.2" { inherit profileName; }).out; tower_layer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.2" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "tower_service" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out;
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "tracing" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out;
}; };
}); });
@ -5886,12 +5975,12 @@ in
features = builtins.concatLists [ features = builtins.concatLists [
[ "attributes" ] [ "attributes" ]
[ "default" ] [ "default" ]
(lib.optional (rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery") "log") [ "log" ]
[ "std" ] [ "std" ]
[ "tracing-attributes" ] [ "tracing-attributes" ]
]; ];
dependencies = { dependencies = {
${ if rootFeatures' ? "garage/kubernetes-discovery" || rootFeatures' ? "garage/opentelemetry-otlp" || rootFeatures' ? "garage/telemetry-otlp" || rootFeatures' ? "garage_rpc/kube" || rootFeatures' ? "garage_rpc/kubernetes-discovery" then "log" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out;
pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out;
tracing_attributes = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-attributes."0.1.27" { profileName = "__noProfile"; }).out; tracing_attributes = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-attributes."0.1.27" { profileName = "__noProfile"; }).out;
tracing_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-core."0.1.32" { inherit profileName; }).out; tracing_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-core."0.1.32" { inherit profileName; }).out;

View file

@ -93,11 +93,12 @@ schemars = "0.8"
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-manual-roots", "json"] } reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-manual-roots", "json"] }
form_urlencoded = "1.0.0" form_urlencoded = "1.0.0"
http = "0.2" http = "1.0"
httpdate = "1.0" httpdate = "1.0"
http-range = "0.1" http-range = "0.1"
hyper = { version = "0.14", features = ["server", "http1", "runtime", "tcp", "stream", "backports", "deprecated"] } http-body-util = "0.1"
hyperlocal = { version = "0.8.0", default-features = false, features = ["server"] } hyper = { version = "1.0", features = ["server", "http1"] }
hyper-util = { verion = "0.1", features = [ "full" ]}
multer = "3.0" multer = "3.0"
percent-encoding = "2.2" percent-encoding = "2.2"
roxmltree = "0.19" roxmltree = "0.19"

View file

@ -44,8 +44,9 @@ form_urlencoded.workspace = true
http.workspace = true http.workspace = true
httpdate.workspace = true httpdate.workspace = true
http-range.workspace = true http-range.workspace = true
http-body-util.workspace = true
hyper.workspace = true hyper.workspace = true
hyperlocal.workspace = true hyper-util.workspace = true
multer.workspace = true multer.workspace = true
percent-encoding.workspace = true percent-encoding.workspace = true
roxmltree.workspace = true roxmltree.workspace = true

View file

@ -5,7 +5,7 @@ use async_trait::async_trait;
use futures::future::Future; use futures::future::Future;
use http::header::{ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ALLOW}; use http::header::{ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ALLOW};
use hyper::{Body, Request, Response, StatusCode}; use hyper::{body::Incoming as IncomingBody, Request, Response, StatusCode};
use opentelemetry::trace::SpanRef; use opentelemetry::trace::SpanRef;
@ -27,7 +27,9 @@ use crate::admin::error::*;
use crate::admin::key::*; use crate::admin::key::*;
use crate::admin::router_v0; use crate::admin::router_v0;
use crate::admin::router_v1::{Authorization, Endpoint}; use crate::admin::router_v1::{Authorization, Endpoint};
use crate::helpers::host_to_bucket; use crate::helpers::*;
pub type ResBody = BoxBody<Error>;
pub struct AdminApiServer { pub struct AdminApiServer {
garage: Arc<Garage>, garage: Arc<Garage>,
@ -71,16 +73,19 @@ impl AdminApiServer {
.await .await
} }
fn handle_options(&self, _req: &Request<Body>) -> Result<Response<Body>, Error> { fn handle_options(&self, _req: &Request<IncomingBody>) -> Result<Response<ResBody>, Error> {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.header(ALLOW, "OPTIONS, GET, POST") .header(ALLOW, "OPTIONS, GET, POST")
.header(ACCESS_CONTROL_ALLOW_METHODS, "OPTIONS, GET, POST") .header(ACCESS_CONTROL_ALLOW_METHODS, "OPTIONS, GET, POST")
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*") .header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
.body(Body::empty())?) .body(empty_body())?)
} }
async fn handle_check_domain(&self, req: Request<Body>) -> Result<Response<Body>, Error> { async fn handle_check_domain(
&self,
req: Request<IncomingBody>,
) -> Result<Response<ResBody>, Error> {
let query_params: HashMap<String, String> = req let query_params: HashMap<String, String> = req
.uri() .uri()
.query() .query()
@ -104,7 +109,7 @@ impl AdminApiServer {
if self.check_domain(domain).await? { if self.check_domain(domain).await? {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::from(format!( .body(string_body(format!(
"Domain '{domain}' is managed by Garage" "Domain '{domain}' is managed by Garage"
)))?) )))?)
} else { } else {
@ -167,7 +172,7 @@ impl AdminApiServer {
} }
} }
fn handle_health(&self) -> Result<Response<Body>, Error> { fn handle_health(&self) -> Result<Response<ResBody>, Error> {
let health = self.garage.system.health(); let health = self.garage.system.health();
let (status, status_str) = match health.status { let (status, status_str) = match health.status {
@ -189,10 +194,10 @@ impl AdminApiServer {
Ok(Response::builder() Ok(Response::builder()
.status(status) .status(status)
.header(http::header::CONTENT_TYPE, "text/plain") .header(http::header::CONTENT_TYPE, "text/plain")
.body(Body::from(status_str))?) .body(string_body(status_str))?)
} }
fn handle_metrics(&self) -> Result<Response<Body>, Error> { fn handle_metrics(&self) -> Result<Response<ResBody>, Error> {
#[cfg(feature = "metrics")] #[cfg(feature = "metrics")]
{ {
use opentelemetry::trace::Tracer; use opentelemetry::trace::Tracer;
@ -212,7 +217,7 @@ impl AdminApiServer {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.header(http::header::CONTENT_TYPE, encoder.format_type()) .header(http::header::CONTENT_TYPE, encoder.format_type())
.body(Body::from(buffer))?) .body(bytes_body(buffer.into()))?)
} }
#[cfg(not(feature = "metrics"))] #[cfg(not(feature = "metrics"))]
Err(Error::bad_request( Err(Error::bad_request(
@ -229,7 +234,7 @@ impl ApiHandler for AdminApiServer {
type Endpoint = Endpoint; type Endpoint = Endpoint;
type Error = Error; type Error = Error;
fn parse_endpoint(&self, req: &Request<Body>) -> Result<Endpoint, Error> { fn parse_endpoint(&self, req: &Request<IncomingBody>) -> Result<Endpoint, Error> {
if req.uri().path().starts_with("/v0/") { if req.uri().path().starts_with("/v0/") {
let endpoint_v0 = router_v0::Endpoint::from_request(req)?; let endpoint_v0 = router_v0::Endpoint::from_request(req)?;
Endpoint::from_v0(endpoint_v0) Endpoint::from_v0(endpoint_v0)
@ -240,9 +245,9 @@ impl ApiHandler for AdminApiServer {
async fn handle( async fn handle(
&self, &self,
req: Request<Body>, req: Request<IncomingBody>,
endpoint: Endpoint, endpoint: Endpoint,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let expected_auth_header = let expected_auth_header =
match endpoint.authorization_type() { match endpoint.authorization_type() {
Authorization::None => None, Authorization::None => None,

View file

@ -1,7 +1,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use hyper::{Body, Request, Response, StatusCode}; use hyper::{body::Incoming as IncomingBody, Request, Response, StatusCode};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use garage_util::crdt::*; use garage_util::crdt::*;
@ -17,12 +17,13 @@ use garage_model::permission::*;
use garage_model::s3::mpu_table; use garage_model::s3::mpu_table;
use garage_model::s3::object_table::*; use garage_model::s3::object_table::*;
use crate::admin::api_server::ResBody;
use crate::admin::error::*; use crate::admin::error::*;
use crate::admin::key::ApiBucketKeyPerm; use crate::admin::key::ApiBucketKeyPerm;
use crate::common_error::CommonError; use crate::common_error::CommonError;
use crate::helpers::{json_ok_response, parse_json_body}; use crate::helpers::*;
pub async fn handle_list_buckets(garage: &Arc<Garage>) -> Result<Response<Body>, Error> { pub async fn handle_list_buckets(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
let buckets = garage let buckets = garage
.bucket_table .bucket_table
.get_range( .get_range(
@ -90,7 +91,7 @@ pub async fn handle_get_bucket_info(
garage: &Arc<Garage>, garage: &Arc<Garage>,
id: Option<String>, id: Option<String>,
global_alias: Option<String>, global_alias: Option<String>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let bucket_id = match (id, global_alias) { let bucket_id = match (id, global_alias) {
(Some(id), None) => parse_bucket_id(&id)?, (Some(id), None) => parse_bucket_id(&id)?,
(None, Some(ga)) => garage (None, Some(ga)) => garage
@ -111,7 +112,7 @@ pub async fn handle_get_bucket_info(
async fn bucket_info_results( async fn bucket_info_results(
garage: &Arc<Garage>, garage: &Arc<Garage>,
bucket_id: Uuid, bucket_id: Uuid,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let bucket = garage let bucket = garage
.bucket_helper() .bucket_helper()
.get_existing_bucket(bucket_id) .get_existing_bucket(bucket_id)
@ -268,8 +269,8 @@ struct GetBucketInfoKey {
pub async fn handle_create_bucket( pub async fn handle_create_bucket(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let req = parse_json_body::<CreateBucketRequest>(req).await?; let req = parse_json_body::<CreateBucketRequest>(req).await?;
if let Some(ga) = &req.global_alias { if let Some(ga) = &req.global_alias {
@ -360,7 +361,7 @@ struct CreateBucketLocalAlias {
pub async fn handle_delete_bucket( pub async fn handle_delete_bucket(
garage: &Arc<Garage>, garage: &Arc<Garage>,
id: String, id: String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let helper = garage.bucket_helper(); let helper = garage.bucket_helper();
let bucket_id = parse_bucket_id(&id)?; let bucket_id = parse_bucket_id(&id)?;
@ -403,14 +404,14 @@ pub async fn handle_delete_bucket(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
pub async fn handle_update_bucket( pub async fn handle_update_bucket(
garage: &Arc<Garage>, garage: &Arc<Garage>,
id: String, id: String,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let req = parse_json_body::<UpdateBucketRequest>(req).await?; let req = parse_json_body::<UpdateBucketRequest>(req).await?;
let bucket_id = parse_bucket_id(&id)?; let bucket_id = parse_bucket_id(&id)?;
@ -470,9 +471,9 @@ struct UpdateBucketWebsiteAccess {
pub async fn handle_bucket_change_key_perm( pub async fn handle_bucket_change_key_perm(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
new_perm_flag: bool, new_perm_flag: bool,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let req = parse_json_body::<BucketKeyPermChangeRequest>(req).await?; let req = parse_json_body::<BucketKeyPermChangeRequest>(req).await?;
let bucket_id = parse_bucket_id(&req.bucket_id)?; let bucket_id = parse_bucket_id(&req.bucket_id)?;
@ -526,7 +527,7 @@ pub async fn handle_global_alias_bucket(
garage: &Arc<Garage>, garage: &Arc<Garage>,
bucket_id: String, bucket_id: String,
alias: String, alias: String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let bucket_id = parse_bucket_id(&bucket_id)?; let bucket_id = parse_bucket_id(&bucket_id)?;
garage garage
@ -541,7 +542,7 @@ pub async fn handle_global_unalias_bucket(
garage: &Arc<Garage>, garage: &Arc<Garage>,
bucket_id: String, bucket_id: String,
alias: String, alias: String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let bucket_id = parse_bucket_id(&bucket_id)?; let bucket_id = parse_bucket_id(&bucket_id)?;
garage garage
@ -557,7 +558,7 @@ pub async fn handle_local_alias_bucket(
bucket_id: String, bucket_id: String,
access_key_id: String, access_key_id: String,
alias: String, alias: String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let bucket_id = parse_bucket_id(&bucket_id)?; let bucket_id = parse_bucket_id(&bucket_id)?;
garage garage
@ -573,7 +574,7 @@ pub async fn handle_local_unalias_bucket(
bucket_id: String, bucket_id: String,
access_key_id: String, access_key_id: String,
alias: String, alias: String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let bucket_id = parse_bucket_id(&bucket_id)?; let bucket_id = parse_bucket_id(&bucket_id)?;
garage garage

View file

@ -1,7 +1,7 @@
use std::net::SocketAddr; use std::net::SocketAddr;
use std::sync::Arc; use std::sync::Arc;
use hyper::{Body, Request, Response}; use hyper::{body::Incoming as IncomingBody, Request, Response};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use garage_util::crdt::*; use garage_util::crdt::*;
@ -11,10 +11,11 @@ use garage_rpc::layout;
use garage_model::garage::Garage; use garage_model::garage::Garage;
use crate::admin::api_server::ResBody;
use crate::admin::error::*; use crate::admin::error::*;
use crate::helpers::{json_ok_response, parse_json_body}; use crate::helpers::{json_ok_response, parse_json_body};
pub async fn handle_get_cluster_status(garage: &Arc<Garage>) -> Result<Response<Body>, Error> { pub async fn handle_get_cluster_status(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
let res = GetClusterStatusResponse { let res = GetClusterStatusResponse {
node: hex::encode(garage.system.id), node: hex::encode(garage.system.id),
garage_version: garage_util::version::garage_version(), garage_version: garage_util::version::garage_version(),
@ -39,7 +40,7 @@ pub async fn handle_get_cluster_status(garage: &Arc<Garage>) -> Result<Response<
Ok(json_ok_response(&res)?) Ok(json_ok_response(&res)?)
} }
pub async fn handle_get_cluster_health(garage: &Arc<Garage>) -> Result<Response<Body>, Error> { pub async fn handle_get_cluster_health(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
use garage_rpc::system::ClusterHealthStatus; use garage_rpc::system::ClusterHealthStatus;
let health = garage.system.health(); let health = garage.system.health();
let health = ClusterHealth { let health = ClusterHealth {
@ -61,8 +62,8 @@ pub async fn handle_get_cluster_health(garage: &Arc<Garage>) -> Result<Response<
pub async fn handle_connect_cluster_nodes( pub async fn handle_connect_cluster_nodes(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let req = parse_json_body::<Vec<String>>(req).await?; let req = parse_json_body::<Vec<String>>(req).await?;
let res = futures::future::join_all(req.iter().map(|node| garage.system.connect(node))) let res = futures::future::join_all(req.iter().map(|node| garage.system.connect(node)))
@ -83,7 +84,7 @@ pub async fn handle_connect_cluster_nodes(
Ok(json_ok_response(&res)?) Ok(json_ok_response(&res)?)
} }
pub async fn handle_get_cluster_layout(garage: &Arc<Garage>) -> Result<Response<Body>, Error> { pub async fn handle_get_cluster_layout(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
let res = format_cluster_layout(&garage.system.get_cluster_layout()); let res = format_cluster_layout(&garage.system.get_cluster_layout());
Ok(json_ok_response(&res)?) Ok(json_ok_response(&res)?)
@ -203,8 +204,8 @@ struct KnownNodeResp {
pub async fn handle_update_cluster_layout( pub async fn handle_update_cluster_layout(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let updates = parse_json_body::<UpdateClusterLayoutRequest>(req).await?; let updates = parse_json_body::<UpdateClusterLayoutRequest>(req).await?;
let mut layout = garage.system.get_cluster_layout(); let mut layout = garage.system.get_cluster_layout();
@ -243,8 +244,8 @@ pub async fn handle_update_cluster_layout(
pub async fn handle_apply_cluster_layout( pub async fn handle_apply_cluster_layout(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let param = parse_json_body::<ApplyRevertLayoutRequest>(req).await?; let param = parse_json_body::<ApplyRevertLayoutRequest>(req).await?;
let layout = garage.system.get_cluster_layout(); let layout = garage.system.get_cluster_layout();
@ -261,8 +262,8 @@ pub async fn handle_apply_cluster_layout(
pub async fn handle_revert_cluster_layout( pub async fn handle_revert_cluster_layout(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let param = parse_json_body::<ApplyRevertLayoutRequest>(req).await?; let param = parse_json_body::<ApplyRevertLayoutRequest>(req).await?;
let layout = garage.system.get_cluster_layout(); let layout = garage.system.get_cluster_layout();

View file

@ -1,13 +1,13 @@
use err_derive::Error; use err_derive::Error;
use hyper::header::HeaderValue; use hyper::header::HeaderValue;
use hyper::{Body, HeaderMap, StatusCode}; use hyper::{HeaderMap, StatusCode};
pub use garage_model::helper::error::Error as HelperError; pub use garage_model::helper::error::Error as HelperError;
use crate::common_error::CommonError; use crate::common_error::CommonError;
pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError}; pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
use crate::generic_server::ApiError; use crate::generic_server::ApiError;
use crate::helpers::CustomApiErrorBody; use crate::helpers::{BytesBody, CustomApiErrorBody};
/// Errors of this crate /// Errors of this crate
#[derive(Debug, Error)] #[derive(Debug, Error)]
@ -77,14 +77,14 @@ impl ApiError for Error {
header_map.append(header::CONTENT_TYPE, "application/json".parse().unwrap()); header_map.append(header::CONTENT_TYPE, "application/json".parse().unwrap());
} }
fn http_body(&self, garage_region: &str, path: &str) -> Body { fn http_body(&self, garage_region: &str, path: &str) -> BytesBody {
let error = CustomApiErrorBody { let error = CustomApiErrorBody {
code: self.code().to_string(), code: self.code().to_string(),
message: format!("{}", self), message: format!("{}", self),
path: path.to_string(), path: path.to_string(),
region: garage_region.to_string(), region: garage_region.to_string(),
}; };
Body::from(serde_json::to_string_pretty(&error).unwrap_or_else(|_| { let error_str = serde_json::to_string_pretty(&error).unwrap_or_else(|_| {
r#" r#"
{ {
"code": "InternalError", "code": "InternalError",
@ -92,6 +92,7 @@ impl ApiError for Error {
} }
"# "#
.into() .into()
})) });
BytesBody::from(bytes::Bytes::from(error_str.into_bytes()))
} }
} }
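Note (sketch, not from the commit): with hyper 1.x the error body is built as an http_body_util::Full<Bytes> (aliased BytesBody above) instead of hyper::Body. Roughly, assuming serde_json as in the code above:

use bytes::Bytes;
use http_body_util::Full;

// Serialize an error payload, falling back to a static JSON string on failure,
// then wrap the bytes in a Full body (always delivered as a single data frame).
fn json_error_body<T: serde::Serialize>(payload: &T) -> Full<Bytes> {
    let s = serde_json::to_string_pretty(payload)
        .unwrap_or_else(|_| r#"{"code":"InternalError"}"#.to_string());
    Full::new(Bytes::from(s.into_bytes()))
}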

View file

@ -1,7 +1,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use hyper::{Body, Request, Response, StatusCode}; use hyper::{body::Incoming as IncomingBody, Request, Response, StatusCode};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use garage_table::*; use garage_table::*;
@ -9,10 +9,11 @@ use garage_table::*;
use garage_model::garage::Garage; use garage_model::garage::Garage;
use garage_model::key_table::*; use garage_model::key_table::*;
use crate::admin::api_server::ResBody;
use crate::admin::error::*; use crate::admin::error::*;
use crate::helpers::{is_default, json_ok_response, parse_json_body}; use crate::helpers::*;
pub async fn handle_list_keys(garage: &Arc<Garage>) -> Result<Response<Body>, Error> { pub async fn handle_list_keys(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
let res = garage let res = garage
.key_table .key_table
.get_range( .get_range(
@ -45,7 +46,7 @@ pub async fn handle_get_key_info(
id: Option<String>, id: Option<String>,
search: Option<String>, search: Option<String>,
show_secret_key: bool, show_secret_key: bool,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let key = if let Some(id) = id { let key = if let Some(id) = id {
garage.key_helper().get_existing_key(&id).await? garage.key_helper().get_existing_key(&id).await?
} else if let Some(search) = search { } else if let Some(search) = search {
@ -62,8 +63,8 @@ pub async fn handle_get_key_info(
pub async fn handle_create_key( pub async fn handle_create_key(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let req = parse_json_body::<CreateKeyRequest>(req).await?; let req = parse_json_body::<CreateKeyRequest>(req).await?;
let key = Key::new(req.name.as_deref().unwrap_or("Unnamed key")); let key = Key::new(req.name.as_deref().unwrap_or("Unnamed key"));
@ -80,8 +81,8 @@ struct CreateKeyRequest {
pub async fn handle_import_key( pub async fn handle_import_key(
garage: &Arc<Garage>, garage: &Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let req = parse_json_body::<ImportKeyRequest>(req).await?; let req = parse_json_body::<ImportKeyRequest>(req).await?;
let prev_key = garage.key_table.get(&EmptyKey, &req.access_key_id).await?; let prev_key = garage.key_table.get(&EmptyKey, &req.access_key_id).await?;
@ -111,8 +112,8 @@ struct ImportKeyRequest {
pub async fn handle_update_key( pub async fn handle_update_key(
garage: &Arc<Garage>, garage: &Arc<Garage>,
id: String, id: String,
req: Request<Body>, req: Request<IncomingBody>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let req = parse_json_body::<UpdateKeyRequest>(req).await?; let req = parse_json_body::<UpdateKeyRequest>(req).await?;
let mut key = garage.key_helper().get_existing_key(&id).await?; let mut key = garage.key_helper().get_existing_key(&id).await?;
@ -146,7 +147,10 @@ struct UpdateKeyRequest {
deny: Option<KeyPerm>, deny: Option<KeyPerm>,
} }
pub async fn handle_delete_key(garage: &Arc<Garage>, id: String) -> Result<Response<Body>, Error> { pub async fn handle_delete_key(
garage: &Arc<Garage>,
id: String,
) -> Result<Response<ResBody>, Error> {
let mut key = garage.key_helper().get_existing_key(&id).await?; let mut key = garage.key_helper().get_existing_key(&id).await?;
key.state.as_option().unwrap(); key.state.as_option().unwrap();
@ -155,14 +159,14 @@ pub async fn handle_delete_key(garage: &Arc<Garage>, id: String) -> Result<Respo
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
async fn key_info_results( async fn key_info_results(
garage: &Arc<Garage>, garage: &Arc<Garage>,
key: Key, key: Key,
show_secret: bool, show_secret: bool,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let mut relevant_buckets = HashMap::new(); let mut relevant_buckets = HashMap::new();
let key_state = key.state.as_option().unwrap(); let key_state = key.state.as_option().unwrap();

View file

@ -6,15 +6,16 @@ use async_trait::async_trait;
use futures::future::Future; use futures::future::Future;
use http_body_util::BodyExt;
use hyper::header::HeaderValue; use hyper::header::HeaderValue;
use hyper::server::conn::AddrStream; use hyper::server::conn::http1;
use hyper::service::{make_service_fn, service_fn}; use hyper::service::service_fn;
use hyper::{Body, Request, Response, Server}; use hyper::{body::Incoming as IncomingBody, Request, Response};
use hyper::{HeaderMap, StatusCode}; use hyper::{HeaderMap, StatusCode};
use hyper_util::rt::TokioIo;
use hyperlocal::UnixServerExt; use tokio::io::{AsyncRead, AsyncWrite};
use tokio::net::{TcpListener, UnixListener};
use tokio::net::UnixStream;
use opentelemetry::{ use opentelemetry::{
global, global,
@ -28,6 +29,8 @@ use garage_util::forwarded_headers;
use garage_util::metrics::{gen_trace_id, RecordDuration}; use garage_util::metrics::{gen_trace_id, RecordDuration};
use garage_util::socket_address::UnixOrTCPSocketAddress; use garage_util::socket_address::UnixOrTCPSocketAddress;
use crate::helpers::{BoxBody, BytesBody};
pub(crate) trait ApiEndpoint: Send + Sync + 'static { pub(crate) trait ApiEndpoint: Send + Sync + 'static {
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
fn add_span_attributes(&self, span: SpanRef<'_>); fn add_span_attributes(&self, span: SpanRef<'_>);
@ -36,7 +39,7 @@ pub(crate) trait ApiEndpoint: Send + Sync + 'static {
pub trait ApiError: std::error::Error + Send + Sync + 'static { pub trait ApiError: std::error::Error + Send + Sync + 'static {
fn http_status_code(&self) -> StatusCode; fn http_status_code(&self) -> StatusCode;
fn add_http_headers(&self, header_map: &mut HeaderMap<HeaderValue>); fn add_http_headers(&self, header_map: &mut HeaderMap<HeaderValue>);
fn http_body(&self, garage_region: &str, path: &str) -> Body; fn http_body(&self, garage_region: &str, path: &str) -> BytesBody;
} }
#[async_trait] #[async_trait]
@ -47,12 +50,12 @@ pub(crate) trait ApiHandler: Send + Sync + 'static {
type Endpoint: ApiEndpoint; type Endpoint: ApiEndpoint;
type Error: ApiError; type Error: ApiError;
fn parse_endpoint(&self, r: &Request<Body>) -> Result<Self::Endpoint, Self::Error>; fn parse_endpoint(&self, r: &Request<IncomingBody>) -> Result<Self::Endpoint, Self::Error>;
async fn handle( async fn handle(
&self, &self,
req: Request<Body>, req: Request<IncomingBody>,
endpoint: Self::Endpoint, endpoint: Self::Endpoint,
) -> Result<Response<Body>, Self::Error>; ) -> Result<Response<BoxBody<Self::Error>>, Self::Error>;
} }
pub(crate) struct ApiServer<A: ApiHandler> { pub(crate) struct ApiServer<A: ApiHandler> {
@ -101,72 +104,79 @@ impl<A: ApiHandler> ApiServer<A> {
unix_bind_addr_mode: Option<u32>, unix_bind_addr_mode: Option<u32>,
shutdown_signal: impl Future<Output = ()>, shutdown_signal: impl Future<Output = ()>,
) -> Result<(), GarageError> { ) -> Result<(), GarageError> {
let tcp_service = make_service_fn(|conn: &AddrStream| {
let this = self.clone();
let client_addr = conn.remote_addr();
async move {
Ok::<_, GarageError>(service_fn(move |req: Request<Body>| {
let this = this.clone();
this.handler(req, client_addr.to_string())
}))
}
});
let unix_service = make_service_fn(|_: &UnixStream| {
let this = self.clone();
let path = bind_addr.to_string();
async move {
Ok::<_, GarageError>(service_fn(move |req: Request<Body>| {
let this = this.clone();
this.handler(req, path.clone())
}))
}
});
info!( info!(
"{} API server listening on {}", "{} API server listening on {}",
A::API_NAME_DISPLAY, A::API_NAME_DISPLAY,
bind_addr bind_addr
); );
tokio::pin!(shutdown_signal);
match bind_addr { match bind_addr {
UnixOrTCPSocketAddress::TCPSocket(addr) => { UnixOrTCPSocketAddress::TCPSocket(addr) => {
Server::bind(&addr) let listener = TcpListener::bind(addr).await?;
.serve(tcp_service)
.with_graceful_shutdown(shutdown_signal) loop {
.await? let (stream, client_addr) = tokio::select! {
acc = listener.accept() => acc?,
_ = &mut shutdown_signal => break,
};
self.launch_handler(stream, client_addr.to_string());
}
} }
UnixOrTCPSocketAddress::UnixSocket(ref path) => { UnixOrTCPSocketAddress::UnixSocket(ref path) => {
if path.exists() { if path.exists() {
fs::remove_file(path)? fs::remove_file(path)?
} }
let bound = Server::bind_unix(path)?; let listener = UnixListener::bind(path)?;
fs::set_permissions( fs::set_permissions(
path, path,
Permissions::from_mode(unix_bind_addr_mode.unwrap_or(0o222)), Permissions::from_mode(unix_bind_addr_mode.unwrap_or(0o222)),
)?; )?;
bound loop {
.serve(unix_service) let (stream, _) = tokio::select! {
.with_graceful_shutdown(shutdown_signal) acc = listener.accept() => acc?,
.await?; _ = &mut shutdown_signal => break,
};
self.launch_handler(stream, path.display().to_string());
}
} }
}; };
Ok(()) Ok(())
} }
fn launch_handler<S>(self: &Arc<Self>, stream: S, client_addr: String)
where
S: AsyncRead + AsyncWrite + Send + Sync + 'static,
{
let this = self.clone();
let io = TokioIo::new(stream);
let serve =
move |req: Request<IncomingBody>| this.clone().handler(req, client_addr.to_string());
tokio::task::spawn(async move {
let io = Box::pin(io);
if let Err(e) = http1::Builder::new()
.serve_connection(io, service_fn(serve))
.await
{
debug!("Error handling HTTP connection: {}", e);
}
});
}
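Note (illustrative sketch, not the Garage code): launch_handler follows the standard hyper 1.x pattern of accepting a tokio stream, wrapping it in hyper_util::rt::TokioIo, and serving the connection with the http1 builder. A self-contained version of that pattern, without the tracing and shutdown handling above:

use std::convert::Infallible;

use bytes::Bytes;
use http_body_util::Full;
use hyper::{body::Incoming, server::conn::http1, service::service_fn, Request, Response};
use hyper_util::rt::TokioIo;
use tokio::net::TcpListener;

async fn hello(_req: Request<Incoming>) -> Result<Response<Full<Bytes>>, Infallible> {
    Ok(Response::new(Full::new(Bytes::from_static(b"hello"))))
}

async fn serve(listener: TcpListener) -> std::io::Result<()> {
    loop {
        let (stream, _addr) = listener.accept().await?;
        // TokioIo adapts tokio's AsyncRead/AsyncWrite to hyper's I/O traits.
        let io = TokioIo::new(stream);
        tokio::spawn(async move {
            if let Err(e) = http1::Builder::new()
                .serve_connection(io, service_fn(hello))
                .await
            {
                eprintln!("connection error: {e}");
            }
        });
    }
}

The code above additionally pins the I/O object because launch_handler is generic over the stream type, and ties the accept loop to the shutdown signal with tokio::select!.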
async fn handler( async fn handler(
self: Arc<Self>, self: Arc<Self>,
req: Request<Body>, req: Request<IncomingBody>,
addr: String, addr: String,
) -> Result<Response<Body>, GarageError> { ) -> Result<Response<BoxBody<A::Error>>, GarageError> {
let uri = req.uri().clone(); let uri = req.uri().clone();
if let Ok(forwarded_for_ip_addr) = if let Ok(forwarded_for_ip_addr) =
@ -205,7 +215,7 @@ impl<A: ApiHandler> ApiServer<A> {
Ok(x) Ok(x)
} }
Err(e) => { Err(e) => {
let body: Body = e.http_body(&self.region, uri.path()); let body = e.http_body(&self.region, uri.path());
let mut http_error_builder = Response::builder().status(e.http_status_code()); let mut http_error_builder = Response::builder().status(e.http_status_code());
if let Some(header_map) = http_error_builder.headers_mut() { if let Some(header_map) = http_error_builder.headers_mut() {
@ -219,12 +229,15 @@ impl<A: ApiHandler> ApiServer<A> {
} else { } else {
info!("Response: error {}, {}", e.http_status_code(), e); info!("Response: error {}, {}", e.http_status_code(), e);
} }
Ok(http_error) Ok(http_error.map(|body| BoxBody::new(body.map_err(|_| unreachable!()))))
} }
} }
} }
async fn handler_stage2(&self, req: Request<Body>) -> Result<Response<Body>, A::Error> { async fn handler_stage2(
&self,
req: Request<IncomingBody>,
) -> Result<Response<BoxBody<A::Error>>, A::Error> {
let endpoint = self.api_handler.parse_endpoint(&req)?; let endpoint = self.api_handler.parse_endpoint(&req)?;
debug!("Endpoint: {}", endpoint.name()); debug!("Endpoint: {}", endpoint.name());

View file

@ -1,4 +1,5 @@
use hyper::{body::HttpBody, Body, Request, Response}; use http_body_util::{BodyExt, Full as FullBody};
use hyper::{body::Incoming as IncomingBody, Request, Response};
use idna::domain_to_unicode; use idna::domain_to_unicode;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -138,18 +139,36 @@ pub fn key_after_prefix(pfx: &str) -> Option<String> {
None None
} }
pub async fn parse_json_body<T: for<'de> Deserialize<'de>>(req: Request<Body>) -> Result<T, Error> { // =============== body helpers =================
pub type BytesBody = FullBody<bytes::Bytes>;
pub type BoxBody<E> = http_body_util::combinators::BoxBody<bytes::Bytes, E>;
pub fn string_body<E>(s: String) -> BoxBody<E> {
bytes_body(bytes::Bytes::from(s.into_bytes()))
}
pub fn bytes_body<E>(b: bytes::Bytes) -> BoxBody<E> {
BoxBody::new(FullBody::new(b).map_err(|_| unreachable!()))
}
pub fn empty_body<E>() -> BoxBody<E> {
BoxBody::new(http_body_util::Empty::new().map_err(|_| unreachable!()))
}
pub async fn parse_json_body<T>(req: Request<IncomingBody>) -> Result<T, Error>
where
T: for<'de> Deserialize<'de>,
{
let body = req.into_body().collect().await?.to_bytes(); let body = req.into_body().collect().await?.to_bytes();
let resp: T = serde_json::from_slice(&body).ok_or_bad_request("Invalid JSON")?; let resp: T = serde_json::from_slice(&body).ok_or_bad_request("Invalid JSON")?;
Ok(resp) Ok(resp)
} }
pub fn json_ok_response<T: Serialize>(res: &T) -> Result<Response<Body>, Error> { pub fn json_ok_response<E, T: Serialize>(res: &T) -> Result<Response<BoxBody<E>>, Error> {
let resp_json = serde_json::to_string_pretty(res).map_err(garage_util::error::Error::from)?; let resp_json = serde_json::to_string_pretty(res).map_err(garage_util::error::Error::from)?;
Ok(Response::builder() Ok(Response::builder()
.status(hyper::StatusCode::OK) .status(hyper::StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json") .header(http::header::CONTENT_TYPE, "application/json")
.body(Body::from(resp_json))?) .body(string_body(resp_json))?)
} }
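Note (sketch, not part of the diff): the helpers above build on http-body-util's combinators. Full and Empty are infallible body types, so the crate's helpers map their never-occurring error into the caller's error type before boxing; the stripped-down illustration below simply keeps Infallible as the error type:

use bytes::Bytes;
use http_body_util::{combinators::BoxBody, BodyExt, Empty, Full};

type ExampleBody = BoxBody<Bytes, std::convert::Infallible>;

fn text_body(s: String) -> ExampleBody {
    // One data frame containing the whole string.
    Full::new(Bytes::from(s.into_bytes())).boxed()
}

fn no_body() -> ExampleBody {
    Empty::<Bytes>::new().boxed()
}

fn json_ok(s: String) -> http::Response<ExampleBody> {
    http::Response::builder()
        .status(http::StatusCode::OK)
        .header(http::header::CONTENT_TYPE, "application/json")
        .body(text_body(s))
        .unwrap()
}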
pub fn is_default<T: Default + PartialEq>(v: &T) -> bool { pub fn is_default<T: Default + PartialEq>(v: &T) -> bool {

View file

@ -4,7 +4,7 @@ use async_trait::async_trait;
use futures::future::Future; use futures::future::Future;
use hyper::header; use hyper::header;
use hyper::{Body, Request, Response}; use hyper::{body::Incoming as IncomingBody, Request, Response};
use opentelemetry::{trace::SpanRef, KeyValue}; use opentelemetry::{trace::SpanRef, KeyValue};
@ -34,6 +34,9 @@ use crate::s3::put::*;
use crate::s3::router::Endpoint; use crate::s3::router::Endpoint;
use crate::s3::website::*; use crate::s3::website::*;
pub use crate::signature::streaming::ReqBody;
pub type ResBody = BoxBody<Error>;
pub struct S3ApiServer { pub struct S3ApiServer {
garage: Arc<Garage>, garage: Arc<Garage>,
} }
@ -57,10 +60,10 @@ impl S3ApiServer {
async fn handle_request_without_bucket( async fn handle_request_without_bucket(
&self, &self,
_req: Request<Body>, _req: Request<ReqBody>,
api_key: Key, api_key: Key,
endpoint: Endpoint, endpoint: Endpoint,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
match endpoint { match endpoint {
Endpoint::ListBuckets => handle_list_buckets(&self.garage, &api_key).await, Endpoint::ListBuckets => handle_list_buckets(&self.garage, &api_key).await,
endpoint => Err(Error::NotImplemented(endpoint.name().to_owned())), endpoint => Err(Error::NotImplemented(endpoint.name().to_owned())),
@ -76,7 +79,7 @@ impl ApiHandler for S3ApiServer {
type Endpoint = S3ApiEndpoint; type Endpoint = S3ApiEndpoint;
type Error = Error; type Error = Error;
fn parse_endpoint(&self, req: &Request<Body>) -> Result<S3ApiEndpoint, Error> { fn parse_endpoint(&self, req: &Request<IncomingBody>) -> Result<S3ApiEndpoint, Error> {
let authority = req let authority = req
.headers() .headers()
.get(header::HOST) .get(header::HOST)
@ -104,9 +107,9 @@ impl ApiHandler for S3ApiServer {
async fn handle( async fn handle(
&self, &self,
req: Request<Body>, req: Request<IncomingBody>,
endpoint: S3ApiEndpoint, endpoint: S3ApiEndpoint,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let S3ApiEndpoint { let S3ApiEndpoint {
bucket_name, bucket_name,
endpoint, endpoint,
@ -235,8 +238,7 @@ impl ApiHandler for S3ApiServer {
} }
Endpoint::CreateBucket {} => unreachable!(), Endpoint::CreateBucket {} => unreachable!(),
Endpoint::HeadBucket {} => { Endpoint::HeadBucket {} => {
let empty_body: Body = Body::from(vec![]); let response = Response::builder().body(empty_body()).unwrap();
let response = Response::builder().body(empty_body).unwrap();
Ok(response) Ok(response)
} }
Endpoint::DeleteBucket {} => { Endpoint::DeleteBucket {} => {

View file

@ -1,7 +1,8 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use hyper::{body::HttpBody, Body, Request, Response, StatusCode}; use http_body_util::BodyExt;
use hyper::{Request, Response, StatusCode};
use garage_model::bucket_alias_table::*; use garage_model::bucket_alias_table::*;
use garage_model::bucket_table::Bucket; use garage_model::bucket_table::Bucket;
@ -14,11 +15,13 @@ use garage_util::data::*;
use garage_util::time::*; use garage_util::time::*;
use crate::common_error::CommonError; use crate::common_error::CommonError;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::xml as s3_xml; use crate::s3::xml as s3_xml;
use crate::signature::verify_signed_content; use crate::signature::verify_signed_content;
pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>, Error> { pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<ResBody>, Error> {
let loc = s3_xml::LocationConstraint { let loc = s3_xml::LocationConstraint {
xmlns: (), xmlns: (),
region: garage.config.s3_api.s3_region.to_string(), region: garage.config.s3_api.s3_region.to_string(),
@ -27,10 +30,10 @@ pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>,
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?) .body(string_body(xml))?)
} }
pub fn handle_get_bucket_versioning() -> Result<Response<Body>, Error> { pub fn handle_get_bucket_versioning() -> Result<Response<ResBody>, Error> {
let versioning = s3_xml::VersioningConfiguration { let versioning = s3_xml::VersioningConfiguration {
xmlns: (), xmlns: (),
status: None, status: None,
@ -40,10 +43,13 @@ pub fn handle_get_bucket_versioning() -> Result<Response<Body>, Error> {
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?) .body(string_body(xml))?)
} }
pub async fn handle_list_buckets(garage: &Garage, api_key: &Key) -> Result<Response<Body>, Error> { pub async fn handle_list_buckets(
garage: &Garage,
api_key: &Key,
) -> Result<Response<ResBody>, Error> {
let key_p = api_key.params().ok_or_internal_error( let key_p = api_key.params().ok_or_internal_error(
"Key should not be in deleted state at this point (in handle_list_buckets)", "Key should not be in deleted state at this point (in handle_list_buckets)",
)?; )?;
@ -109,17 +115,17 @@ pub async fn handle_list_buckets(garage: &Garage, api_key: &Key) -> Result<Respo
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml))?) .body(string_body(xml))?)
} }
pub async fn handle_create_bucket( pub async fn handle_create_bucket(
garage: &Garage, garage: &Garage,
req: Request<Body>, req: Request<ReqBody>,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
api_key: Key, api_key: Key,
bucket_name: String, bucket_name: String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let body = req.into_body().collect().await?.to_bytes(); let body = BodyExt::collect(req.into_body()).await?.to_bytes();
if let Some(content_sha256) = content_sha256 { if let Some(content_sha256) = content_sha256 {
verify_signed_content(content_sha256, &body[..])?; verify_signed_content(content_sha256, &body[..])?;
@ -194,7 +200,7 @@ pub async fn handle_create_bucket(
Ok(Response::builder() Ok(Response::builder()
.header("Location", format!("/{}", bucket_name)) .header("Location", format!("/{}", bucket_name))
.body(Body::empty()) .body(empty_body())
.unwrap()) .unwrap())
} }
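Note (sketch, assumptions flagged in comments): reading a whole request body now goes through http-body-util's BodyExt::collect, as in handle_create_bucket above, rather than hyper 0.14's body helpers. The standalone shape of that call:

use bytes::Bytes;
use http_body_util::BodyExt;
use hyper::{body::Incoming, Request};

// Drives the body to completion and concatenates its data frames into one Bytes.
// The same call works for any other hyper 1.x Body implementation.
async fn read_full_body(req: Request<Incoming>) -> Result<Bytes, hyper::Error> {
    let collected = req.into_body().collect().await?;
    Ok(collected.to_bytes())
}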
@ -203,7 +209,7 @@ pub async fn handle_delete_bucket(
bucket_id: Uuid, bucket_id: Uuid,
bucket_name: String, bucket_name: String,
api_key: Key, api_key: Key,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let key_params = api_key let key_params = api_key
.params() .params()
.ok_or_internal_error("Key should not be deleted at this point")?; .ok_or_internal_error("Key should not be deleted at this point")?;
@ -282,7 +288,7 @@ pub async fn handle_delete_bucket(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
fn parse_create_bucket_xml(xml_bytes: &[u8]) -> Option<Option<String>> { fn parse_create_bucket_xml(xml_bytes: &[u8]) -> Option<Option<String>> {

View file

@ -6,7 +6,7 @@ use futures::{stream, stream::Stream, StreamExt};
use md5::{Digest as Md5Digest, Md5}; use md5::{Digest as Md5Digest, Md5};
use bytes::Bytes; use bytes::Bytes;
use hyper::{Body, Request, Response}; use hyper::{Request, Response};
use serde::Serialize; use serde::Serialize;
use garage_rpc::netapp::bytes_buf::BytesBuf; use garage_rpc::netapp::bytes_buf::BytesBuf;
@ -22,7 +22,8 @@ use garage_model::s3::mpu_table::*;
use garage_model::s3::object_table::*; use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*; use garage_model::s3::version_table::*;
use crate::helpers::parse_bucket_key; use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::multipart; use crate::s3::multipart;
use crate::s3::put::get_headers; use crate::s3::put::get_headers;
@ -31,10 +32,10 @@ use crate::s3::xml::{self as s3_xml, xmlns_tag};
pub async fn handle_copy( pub async fn handle_copy(
garage: Arc<Garage>, garage: Arc<Garage>,
api_key: &Key, api_key: &Key,
req: &Request<Body>, req: &Request<ReqBody>,
dest_bucket_id: Uuid, dest_bucket_id: Uuid,
dest_key: &str, dest_key: &str,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let copy_precondition = CopyPreconditionHeaders::parse(req)?; let copy_precondition = CopyPreconditionHeaders::parse(req)?;
let source_object = get_copy_source(&garage, api_key, req).await?; let source_object = get_copy_source(&garage, api_key, req).await?;
@ -176,18 +177,18 @@ pub async fn handle_copy(
"x-amz-copy-source-version-id", "x-amz-copy-source-version-id",
hex::encode(source_version.uuid), hex::encode(source_version.uuid),
) )
.body(Body::from(xml))?) .body(string_body(xml))?)
} }
pub async fn handle_upload_part_copy( pub async fn handle_upload_part_copy(
garage: Arc<Garage>, garage: Arc<Garage>,
api_key: &Key, api_key: &Key,
req: &Request<Body>, req: &Request<ReqBody>,
dest_bucket_id: Uuid, dest_bucket_id: Uuid,
dest_key: &str, dest_key: &str,
part_number: u64, part_number: u64,
upload_id: &str, upload_id: &str,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let copy_precondition = CopyPreconditionHeaders::parse(req)?; let copy_precondition = CopyPreconditionHeaders::parse(req)?;
let dest_upload_id = multipart::decode_upload_id(upload_id)?; let dest_upload_id = multipart::decode_upload_id(upload_id)?;
@ -432,13 +433,13 @@ pub async fn handle_upload_part_copy(
"x-amz-copy-source-version-id", "x-amz-copy-source-version-id",
hex::encode(source_object_version.uuid), hex::encode(source_object_version.uuid),
) )
.body(Body::from(resp_xml))?) .body(string_body(resp_xml))?)
} }
async fn get_copy_source( async fn get_copy_source(
garage: &Garage, garage: &Garage,
api_key: &Key, api_key: &Key,
req: &Request<Body>, req: &Request<ReqBody>,
) -> Result<Object, Error> { ) -> Result<Object, Error> {
let copy_source = req.headers().get("x-amz-copy-source").unwrap().to_str()?; let copy_source = req.headers().get("x-amz-copy-source").unwrap().to_str()?;
let copy_source = percent_encoding::percent_decode_str(copy_source).decode_utf8()?; let copy_source = percent_encoding::percent_decode_str(copy_source).decode_utf8()?;
@ -501,7 +502,7 @@ struct CopyPreconditionHeaders {
} }
impl CopyPreconditionHeaders { impl CopyPreconditionHeaders {
fn parse(req: &Request<Body>) -> Result<Self, Error> { fn parse(req: &Request<ReqBody>) -> Result<Self, Error> {
Ok(Self { Ok(Self {
copy_source_if_match: req copy_source_if_match: req
.headers() .headers()

View file

@ -5,10 +5,17 @@ use http::header::{
ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN,
ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD,
}; };
use hyper::{body::HttpBody, header::HeaderName, Body, Method, Request, Response, StatusCode}; use hyper::{
body::Body, body::Incoming as IncomingBody, header::HeaderName, Method, Request, Response,
StatusCode,
};
use http_body_util::BodyExt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value}; use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
use crate::signature::verify_signed_content; use crate::signature::verify_signed_content;
@ -17,7 +24,7 @@ use garage_model::bucket_table::{Bucket, CorsRule as GarageCorsRule};
use garage_model::garage::Garage; use garage_model::garage::Garage;
use garage_util::data::*; use garage_util::data::*;
pub async fn handle_get_cors(bucket: &Bucket) -> Result<Response<Body>, Error> { pub async fn handle_get_cors(bucket: &Bucket) -> Result<Response<ResBody>, Error> {
let param = bucket let param = bucket
.params() .params()
.ok_or_internal_error("Bucket should not be deleted at this point")?; .ok_or_internal_error("Bucket should not be deleted at this point")?;
@ -34,18 +41,18 @@ pub async fn handle_get_cors(bucket: &Bucket) -> Result<Response<Body>, Error> {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/xml") .header(http::header::CONTENT_TYPE, "application/xml")
.body(Body::from(xml))?) .body(string_body(xml))?)
} else { } else {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
} }
pub async fn handle_delete_cors( pub async fn handle_delete_cors(
garage: Arc<Garage>, garage: Arc<Garage>,
mut bucket: Bucket, mut bucket: Bucket,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let param = bucket let param = bucket
.params_mut() .params_mut()
.ok_or_internal_error("Bucket should not be deleted at this point")?; .ok_or_internal_error("Bucket should not be deleted at this point")?;
@ -55,16 +62,16 @@ pub async fn handle_delete_cors(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
pub async fn handle_put_cors( pub async fn handle_put_cors(
garage: Arc<Garage>, garage: Arc<Garage>,
mut bucket: Bucket, mut bucket: Bucket,
req: Request<Body>, req: Request<ReqBody>,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let body = req.into_body().collect().await?.to_bytes(); let body = BodyExt::collect(req.into_body()).await?.to_bytes();
if let Some(content_sha256) = content_sha256 { if let Some(content_sha256) = content_sha256 {
verify_signed_content(content_sha256, &body[..])?; verify_signed_content(content_sha256, &body[..])?;
@ -84,14 +91,14 @@ pub async fn handle_put_cors(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::empty())?) .body(empty_body())?)
} }
pub async fn handle_options_s3api( pub async fn handle_options_s3api(
garage: Arc<Garage>, garage: Arc<Garage>,
req: &Request<Body>, req: &Request<IncomingBody>,
bucket_name: Option<String>, bucket_name: Option<String>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
// FIXME: CORS rules of buckets with local aliases are // FIXME: CORS rules of buckets with local aliases are
// not taken into account. // not taken into account.
@ -121,7 +128,7 @@ pub async fn handle_options_s3api(
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*") .header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
.header(ACCESS_CONTROL_ALLOW_METHODS, "*") .header(ACCESS_CONTROL_ALLOW_METHODS, "*")
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::empty())?) .body(empty_body())?)
} }
} else { } else {
// If there is no bucket name in the request, // If there is no bucket name in the request,
@ -131,14 +138,14 @@ pub async fn handle_options_s3api(
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*") .header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
.header(ACCESS_CONTROL_ALLOW_METHODS, "GET") .header(ACCESS_CONTROL_ALLOW_METHODS, "GET")
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::empty())?) .body(empty_body())?)
} }
} }
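Note (illustrative, not the exact Garage helpers): several CORS helpers in this file only read headers, so after the port they are generic over any body type (Request<impl Body>), which lets the same code accept both Incoming requests and the streaming ReqBody. For example:

use hyper::body::Body;
use hyper::Request;

// Header-only inspection: no constraint on how the body is produced or consumed.
fn origin_header<B: Body>(req: &Request<B>) -> Option<&str> {
    req.headers()
        .get(http::header::ORIGIN)
        .and_then(|value| value.to_str().ok())
}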
pub fn handle_options_for_bucket( pub fn handle_options_for_bucket(
req: &Request<Body>, req: &Request<IncomingBody>,
bucket: &Bucket, bucket: &Bucket,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let origin = req let origin = req
.headers() .headers()
.get("Origin") .get("Origin")
@ -161,7 +168,7 @@ pub fn handle_options_for_bucket(
if let Some(rule) = matching_rule { if let Some(rule) = matching_rule {
let mut resp = Response::builder() let mut resp = Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::empty())?; .body(empty_body())?;
add_cors_headers(&mut resp, rule).ok_or_internal_error("Invalid CORS configuration")?; add_cors_headers(&mut resp, rule).ok_or_internal_error("Invalid CORS configuration")?;
return Ok(resp); return Ok(resp);
} }
@ -172,7 +179,7 @@ pub fn handle_options_for_bucket(
pub fn find_matching_cors_rule<'a>( pub fn find_matching_cors_rule<'a>(
bucket: &'a Bucket, bucket: &'a Bucket,
req: &Request<Body>, req: &Request<impl Body>,
) -> Result<Option<&'a GarageCorsRule>, Error> { ) -> Result<Option<&'a GarageCorsRule>, Error> {
if let Some(cors_config) = bucket.params().unwrap().cors_config.get() { if let Some(cors_config) = bucket.params().unwrap().cors_config.get() {
if let Some(origin) = req.headers().get("Origin") { if let Some(origin) = req.headers().get("Origin") {
@ -209,7 +216,7 @@ where
} }
pub fn add_cors_headers( pub fn add_cors_headers(
resp: &mut Response<Body>, resp: &mut Response<ResBody>,
rule: &GarageCorsRule, rule: &GarageCorsRule,
) -> Result<(), http::header::InvalidHeaderValue> { ) -> Result<(), http::header::InvalidHeaderValue> {
let h = resp.headers_mut(); let h = resp.headers_mut();

View file

@ -1,12 +1,15 @@
use std::sync::Arc; use std::sync::Arc;
use hyper::{body::HttpBody, Body, Request, Response, StatusCode}; use http_body_util::BodyExt;
use hyper::{Request, Response, StatusCode};
use garage_util::data::*; use garage_util::data::*;
use garage_model::garage::Garage; use garage_model::garage::Garage;
use garage_model::s3::object_table::*; use garage_model::s3::object_table::*;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::put::next_timestamp; use crate::s3::put::next_timestamp;
use crate::s3::xml as s3_xml; use crate::s3::xml as s3_xml;
@ -59,11 +62,11 @@ pub async fn handle_delete(
garage: Arc<Garage>, garage: Arc<Garage>,
bucket_id: Uuid, bucket_id: Uuid,
key: &str, key: &str,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
match handle_delete_internal(&garage, bucket_id, key).await { match handle_delete_internal(&garage, bucket_id, key).await {
Ok(_) | Err(Error::NoSuchKey) => Ok(Response::builder() Ok(_) | Err(Error::NoSuchKey) => Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::from(vec![])) .body(empty_body())
.unwrap()), .unwrap()),
Err(e) => Err(e), Err(e) => Err(e),
} }
@ -72,10 +75,10 @@ pub async fn handle_delete(
pub async fn handle_delete_objects( pub async fn handle_delete_objects(
garage: Arc<Garage>, garage: Arc<Garage>,
bucket_id: Uuid, bucket_id: Uuid,
req: Request<Body>, req: Request<ReqBody>,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let body = req.into_body().collect().await?.to_bytes(); let body = BodyExt::collect(req.into_body()).await?.to_bytes();
if let Some(content_sha256) = content_sha256 { if let Some(content_sha256) = content_sha256 {
verify_signed_content(content_sha256, &body[..])?; verify_signed_content(content_sha256, &body[..])?;
@ -118,7 +121,7 @@ pub async fn handle_delete_objects(
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml))?) .body(string_body(xml))?)
} }
struct DeleteRequest { struct DeleteRequest {

View file

@ -2,13 +2,14 @@ use std::convert::TryInto;
use err_derive::Error; use err_derive::Error;
use hyper::header::HeaderValue; use hyper::header::HeaderValue;
use hyper::{Body, HeaderMap, StatusCode}; use hyper::{HeaderMap, StatusCode};
use garage_model::helper::error::Error as HelperError; use garage_model::helper::error::Error as HelperError;
use crate::common_error::CommonError; use crate::common_error::CommonError;
pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError}; pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
use crate::generic_server::ApiError; use crate::generic_server::ApiError;
use crate::helpers::*;
use crate::s3::xml as s3_xml; use crate::s3::xml as s3_xml;
use crate::signature::error::Error as SignatureError; use crate::signature::error::Error as SignatureError;
@ -189,14 +190,14 @@ impl ApiError for Error {
} }
} }
fn http_body(&self, garage_region: &str, path: &str) -> Body { fn http_body(&self, garage_region: &str, path: &str) -> BytesBody {
let error = s3_xml::Error { let error = s3_xml::Error {
code: s3_xml::Value(self.aws_code().to_string()), code: s3_xml::Value(self.aws_code().to_string()),
message: s3_xml::Value(format!("{}", self)), message: s3_xml::Value(format!("{}", self)),
resource: Some(s3_xml::Value(path.to_string())), resource: Some(s3_xml::Value(path.to_string())),
region: Some(s3_xml::Value(garage_region.to_string())), region: Some(s3_xml::Value(garage_region.to_string())),
}; };
Body::from(s3_xml::to_xml_with_header(&error).unwrap_or_else(|_| { let error_str = s3_xml::to_xml_with_header(&error).unwrap_or_else(|_| {
r#" r#"
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<Error> <Error>
@ -205,6 +206,7 @@ impl ApiError for Error {
</Error> </Error>
"# "#
.into() .into()
})) });
BytesBody::from(bytes::Bytes::from(error_str.into_bytes()))
} }
} }

View file

@ -8,7 +8,7 @@ use http::header::{
ACCEPT_RANGES, CONTENT_LENGTH, CONTENT_RANGE, CONTENT_TYPE, ETAG, IF_MODIFIED_SINCE, ACCEPT_RANGES, CONTENT_LENGTH, CONTENT_RANGE, CONTENT_TYPE, ETAG, IF_MODIFIED_SINCE,
IF_NONE_MATCH, LAST_MODIFIED, RANGE, IF_NONE_MATCH, LAST_MODIFIED, RANGE,
}; };
use hyper::{Body, Request, Response, StatusCode}; use hyper::{body::Body, Request, Response, StatusCode};
use tokio::sync::mpsc; use tokio::sync::mpsc;
use garage_rpc::rpc_helper::{netapp::stream::ByteStream, OrderTag}; use garage_rpc::rpc_helper::{netapp::stream::ByteStream, OrderTag};
@ -20,6 +20,8 @@ use garage_model::garage::Garage;
use garage_model::s3::object_table::*; use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*; use garage_model::s3::version_table::*;
use crate::helpers::*;
use crate::s3::api_server::ResBody;
use crate::s3::error::*; use crate::s3::error::*;
const X_AMZ_MP_PARTS_COUNT: &str = "x-amz-mp-parts-count"; const X_AMZ_MP_PARTS_COUNT: &str = "x-amz-mp-parts-count";
@ -52,8 +54,8 @@ fn object_headers(
fn try_answer_cached( fn try_answer_cached(
version: &ObjectVersion, version: &ObjectVersion,
version_meta: &ObjectVersionMeta, version_meta: &ObjectVersionMeta,
req: &Request<Body>, req: &Request<impl Body>,
) -> Option<Response<Body>> { ) -> Option<Response<ResBody>> {
// <trinity> It is possible, and is even usually the case, [that both If-None-Match and // <trinity> It is possible, and is even usually the case, [that both If-None-Match and
// If-Modified-Since] are present in a request. In this situation If-None-Match takes // If-Modified-Since] are present in a request. In this situation If-None-Match takes
// precedence and If-Modified-Since is ignored (as per 6.Precedence from rfc7232). The rational // precedence and If-Modified-Since is ignored (as per 6.Precedence from rfc7232). The rational
@ -80,7 +82,7 @@ fn try_answer_cached(
Some( Some(
Response::builder() Response::builder()
.status(StatusCode::NOT_MODIFIED) .status(StatusCode::NOT_MODIFIED)
.body(Body::empty()) .body(empty_body())
.unwrap(), .unwrap(),
) )
} else { } else {
@ -91,11 +93,11 @@ fn try_answer_cached(
/// Handle HEAD request /// Handle HEAD request
pub async fn handle_head( pub async fn handle_head(
garage: Arc<Garage>, garage: Arc<Garage>,
req: &Request<Body>, req: &Request<impl Body>,
bucket_id: Uuid, bucket_id: Uuid,
key: &str, key: &str,
part_number: Option<u64>, part_number: Option<u64>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let object = garage let object = garage
.object_table .object_table
.get(&bucket_id, &key.to_string()) .get(&bucket_id, &key.to_string())
@ -138,7 +140,7 @@ pub async fn handle_head(
) )
.header(X_AMZ_MP_PARTS_COUNT, "1") .header(X_AMZ_MP_PARTS_COUNT, "1")
.status(StatusCode::PARTIAL_CONTENT) .status(StatusCode::PARTIAL_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
ObjectVersionData::FirstBlock(_, _) => { ObjectVersionData::FirstBlock(_, _) => {
let version = garage let version = garage
@ -163,7 +165,7 @@ pub async fn handle_head(
) )
.header(X_AMZ_MP_PARTS_COUNT, format!("{}", version.n_parts()?)) .header(X_AMZ_MP_PARTS_COUNT, format!("{}", version.n_parts()?))
.status(StatusCode::PARTIAL_CONTENT) .status(StatusCode::PARTIAL_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
_ => unreachable!(), _ => unreachable!(),
} }
@ -171,18 +173,18 @@ pub async fn handle_head(
Ok(object_headers(object_version, version_meta) Ok(object_headers(object_version, version_meta)
.header(CONTENT_LENGTH, format!("{}", version_meta.size)) .header(CONTENT_LENGTH, format!("{}", version_meta.size))
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::empty())?) .body(empty_body())?)
} }
} }
/// Handle GET request /// Handle GET request
pub async fn handle_get( pub async fn handle_get(
garage: Arc<Garage>, garage: Arc<Garage>,
req: &Request<Body>, req: &Request<impl Body>,
bucket_id: Uuid, bucket_id: Uuid,
key: &str, key: &str,
part_number: Option<u64>, part_number: Option<u64>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let object = garage let object = garage
.object_table .object_table
.get(&bucket_id, &key.to_string()) .get(&bucket_id, &key.to_string())
@ -240,8 +242,7 @@ pub async fn handle_get(
match &last_v_data { match &last_v_data {
ObjectVersionData::DeleteMarker => unreachable!(), ObjectVersionData::DeleteMarker => unreachable!(),
ObjectVersionData::Inline(_, bytes) => { ObjectVersionData::Inline(_, bytes) => {
let body: Body = Body::from(bytes.to_vec()); Ok(resp_builder.body(bytes_body(bytes.to_vec().into()))?)
Ok(resp_builder.body(body)?)
} }
ObjectVersionData::FirstBlock(_, first_block_hash) => { ObjectVersionData::FirstBlock(_, first_block_hash) => {
let (tx, rx) = mpsc::channel(2); let (tx, rx) = mpsc::channel(2);
@ -293,10 +294,14 @@ pub async fn handle_get(
} }
}); });
let body_stream = tokio_stream::wrappers::ReceiverStream::new(rx).flatten(); let body_stream = tokio_stream::wrappers::ReceiverStream::new(rx)
.flatten()
let body = hyper::body::Body::wrap_stream(body_stream); .map(|x| {
Ok(resp_builder.body(body)?) x.map(hyper::body::Frame::data)
.map_err(|e| Error::from(garage_util::error::Error::from(e)))
});
let body = http_body_util::StreamBody::new(body_stream);
Ok(resp_builder.body(ResBody::new(body))?)
} }
} }
} }
@ -308,7 +313,7 @@ async fn handle_get_range(
version_meta: &ObjectVersionMeta, version_meta: &ObjectVersionMeta,
begin: u64, begin: u64,
end: u64, end: u64,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let resp_builder = object_headers(version, version_meta) let resp_builder = object_headers(version, version_meta)
.header(CONTENT_LENGTH, format!("{}", end - begin)) .header(CONTENT_LENGTH, format!("{}", end - begin))
.header( .header(
@ -321,7 +326,7 @@ async fn handle_get_range(
ObjectVersionData::DeleteMarker => unreachable!(), ObjectVersionData::DeleteMarker => unreachable!(),
ObjectVersionData::Inline(_meta, bytes) => { ObjectVersionData::Inline(_meta, bytes) => {
if end as usize <= bytes.len() { if end as usize <= bytes.len() {
let body: Body = Body::from(bytes[begin as usize..end as usize].to_vec()); let body = bytes_body(bytes[begin as usize..end as usize].to_vec().into());
Ok(resp_builder.body(body)?) Ok(resp_builder.body(body)?)
} else { } else {
Err(Error::internal_error( Err(Error::internal_error(
@ -348,7 +353,7 @@ async fn handle_get_part(
version_data: &ObjectVersionData, version_data: &ObjectVersionData,
version_meta: &ObjectVersionMeta, version_meta: &ObjectVersionMeta,
part_number: u64, part_number: u64,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let resp_builder = let resp_builder =
object_headers(object_version, version_meta).status(StatusCode::PARTIAL_CONTENT); object_headers(object_version, version_meta).status(StatusCode::PARTIAL_CONTENT);
@ -364,7 +369,7 @@ async fn handle_get_part(
format!("bytes {}-{}/{}", 0, bytes.len() - 1, bytes.len()), format!("bytes {}-{}/{}", 0, bytes.len() - 1, bytes.len()),
) )
.header(X_AMZ_MP_PARTS_COUNT, "1") .header(X_AMZ_MP_PARTS_COUNT, "1")
.body(Body::from(bytes.to_vec()))?) .body(bytes_body(bytes.to_vec().into()))?)
} }
ObjectVersionData::FirstBlock(_, _) => { ObjectVersionData::FirstBlock(_, _) => {
let version = garage let version = garage
@ -392,7 +397,7 @@ async fn handle_get_part(
} }
fn parse_range_header( fn parse_range_header(
req: &Request<Body>, req: &Request<impl Body>,
total_size: u64, total_size: u64,
) -> Result<Option<http_range::HttpRange>, Error> { ) -> Result<Option<http_range::HttpRange>, Error> {
let range = match req.headers().get(RANGE) { let range = match req.headers().get(RANGE) {
@ -434,7 +439,7 @@ fn body_from_blocks_range(
all_blocks: &[(VersionBlockKey, VersionBlock)], all_blocks: &[(VersionBlockKey, VersionBlock)],
begin: u64, begin: u64,
end: u64, end: u64,
) -> Body { ) -> ResBody {
// We will store here the list of blocks that have an intersection with the requested // We will store here the list of blocks that have an intersection with the requested
// range, as well as their "true offset", which is their actual offset in the complete // range, as well as their "true offset", which is their actual offset in the complete
// file (whereas block.offset designates the offset of the block WITHIN THE PART // file (whereas block.offset designates the offset of the block WITHIN THE PART
@ -456,7 +461,8 @@ fn body_from_blocks_range(
} }
let order_stream = OrderTag::stream(); let order_stream = OrderTag::stream();
let body_stream = futures::stream::iter(blocks) let mut body_stream =
futures::stream::iter(blocks)
.enumerate() .enumerate()
.map(move |(i, (block, block_offset))| { .map(move |(i, (block, block_offset))| {
let garage = garage.clone(); let garage = garage.clone();
@ -492,8 +498,9 @@ fn body_from_blocks_range(
} else { } else {
end - *chunk_offset end - *chunk_offset
}; };
Some(Some(Ok(chunk_bytes Some(Some(Ok(chunk_bytes.slice(
.slice(start_in_chunk as usize..end_in_chunk as usize)))) start_in_chunk as usize..end_in_chunk as usize,
))))
}; };
*chunk_offset += chunk_bytes.len() as u64; *chunk_offset += chunk_bytes.len() as u64;
r r
@ -504,11 +511,24 @@ fn body_from_blocks_range(
}) })
.filter_map(futures::future::ready) .filter_map(futures::future::ready)
} }
}) });
.buffered(2)
.flatten();
hyper::body::Body::wrap_stream(body_stream) let (tx, rx) = mpsc::channel(2);
tokio::spawn(async move {
while let Some(item) = body_stream.next().await {
if tx.send(item.await).await.is_err() {
break; // connection closed by client
}
}
});
let body_stream = tokio_stream::wrappers::ReceiverStream::new(rx)
.flatten()
.map(|x| {
x.map(hyper::body::Frame::data)
.map_err(|e| Error::from(garage_util::error::Error::from(e)))
});
ResBody::new(http_body_util::StreamBody::new(body_stream))
} }
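Note (sketch, not the commit's code): hyper 1.x drops Body::wrap_stream, so the streaming responses above wrap each chunk in a data Frame and feed the stream to http_body_util::StreamBody. The same conversion in isolation:

use bytes::Bytes;
use futures::stream::{self, StreamExt};
use http_body_util::{combinators::BoxBody, StreamBody};
use hyper::body::Frame;

fn body_from_chunks(
    chunks: Vec<Result<Bytes, std::io::Error>>,
) -> BoxBody<Bytes, std::io::Error> {
    // Each Ok(Bytes) becomes a data frame; errors are passed through unchanged.
    let frames = stream::iter(chunks).map(|chunk| chunk.map(Frame::data));
    BoxBody::new(StreamBody::new(frames))
}

The diff additionally routes the fetched blocks through an mpsc channel and a spawned task, so block retrieval can keep going while the client consumes the body.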
fn error_stream(i: usize, e: garage_util::error::Error) -> ByteStream { fn error_stream(i: usize, e: garage_util::error::Error) -> ByteStream {

View file

@ -1,10 +1,13 @@
use quick_xml::de::from_reader; use quick_xml::de::from_reader;
use std::sync::Arc; use std::sync::Arc;
use hyper::{body::HttpBody, Body, Request, Response, StatusCode}; use http_body_util::BodyExt;
use hyper::{Request, Response, StatusCode};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value}; use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
use crate::signature::verify_signed_content; use crate::signature::verify_signed_content;
@ -16,7 +19,7 @@ use garage_model::bucket_table::{
use garage_model::garage::Garage; use garage_model::garage::Garage;
use garage_util::data::*; use garage_util::data::*;
pub async fn handle_get_lifecycle(bucket: &Bucket) -> Result<Response<Body>, Error> { pub async fn handle_get_lifecycle(bucket: &Bucket) -> Result<Response<ResBody>, Error> {
let param = bucket let param = bucket
.params() .params()
.ok_or_internal_error("Bucket should not be deleted at this point")?; .ok_or_internal_error("Bucket should not be deleted at this point")?;
@ -27,18 +30,18 @@ pub async fn handle_get_lifecycle(bucket: &Bucket) -> Result<Response<Body>, Err
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/xml") .header(http::header::CONTENT_TYPE, "application/xml")
.body(Body::from(xml))?) .body(string_body(xml))?)
} else { } else {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
} }
pub async fn handle_delete_lifecycle( pub async fn handle_delete_lifecycle(
garage: Arc<Garage>, garage: Arc<Garage>,
mut bucket: Bucket, mut bucket: Bucket,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let param = bucket let param = bucket
.params_mut() .params_mut()
.ok_or_internal_error("Bucket should not be deleted at this point")?; .ok_or_internal_error("Bucket should not be deleted at this point")?;
@ -48,16 +51,16 @@ pub async fn handle_delete_lifecycle(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
pub async fn handle_put_lifecycle( pub async fn handle_put_lifecycle(
garage: Arc<Garage>, garage: Arc<Garage>,
mut bucket: Bucket, mut bucket: Bucket,
req: Request<Body>, req: Request<ReqBody>,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let body = req.into_body().collect().await?.to_bytes(); let body = BodyExt::collect(req.into_body()).await?.to_bytes();
if let Some(content_sha256) = content_sha256 { if let Some(content_sha256) = content_sha256 {
verify_signed_content(content_sha256, &body[..])?; verify_signed_content(content_sha256, &body[..])?;
@ -77,7 +80,7 @@ pub async fn handle_put_lifecycle(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::empty())?) .body(empty_body())?)
} }
// ---- SERIALIZATION AND DESERIALIZATION TO/FROM S3 XML ---- // ---- SERIALIZATION AND DESERIALIZATION TO/FROM S3 XML ----

View file

@ -3,7 +3,7 @@ use std::iter::{Iterator, Peekable};
use std::sync::Arc; use std::sync::Arc;
use base64::prelude::*; use base64::prelude::*;
use hyper::{Body, Response}; use hyper::Response;
use garage_util::data::*; use garage_util::data::*;
use garage_util::error::Error as GarageError; use garage_util::error::Error as GarageError;
@ -16,7 +16,8 @@ use garage_model::s3::object_table::*;
use garage_table::EnumerationOrder; use garage_table::EnumerationOrder;
use crate::encoding::*; use crate::encoding::*;
use crate::helpers::key_after_prefix; use crate::helpers::*;
use crate::s3::api_server::ResBody;
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::multipart as s3_multipart; use crate::s3::multipart as s3_multipart;
use crate::s3::xml as s3_xml; use crate::s3::xml as s3_xml;
@@ -63,7 +64,7 @@ pub struct ListPartsQuery {
pub async fn handle_list( pub async fn handle_list(
garage: Arc<Garage>, garage: Arc<Garage>,
query: &ListObjectsQuery, query: &ListObjectsQuery,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let io = |bucket, key, count| { let io = |bucket, key, count| {
let t = &garage.object_table; let t = &garage.object_table;
async move { async move {
@@ -162,13 +163,13 @@ pub async fn handle_list(
let xml = s3_xml::to_xml_with_header(&result)?; let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?) .body(string_body(xml))?)
} }
pub async fn handle_list_multipart_upload( pub async fn handle_list_multipart_upload(
garage: Arc<Garage>, garage: Arc<Garage>,
query: &ListMultipartUploadsQuery, query: &ListMultipartUploadsQuery,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let io = |bucket, key, count| { let io = |bucket, key, count| {
let t = &garage.object_table; let t = &garage.object_table;
async move { async move {
@@ -264,13 +265,13 @@ pub async fn handle_list_multipart_upload(
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?) .body(string_body(xml))?)
} }
pub async fn handle_list_parts( pub async fn handle_list_parts(
garage: Arc<Garage>, garage: Arc<Garage>,
query: &ListPartsQuery, query: &ListPartsQuery,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
debug!("ListParts {:?}", query); debug!("ListParts {:?}", query);
let upload_id = s3_multipart::decode_upload_id(&query.upload_id)?; let upload_id = s3_multipart::decode_upload_id(&query.upload_id)?;
@@ -319,7 +320,7 @@ pub async fn handle_list_parts(
Ok(Response::builder() Ok(Response::builder()
.header("Content-Type", "application/xml") .header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?) .body(string_body(xml))?)
} }
/* /*
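All of these handlers now build their responses with string_body(xml) and empty_body() from crate::helpers instead of Body::from(...) / Body::empty(): hyper 1.x ships no concrete body type, so the API defines its own boxed ResBody. The real definitions live in the helpers module and may differ; a rough sketch of plausible constructors, assuming an http_body_util BoxBody over the API error type:

    use std::convert::Infallible;

    use bytes::Bytes;
    use http_body_util::{combinators::BoxBody, BodyExt, Empty, Full};

    // Illustrative only: possible shapes for the empty_body()/string_body()
    // helpers used by the handlers above.
    fn empty_body<E: 'static>() -> BoxBody<Bytes, E> {
        Empty::<Bytes>::new()
            .map_err(|never: Infallible| match never {})
            .boxed()
    }

    fn string_body<E: 'static>(s: String) -> BoxBody<Bytes, E> {
        Full::new(Bytes::from(s))
            .map_err(|never: Infallible| match never {})
            .boxed()
    }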


@@ -2,8 +2,8 @@ use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use futures::{prelude::*, TryStreamExt}; use futures::{prelude::*, TryStreamExt};
use hyper::body::Body; use http_body_util::BodyStream;
use hyper::{body::HttpBody, Request, Response}; use hyper::{Request, Response};
use md5::{Digest as Md5Digest, Md5}; use md5::{Digest as Md5Digest, Md5};
use garage_table::*; use garage_table::*;
@ -17,6 +17,8 @@ use garage_model::s3::mpu_table::*;
use garage_model::s3::object_table::*; use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*; use garage_model::s3::version_table::*;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::put::*; use crate::s3::put::*;
use crate::s3::xml as s3_xml; use crate::s3::xml as s3_xml;
@@ -26,11 +28,11 @@ use crate::signature::verify_signed_content;
pub async fn handle_create_multipart_upload( pub async fn handle_create_multipart_upload(
garage: Arc<Garage>, garage: Arc<Garage>,
req: &Request<Body>, req: &Request<ReqBody>,
bucket_name: &str, bucket_name: &str,
bucket_id: Uuid, bucket_id: Uuid,
key: &String, key: &String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let existing_object = garage.object_table.get(&bucket_id, &key).await?; let existing_object = garage.object_table.get(&bucket_id, &key).await?;
let upload_id = gen_uuid(); let upload_id = gen_uuid();
@@ -65,18 +67,18 @@ pub async fn handle_create_multipart_upload(
}; };
let xml = s3_xml::to_xml_with_header(&result)?; let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::new(Body::from(xml.into_bytes()))) Ok(Response::new(string_body(xml)))
} }
pub async fn handle_put_part( pub async fn handle_put_part(
garage: Arc<Garage>, garage: Arc<Garage>,
req: Request<Body>, req: Request<ReqBody>,
bucket_id: Uuid, bucket_id: Uuid,
key: &str, key: &str,
part_number: u64, part_number: u64,
upload_id: &str, upload_id: &str,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let upload_id = decode_upload_id(upload_id)?; let upload_id = decode_upload_id(upload_id)?;
let content_md5 = match req.headers().get("content-md5") { let content_md5 = match req.headers().get("content-md5") {
@@ -87,8 +89,10 @@ pub async fn handle_put_part(
// Read first chuck, and at the same time try to get object to see if it exists // Read first chuck, and at the same time try to get object to see if it exists
let key = key.to_string(); let key = key.to_string();
let body = TryStreamExt::map_err(req.into_body(), Error::from); let body_stream = BodyStream::new(req.into_body())
let mut chunker = StreamChunker::new(body, garage.config.block_size); .map(|x| x.map(|f| f.into_data().unwrap())) //TODO remove unwrap
.map_err(Error::from);
let mut chunker = StreamChunker::new(body_stream, garage.config.block_size);
let ((_, _, mut mpu), first_block) = futures::try_join!( let ((_, _, mut mpu), first_block) = futures::try_join!(
get_upload(&garage, &bucket_id, &key, &upload_id), get_upload(&garage, &bucket_id, &key, &upload_id),
@@ -172,7 +176,7 @@ pub async fn handle_put_part(
let response = Response::builder() let response = Response::builder()
.header("ETag", format!("\"{}\"", data_md5sum_hex)) .header("ETag", format!("\"{}\"", data_md5sum_hex))
.body(Body::empty()) .body(empty_body())
.unwrap(); .unwrap();
Ok(response) Ok(response)
} }
@@ -210,14 +214,16 @@ impl Drop for InterruptedCleanup {
pub async fn handle_complete_multipart_upload( pub async fn handle_complete_multipart_upload(
garage: Arc<Garage>, garage: Arc<Garage>,
req: Request<Body>, req: Request<ReqBody>,
bucket_name: &str, bucket_name: &str,
bucket: &Bucket, bucket: &Bucket,
key: &str, key: &str,
upload_id: &str, upload_id: &str,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let body = HttpBody::collect(req.into_body()).await?.to_bytes(); let body = http_body_util::BodyExt::collect(req.into_body())
.await?
.to_bytes();
if let Some(content_sha256) = content_sha256 { if let Some(content_sha256) = content_sha256 {
verify_signed_content(content_sha256, &body[..])?; verify_signed_content(content_sha256, &body[..])?;
@@ -365,7 +371,7 @@ pub async fn handle_complete_multipart_upload(
}; };
let xml = s3_xml::to_xml_with_header(&result)?; let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::new(Body::from(xml.into_bytes()))) Ok(Response::new(string_body(xml)))
} }
pub async fn handle_abort_multipart_upload( pub async fn handle_abort_multipart_upload(
@@ -373,7 +379,7 @@ pub async fn handle_abort_multipart_upload(
bucket_id: Uuid, bucket_id: Uuid,
key: &str, key: &str,
upload_id: &str, upload_id: &str,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let upload_id = decode_upload_id(upload_id)?; let upload_id = decode_upload_id(upload_id)?;
let (_, mut object_version, _) = let (_, mut object_version, _) =
@@ -383,7 +389,7 @@ pub async fn handle_abort_multipart_upload(
let final_object = Object::new(bucket_id, key.to_string(), vec![object_version]); let final_object = Object::new(bucket_id, key.to_string(), vec![object_version]);
garage.object_table.insert(&final_object).await?; garage.object_table.insert(&final_object).await?;
Ok(Response::new(Body::from(vec![]))) Ok(Response::new(empty_body()))
} }
// ======== helpers ============ // ======== helpers ============
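handle_put_part (and handle_put and handle_post_object in the same way) converts the incoming body into a byte stream with BodyStream, because a hyper 1.x body yields Frames rather than raw chunks. The .into_data().unwrap() flagged with a TODO above would panic if a trailers frame ever arrived; a hedged sketch of a variant that simply drops non-data frames instead (the helper is illustrative and not part of this commit):

    use bytes::Bytes;
    use futures::{Stream, TryStreamExt};
    use http_body_util::BodyStream;
    use hyper::body::Body;

    // Illustrative: expose a hyper 1.x body as a stream of data chunks,
    // skipping non-data frames (trailers) instead of unwrapping them.
    fn body_data_stream<B>(body: B) -> impl Stream<Item = Result<Bytes, B::Error>>
    where
        B: Body<Data = Bytes>,
    {
        BodyStream::new(body)
            .try_filter_map(|frame| async move { Ok::<_, B::Error>(frame.into_data().ok()) })
    }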


@@ -1,5 +1,5 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::TryInto; use std::convert::{Infallible, TryInto};
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use std::sync::Arc; use std::sync::Arc;
use std::task::{Context, Poll}; use std::task::{Context, Poll};
@@ -7,14 +7,17 @@ use std::task::{Context, Poll};
use base64::prelude::*; use base64::prelude::*;
use bytes::Bytes; use bytes::Bytes;
use chrono::{DateTime, Duration, Utc}; use chrono::{DateTime, Duration, Utc};
use futures::{Stream, StreamExt}; use futures::{Stream, StreamExt, TryStreamExt};
use http_body_util::BodyStream;
use hyper::header::{self, HeaderMap, HeaderName, HeaderValue}; use hyper::header::{self, HeaderMap, HeaderName, HeaderValue};
use hyper::{Body, Request, Response, StatusCode}; use hyper::{body::Incoming as IncomingBody, Request, Response, StatusCode};
use multer::{Constraints, Multipart, SizeLimit}; use multer::{Constraints, Multipart, SizeLimit};
use serde::Deserialize; use serde::Deserialize;
use garage_model::garage::Garage; use garage_model::garage::Garage;
use crate::helpers::*;
use crate::s3::api_server::ResBody;
use crate::s3::cors::*; use crate::s3::cors::*;
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::put::{get_headers, save_stream}; use crate::s3::put::{get_headers, save_stream};
@@ -23,9 +26,9 @@ use crate::signature::payload::{parse_date, verify_v4};
pub async fn handle_post_object( pub async fn handle_post_object(
garage: Arc<Garage>, garage: Arc<Garage>,
req: Request<Body>, req: Request<IncomingBody>,
bucket_name: String, bucket_name: String,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let boundary = req let boundary = req
.headers() .headers()
.get(header::CONTENT_TYPE) .get(header::CONTENT_TYPE)
@@ -42,7 +45,10 @@ pub async fn handle_post_object(
); );
let (head, body) = req.into_parts(); let (head, body) = req.into_parts();
let mut multipart = Multipart::with_constraints(body, boundary, constraints); let body_stream = BodyStream::new(body)
.map(|x| x.map(|f| f.into_data().unwrap())) //TODO remove unwrap
.map_err(Error::from);
let mut multipart = Multipart::with_constraints(body_stream, boundary, constraints);
let mut params = HeaderMap::new(); let mut params = HeaderMap::new();
let field = loop { let field = loop {
@@ -259,7 +265,7 @@ pub async fn handle_post_object(
.status(StatusCode::SEE_OTHER) .status(StatusCode::SEE_OTHER)
.header(header::LOCATION, target.clone()) .header(header::LOCATION, target.clone())
.header(header::ETAG, etag) .header(header::ETAG, etag)
.body(target.into())? .body(string_body(target))?
} else { } else {
let path = head let path = head
.uri .uri
@@ -290,7 +296,7 @@ pub async fn handle_post_object(
.header(header::LOCATION, location.clone()) .header(header::LOCATION, location.clone())
.header(header::ETAG, etag.clone()); .header(header::ETAG, etag.clone());
match action { match action {
"200" => builder.status(StatusCode::OK).body(Body::empty())?, "200" => builder.status(StatusCode::OK).body(empty_body())?,
"201" => { "201" => {
let xml = s3_xml::PostObject { let xml = s3_xml::PostObject {
xmlns: (), xmlns: (),
@@ -302,14 +308,16 @@ pub async fn handle_post_object(
let body = s3_xml::to_xml_with_header(&xml)?; let body = s3_xml::to_xml_with_header(&xml)?;
builder builder
.status(StatusCode::CREATED) .status(StatusCode::CREATED)
.body(Body::from(body.into_bytes()))? .body(string_body(body))?
} }
_ => builder.status(StatusCode::NO_CONTENT).body(Body::empty())?, _ => builder.status(StatusCode::NO_CONTENT).body(empty_body())?,
} }
}; };
let matching_cors_rule = let matching_cors_rule = find_matching_cors_rule(
find_matching_cors_rule(&bucket, &Request::from_parts(head, Body::empty()))?; &bucket,
&Request::from_parts(head, empty_body::<Infallible>()),
)?;
if let Some(rule) = matching_cors_rule { if let Some(rule) = matching_cors_rule {
add_cors_headers(&mut resp, rule) add_cors_headers(&mut resp, rule)
.ok_or_internal_error("Invalid bucket CORS configuration")?; .ok_or_internal_error("Invalid bucket CORS configuration")?;
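The same BodyStream conversion feeds multer here, because multer::Multipart consumes a plain Stream of byte chunks rather than a hyper body. A tiny standalone sketch of that interface, with a hard-coded boundary and payload purely for illustration:

    use bytes::Bytes;
    use futures::stream;
    use multer::{Constraints, Multipart};

    // Illustrative: multer accepts any Stream<Item = Result<impl Into<Bytes>, E>>,
    // which is what BodyStream + into_data() produces above.
    async fn parse_one_field() -> multer::Result<()> {
        let payload = b"--X\r\nContent-Disposition: form-data; name=\"key\"\r\n\r\nvalue\r\n--X--\r\n";
        let chunks = stream::iter(vec![Ok::<_, std::io::Error>(Bytes::from_static(payload))]);
        let mut multipart = Multipart::with_constraints(chunks, "X", Constraints::new());
        while let Some(field) = multipart.next_field().await? {
            let name = field.name().map(str::to_string);
            println!("{:?} = {}", name, field.text().await?);
        }
        Ok(())
    }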


@@ -4,7 +4,8 @@ use std::sync::Arc;
use base64::prelude::*; use base64::prelude::*;
use futures::prelude::*; use futures::prelude::*;
use futures::try_join; use futures::try_join;
use hyper::body::{Body, Bytes}; use http_body_util::BodyStream;
use hyper::body::Bytes;
use hyper::header::{HeaderMap, HeaderValue}; use hyper::header::{HeaderMap, HeaderValue};
use hyper::{Request, Response}; use hyper::{Request, Response};
use md5::{digest::generic_array::*, Digest as Md5Digest, Md5}; use md5::{digest::generic_array::*, Digest as Md5Digest, Md5};
@@ -30,15 +31,17 @@ use garage_model::s3::block_ref_table::*;
use garage_model::s3::object_table::*; use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*; use garage_model::s3::version_table::*;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
pub async fn handle_put( pub async fn handle_put(
garage: Arc<Garage>, garage: Arc<Garage>,
req: Request<Body>, req: Request<ReqBody>,
bucket: &Bucket, bucket: &Bucket,
key: &String, key: &String,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
// Retrieve interesting headers from request // Retrieve interesting headers from request
let headers = get_headers(req.headers())?; let headers = get_headers(req.headers())?;
debug!("Object headers: {:?}", headers); debug!("Object headers: {:?}", headers);
@@ -48,13 +51,14 @@ pub async fn handle_put(
None => None, None => None,
}; };
let (_head, body) = req.into_parts(); let body_stream = BodyStream::new(req.into_body())
let body = body.map_err(Error::from); .map(|x| x.map(|f| f.into_data().unwrap())) //TODO remove unwrap
.map_err(Error::from);
save_stream( save_stream(
garage, garage,
headers, headers,
body, body_stream,
bucket, bucket,
key, key,
content_md5, content_md5,
@@ -434,11 +438,11 @@ impl<S: Stream<Item = Result<Bytes, Error>> + Unpin> StreamChunker<S> {
} }
} }
pub fn put_response(version_uuid: Uuid, md5sum_hex: String) -> Response<Body> { pub fn put_response(version_uuid: Uuid, md5sum_hex: String) -> Response<ResBody> {
Response::builder() Response::builder()
.header("x-amz-version-id", hex::encode(version_uuid)) .header("x-amz-version-id", hex::encode(version_uuid))
.header("ETag", format!("\"{}\"", md5sum_hex)) .header("ETag", format!("\"{}\"", md5sum_hex))
.body(Body::from(vec![])) .body(empty_body())
.unwrap() .unwrap()
} }


@@ -1,9 +1,12 @@
use quick_xml::de::from_reader; use quick_xml::de::from_reader;
use std::sync::Arc; use std::sync::Arc;
use hyper::{body::HttpBody, Body, Request, Response, StatusCode}; use http_body_util::BodyExt;
use hyper::{Request, Response, StatusCode};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*; use crate::s3::error::*;
use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value}; use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
use crate::signature::verify_signed_content; use crate::signature::verify_signed_content;
@@ -12,7 +15,7 @@ use garage_model::bucket_table::*;
use garage_model::garage::Garage; use garage_model::garage::Garage;
use garage_util::data::*; use garage_util::data::*;
pub async fn handle_get_website(bucket: &Bucket) -> Result<Response<Body>, Error> { pub async fn handle_get_website(bucket: &Bucket) -> Result<Response<ResBody>, Error> {
let param = bucket let param = bucket
.params() .params()
.ok_or_internal_error("Bucket should not be deleted at this point")?; .ok_or_internal_error("Bucket should not be deleted at this point")?;
@@ -33,18 +36,18 @@ pub async fn handle_get_website(bucket: &Bucket) -> Result<Response<Body>, Error
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/xml") .header(http::header::CONTENT_TYPE, "application/xml")
.body(Body::from(xml))?) .body(string_body(xml))?)
} else { } else {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
} }
pub async fn handle_delete_website( pub async fn handle_delete_website(
garage: Arc<Garage>, garage: Arc<Garage>,
mut bucket: Bucket, mut bucket: Bucket,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let param = bucket let param = bucket
.params_mut() .params_mut()
.ok_or_internal_error("Bucket should not be deleted at this point")?; .ok_or_internal_error("Bucket should not be deleted at this point")?;
@@ -54,16 +57,16 @@ pub async fn handle_delete_website(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::NO_CONTENT) .status(StatusCode::NO_CONTENT)
.body(Body::empty())?) .body(empty_body())?)
} }
pub async fn handle_put_website( pub async fn handle_put_website(
garage: Arc<Garage>, garage: Arc<Garage>,
mut bucket: Bucket, mut bucket: Bucket,
req: Request<Body>, req: Request<ReqBody>,
content_sha256: Option<Hash>, content_sha256: Option<Hash>,
) -> Result<Response<Body>, Error> { ) -> Result<Response<ResBody>, Error> {
let body = req.into_body().collect().await?.to_bytes(); let body = BodyExt::collect(req.into_body()).await?.to_bytes();
if let Some(content_sha256) = content_sha256 { if let Some(content_sha256) = content_sha256 {
verify_signed_content(content_sha256, &body[..])?; verify_signed_content(content_sha256, &body[..])?;
@@ -83,7 +86,7 @@ pub async fn handle_put_website(
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::OK) .status(StatusCode::OK)
.body(Body::empty())?) .body(empty_body())?)
} }
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]


@@ -2,7 +2,7 @@ use std::collections::HashMap;
use chrono::{DateTime, Duration, NaiveDateTime, Utc}; use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use hmac::Mac; use hmac::Mac;
use hyper::{Body, Method, Request}; use hyper::{body::Incoming as IncomingBody, Method, Request};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use garage_table::*; use garage_table::*;
@@ -20,7 +20,7 @@ use crate::signature::error::*;
pub async fn check_payload_signature( pub async fn check_payload_signature(
garage: &Garage, garage: &Garage,
service: &'static str, service: &'static str,
request: &Request<Body>, request: &Request<IncomingBody>,
) -> Result<(Option<Key>, Option<Hash>), Error> { ) -> Result<(Option<Key>, Option<Hash>), Error> {
let mut headers = HashMap::new(); let mut headers = HashMap::new();
for (key, val) in request.headers() { for (key, val) in request.headers() {


@@ -5,22 +5,26 @@ use futures::prelude::*;
use futures::task; use futures::task;
use garage_model::key_table::Key; use garage_model::key_table::Key;
use hmac::Mac; use hmac::Mac;
use hyper::body::Bytes; use http_body_util::{BodyStream, StreamBody};
use hyper::{Body, Request}; use hyper::body::{Bytes, Incoming as IncomingBody};
use hyper::Request;
use garage_util::data::Hash; use garage_util::data::Hash;
use super::{compute_scope, sha256sum, HmacSha256, LONG_DATETIME}; use super::{compute_scope, sha256sum, HmacSha256, LONG_DATETIME};
use crate::helpers::*;
use crate::signature::error::*; use crate::signature::error::*;
pub type ReqBody = BoxBody<Error>;
pub fn parse_streaming_body( pub fn parse_streaming_body(
api_key: &Key, api_key: &Key,
req: Request<Body>, req: Request<IncomingBody>,
content_sha256: &mut Option<Hash>, content_sha256: &mut Option<Hash>,
region: &str, region: &str,
service: &str, service: &str,
) -> Result<Request<Body>, Error> { ) -> Result<Request<ReqBody>, Error> {
match req.headers().get("x-amz-content-sha256") { match req.headers().get("x-amz-content-sha256") {
Some(header) if header == "STREAMING-AWS4-HMAC-SHA256-PAYLOAD" => { Some(header) if header == "STREAMING-AWS4-HMAC-SHA256-PAYLOAD" => {
let signature = content_sha256 let signature = content_sha256
@@ -47,19 +51,17 @@ pub fn parse_streaming_body(
.ok_or_internal_error("Unable to build signing HMAC")?; .ok_or_internal_error("Unable to build signing HMAC")?;
Ok(req.map(move |body| { Ok(req.map(move |body| {
Body::wrap_stream( let body_stream = BodyStream::new(body)
SignedPayloadStream::new( .map(|x| x.map(|f| f.into_data().unwrap())) //TODO remove unwrap
body.map_err(Error::from), .map_err(Error::from);
signing_hmac, let signed_payload_stream =
date, SignedPayloadStream::new(body_stream, signing_hmac, date, &scope, signature)
&scope, .map(|x| x.map(hyper::body::Frame::data))
signature, .map_err(Error::from);
) ReqBody::new(StreamBody::new(signed_payload_stream))
.map_err(Error::from),
)
})) }))
} }
_ => Ok(req), _ => Ok(req.map(|body| ReqBody::new(http_body_util::BodyExt::map_err(body, Error::from)))),
} }
} }
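parse_streaming_body goes the opposite direction: after the chunk stream has been wrapped in SignedPayloadStream, every Bytes chunk is turned back into a data Frame, StreamBody makes the stream a body again, and the result is boxed into the new ReqBody alias. A condensed sketch of just that stream-to-body step (generic names are illustrative):

    use bytes::Bytes;
    use futures::{Stream, StreamExt};
    use http_body_util::{combinators::BoxBody, StreamBody};
    use hyper::body::Frame;

    // Illustrative: box a stream of verified Bytes chunks back up as a
    // hyper 1.x request body, as parse_streaming_body() does above.
    fn stream_to_body<S, E>(stream: S) -> BoxBody<Bytes, E>
    where
        S: Stream<Item = Result<Bytes, E>> + Send + Sync + 'static,
        E: 'static,
    {
        BoxBody::new(StreamBody::new(stream.map(|chunk| chunk.map(Frame::data))))
    }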


@@ -26,8 +26,9 @@ percent-encoding.workspace = true
futures.workspace = true futures.workspace = true
http.workspace = true http.workspace = true
http-body-util.workspace = true
hyper.workspace = true hyper.workspace = true
hyperlocal.workspace = true hyper-util.workspace = true
tokio.workspace = true tokio.workspace = true


@@ -4,16 +4,17 @@ use std::{convert::Infallible, sync::Arc};
use futures::future::Future; use futures::future::Future;
use hyper::server::conn::http1;
use hyper::{ use hyper::{
body::Incoming as IncomingBody,
header::{HeaderValue, HOST}, header::{HeaderValue, HOST},
server::conn::AddrStream, service::service_fn,
service::{make_service_fn, service_fn}, Method, Request, Response, StatusCode,
Body, Method, Request, Response, Server, StatusCode,
}; };
use hyper_util::rt::TokioIo;
use hyperlocal::UnixServerExt; use tokio::io::{AsyncRead, AsyncWrite};
use tokio::net::{TcpListener, UnixListener};
use tokio::net::UnixStream;
use opentelemetry::{ use opentelemetry::{
global, global,
@@ -24,7 +25,7 @@ use opentelemetry::{
use crate::error::*; use crate::error::*;
use garage_api::helpers::{authority_to_host, host_to_bucket}; use garage_api::helpers::*;
use garage_api::s3::cors::{add_cors_headers, find_matching_cors_rule, handle_options_for_bucket}; use garage_api::s3::cors::{add_cors_headers, find_matching_cors_rule, handle_options_for_bucket};
use garage_api::s3::error::{ use garage_api::s3::error::{
CommonErrorDerivative, Error as ApiError, OkOrBadRequest, OkOrInternalError, CommonErrorDerivative, Error as ApiError, OkOrBadRequest, OkOrInternalError,
@@ -76,7 +77,7 @@ impl WebServer {
/// Run a web server /// Run a web server
pub async fn run( pub async fn run(
garage: Arc<Garage>, garage: Arc<Garage>,
addr: UnixOrTCPSocketAddress, bind_addr: UnixOrTCPSocketAddress,
root_domain: String, root_domain: String,
shutdown_signal: impl Future<Output = ()>, shutdown_signal: impl Future<Output = ()>,
) -> Result<(), GarageError> { ) -> Result<(), GarageError> {
@@ -87,65 +88,73 @@ impl WebServer {
root_domain, root_domain,
}); });
let tcp_service = make_service_fn(|conn: &AddrStream| { info!("Web server listening on {}", bind_addr);
let web_server = web_server.clone();
let client_addr = conn.remote_addr(); tokio::pin!(shutdown_signal);
async move {
Ok::<_, Error>(service_fn(move |req: Request<Body>| {
let web_server = web_server.clone();
web_server.handle_request(req, client_addr.to_string()) match bind_addr {
}))
}
});
let unix_service = make_service_fn(|_: &UnixStream| {
let web_server = web_server.clone();
let path = addr.to_string();
async move {
Ok::<_, Error>(service_fn(move |req: Request<Body>| {
let web_server = web_server.clone();
web_server.handle_request(req, path.clone())
}))
}
});
info!("Web server listening on {}", addr);
match addr {
UnixOrTCPSocketAddress::TCPSocket(addr) => { UnixOrTCPSocketAddress::TCPSocket(addr) => {
Server::bind(&addr) let listener = TcpListener::bind(addr).await?;
.serve(tcp_service)
.with_graceful_shutdown(shutdown_signal) loop {
.await? let (stream, client_addr) = tokio::select! {
acc = listener.accept() => acc?,
_ = &mut shutdown_signal => break,
};
web_server.launch_handler(stream, client_addr.to_string());
}
} }
UnixOrTCPSocketAddress::UnixSocket(ref path) => { UnixOrTCPSocketAddress::UnixSocket(ref path) => {
if path.exists() { if path.exists() {
fs::remove_file(path)? fs::remove_file(path)?
} }
let bound = Server::bind_unix(path)?; let listener = UnixListener::bind(path)?;
fs::set_permissions(path, Permissions::from_mode(0o222))?; fs::set_permissions(path, Permissions::from_mode(0o222))?;
bound loop {
.serve(unix_service) let (stream, _) = tokio::select! {
.with_graceful_shutdown(shutdown_signal) acc = listener.accept() => acc?,
.await?; _ = &mut shutdown_signal => break,
};
web_server.launch_handler(stream, path.display().to_string());
}
} }
}; };
Ok(()) Ok(())
} }
fn launch_handler<S>(self: &Arc<Self>, stream: S, client_addr: String)
where
S: AsyncRead + AsyncWrite + Send + Sync + 'static,
{
let this = self.clone();
let io = TokioIo::new(stream);
let serve = move |req: Request<IncomingBody>| {
this.clone().handle_request(req, client_addr.to_string())
};
tokio::task::spawn(async move {
let io = Box::pin(io);
if let Err(e) = http1::Builder::new()
.serve_connection(io, service_fn(serve))
.await
{
debug!("Error handling HTTP connection: {}", e);
}
});
}
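The web server rewrite above follows the standard hyper 1.x serving shape: hyper::Server and make_service_fn are gone, so the code accepts connections itself, wraps each tokio stream in TokioIo, and drives it with http1::Builder::serve_connection. A condensed, self-contained sketch of the same pattern (TCP only, placeholder service; assumes hyper 1.x plus hyper-util with its tokio runtime helpers):

    use std::convert::Infallible;
    use std::future::Future;

    use bytes::Bytes;
    use http_body_util::Full;
    use hyper::body::Incoming;
    use hyper::server::conn::http1;
    use hyper::service::service_fn;
    use hyper::{Request, Response};
    use hyper_util::rt::TokioIo;
    use tokio::net::TcpListener;

    // Condensed sketch of the accept loop + per-connection handler used above.
    async fn serve(listener: TcpListener, shutdown: impl Future<Output = ()>) -> std::io::Result<()> {
        tokio::pin!(shutdown);
        loop {
            let (stream, _peer) = tokio::select! {
                acc = listener.accept() => acc?,
                _ = &mut shutdown => break,
            };
            let io = TokioIo::new(stream);
            tokio::spawn(async move {
                let svc = service_fn(|_req: Request<Incoming>| async {
                    Ok::<_, Infallible>(Response::new(Full::new(Bytes::from_static(b"hello"))))
                });
                if let Err(e) = http1::Builder::new().serve_connection(io, svc).await {
                    eprintln!("error handling connection: {}", e);
                }
            });
        }
        Ok(())
    }

As in run() above, shutdown is simply a future polled in the same select! as accept(); connection tasks that were already spawned are left to finish on their own.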
async fn handle_request( async fn handle_request(
self: Arc<Self>, self: Arc<Self>,
req: Request<Body>, req: Request<IncomingBody>,
addr: String, addr: String,
) -> Result<Response<Body>, Infallible> { ) -> Result<Response<BoxBody<Error>>, Infallible> {
if let Ok(forwarded_for_ip_addr) = if let Ok(forwarded_for_ip_addr) =
forwarded_headers::handle_forwarded_for_headers(req.headers()) forwarded_headers::handle_forwarded_for_headers(req.headers())
{ {
@@ -187,7 +196,8 @@ impl WebServer {
match res { match res {
Ok(res) => { Ok(res) => {
debug!("{} {} {}", req.method(), res.status(), req.uri()); debug!("{} {} {}", req.method(), res.status(), req.uri());
Ok(res) Ok(res
.map(|body| BoxBody::new(http_body_util::BodyExt::map_err(body, Error::from))))
} }
Err(error) => { Err(error) => {
info!( info!(
@@ -220,7 +230,10 @@ impl WebServer {
Ok(exists) Ok(exists)
} }
async fn serve_file(self: &Arc<Self>, req: &Request<Body>) -> Result<Response<Body>, Error> { async fn serve_file(
self: &Arc<Self>,
req: &Request<IncomingBody>,
) -> Result<Response<BoxBody<ApiError>>, Error> {
// Get http authority string (eg. [::1]:3902 or garage.tld:80) // Get http authority string (eg. [::1]:3902 or garage.tld:80)
let authority = req let authority = req
.headers() .headers()
@@ -268,8 +281,8 @@ impl WebServer {
let ret_doc = match *req.method() { let ret_doc = match *req.method() {
Method::OPTIONS => handle_options_for_bucket(req, &bucket), Method::OPTIONS => handle_options_for_bucket(req, &bucket),
Method::HEAD => handle_head(self.garage.clone(), req, bucket_id, &key, None).await, Method::HEAD => handle_head(self.garage.clone(), &req, bucket_id, &key, None).await,
Method::GET => handle_get(self.garage.clone(), req, bucket_id, &key, None).await, Method::GET => handle_get(self.garage.clone(), &req, bucket_id, &key, None).await,
_ => Err(ApiError::bad_request("HTTP method not supported")), _ => Err(ApiError::bad_request("HTTP method not supported")),
}; };
@@ -281,7 +294,7 @@ impl WebServer {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::FOUND) .status(StatusCode::FOUND)
.header("Location", url) .header("Location", url)
.body(Body::empty()) .body(empty_body())
.unwrap()) .unwrap())
} }
_ => ret_doc, _ => ret_doc,
@@ -310,7 +323,7 @@ impl WebServer {
// Create a fake HTTP request with path = the error document // Create a fake HTTP request with path = the error document
let req2 = Request::builder() let req2 = Request::builder()
.uri(format!("http://{}/{}", host, &error_document)) .uri(format!("http://{}/{}", host, &error_document))
.body(Body::empty()) .body(empty_body::<Infallible>())
.unwrap(); .unwrap();
match handle_get(self.garage.clone(), &req2, bucket_id, &error_document, None).await match handle_get(self.garage.clone(), &req2, bucket_id, &error_document, None).await
@@ -358,7 +371,7 @@ impl WebServer {
} }
} }
fn error_to_res(e: Error) -> Response<Body> { fn error_to_res(e: Error) -> Response<BoxBody<Error>> {
// If we are here, it is either that: // If we are here, it is either that:
// - there was an error before trying to get the requested URL // - there was an error before trying to get the requested URL
// from the bucket (e.g. bucket not found) // from the bucket (e.g. bucket not found)
@@ -366,7 +379,7 @@ fn error_to_res(e: Error) -> Response<Body> {
// was a HEAD request or we couldn't get the error document) // was a HEAD request or we couldn't get the error document)
// We do NOT enter this code path when returning the bucket's // We do NOT enter this code path when returning the bucket's
// error document (this is handled in serve_file) // error document (this is handled in serve_file)
let body = Body::from(format!("{}\n", e)); let body = string_body(format!("{}\n", e));
let mut http_error = Response::new(body); let mut http_error = Response::new(body);
*http_error.status_mut() = e.http_status_code(); *http_error.status_mut() = e.http_status_code();
e.add_headers(http_error.headers_mut()); e.add_headers(http_error.headers_mut());