Compare commits
No commits in common. "main" and "snapshot_consistency_sqlite" have entirely different histories.
main ... snapshot_consistency_sqlite
98 changed files with 8624 additions and 1994 deletions
3  .cargo/config.toml
@@ -0,0 +1,3 @@
+[target.x86_64-unknown-linux-gnu]
+linker = "clang"
+rustflags = ["-C", "link-arg=-fuse-ld=mold"]
@@ -16,21 +16,31 @@ steps:
 - name: build
 image: nixpkgs/nix:nixos-22.05
 commands:
-- nix-build -j4 --attr flakePackages.dev
+- nix-build --no-build-output --attr pkgs.amd64.debug --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}

-- name: unit + func tests (lmdb)
+- name: unit + func tests
 image: nixpkgs/nix:nixos-22.05
+environment:
+GARAGE_TEST_INTEGRATION_EXE: result-bin/bin/garage
+GARAGE_TEST_INTEGRATION_PATH: tmp-garage-integration
 commands:
-- nix-build -j4 --attr flakePackages.tests-lmdb
+- nix-build --no-build-output --attr test.amd64 --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
+- ./result/bin/garage_db-*
-- name: unit + func tests (sqlite)
+- ./result/bin/garage_api-*
-image: nixpkgs/nix:nixos-22.05
+- ./result/bin/garage_model-*
-commands:
+- ./result/bin/garage_rpc-*
-- nix-build -j4 --attr flakePackages.tests-sqlite
+- ./result/bin/garage_table-*
+- ./result/bin/garage_util-*
+- ./result/bin/garage_web-*
+- ./result/bin/garage-*
+- GARAGE_TEST_INTEGRATION_DB_ENGINE=lmdb ./result/bin/integration-* || (cat tmp-garage-integration/stderr.log; false)
+- nix-shell --attr ci --run "killall -9 garage" || true
+- GARAGE_TEST_INTEGRATION_DB_ENGINE=sqlite ./result/bin/integration-* || (cat tmp-garage-integration/stderr.log; false)
+- rm result
+- rm -rv tmp-garage-integration

 - name: integration tests
 image: nixpkgs/nix:nixos-22.05
 commands:
-- nix-build -j4 --attr flakePackages.dev
+- nix-build --no-build-output --attr pkgs.amd64.debug --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
 - nix-shell --attr ci --run ./script/test-smoke.sh || (cat /tmp/garage.log; false)
-depends_on: [ build ]
@@ -18,12 +18,12 @@ steps:
 - name: build
 image: nixpkgs/nix:nixos-22.05
 commands:
-- nix-build --attr releasePackages.${ARCH} --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
+- nix-build --no-build-output --attr pkgs.${ARCH}.release --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}

 - name: check is static binary
 image: nixpkgs/nix:nixos-22.05
 commands:
-- nix-shell --attr ci --run "./script/not-dynamic.sh result/bin/garage"
+- nix-shell --attr ci --run "./script/not-dynamic.sh result-bin/bin/garage"

 - name: integration tests
 image: nixpkgs/nix:nixos-22.05
1669  Cargo.lock  (generated)
File diff suppressed because it is too large
15  Cargo.toml
@@ -8,10 +8,7 @@ members = [
 "src/table",
 "src/block",
 "src/model",
-"src/api/common",
+"src/api",
-"src/api/s3",
-"src/api/k2v",
-"src/api/admin",
 "src/web",
 "src/garage",
 "src/k2v-client",
@@ -24,10 +21,7 @@ default-members = ["src/garage"]

 # Internal Garage crates
 format_table = { version = "0.1.1", path = "src/format-table" }
-garage_api_common = { version = "1.0.1", path = "src/api/common" }
+garage_api = { version = "1.0.1", path = "src/api" }
-garage_api_admin = { version = "1.0.1", path = "src/api/admin" }
-garage_api_s3 = { version = "1.0.1", path = "src/api/s3" }
-garage_api_k2v = { version = "1.0.1", path = "src/api/k2v" }
 garage_block = { version = "1.0.1", path = "src/block" }
 garage_db = { version = "1.0.1", path = "src/db", default-features = false }
 garage_model = { version = "1.0.1", path = "src/model", default-features = false }
@@ -52,6 +46,7 @@ chrono = "0.4"
 crc32fast = "1.4"
 crc32c = "0.6"
 crypto-common = "0.1"
+digest = "0.10"
 err-derive = "0.3"
 gethostname = "0.4"
 git-version = "0.3.4"
@@ -64,7 +59,7 @@ ipnet = "2.9.0"
 lazy_static = "1.4"
 md-5 = "0.10"
 mktemp = "0.5"
-nix = { version = "0.29", default-features = false, features = ["fs"] }
+nix = { version = "0.27", default-features = false, features = ["fs"] }
 nom = "7.1"
 parse_duration = "2.1"
 pin-project = "1.0.12"
@@ -141,6 +136,8 @@ thiserror = "1.0"
 assert-json-diff = "2.0"
 rustc_version = "0.4.0"
 static_init = "1.0"

+aws-config = "1.1.4"
 aws-sdk-config = "1.13"
 aws-sdk-s3 = "1.14"
@@ -3,5 +3,5 @@ FROM scratch
 ENV RUST_BACKTRACE=1
 ENV RUST_LOG=garage=info

-COPY result/bin/garage /
+COPY result-bin/bin/garage /
 CMD [ "/garage", "server"]
13  Makefile
@@ -1,10 +1,13 @@
-.PHONY: doc all run1 run2 run3
+.PHONY: doc all release shell run1 run2 run3

 all:
-clear
+clear; cargo build
-cargo build \
---config 'target.x86_64-unknown-linux-gnu.linker="clang"' \
+release:
---config 'target.x86_64-unknown-linux-gnu.rustflags=["-C", "link-arg=-fuse-ld=mold"]' \
+nix-build --attr pkgs.amd64.release --no-build-output

+shell:
+nix-shell

 # ----
48  default.nix
@@ -3,22 +3,46 @@
 with import ./nix/common.nix;

 let
-pkgs = import nixpkgs { };
+pkgs = import pkgsSrc { };
 compile = import ./nix/compile.nix;

-build_release = target: (compile {
+build_debug_and_release = (target: {
-inherit target system git_version nixpkgs;
+debug = (compile {
-crane = flake.inputs.crane;
+inherit system target git_version pkgsSrc cargo2nixOverlay;
-rust-overlay = flake.inputs.rust-overlay;
+release = false;
+}).workspace.garage { compileMode = "build"; };
+
+release = (compile {
+inherit system target git_version pkgsSrc cargo2nixOverlay;
 release = true;
-}).garage;
+}).workspace.garage { compileMode = "build"; };
+});
+
+test = (rustPkgs:
+pkgs.symlinkJoin {
+name = "garage-tests";
+paths =
+builtins.map (key: rustPkgs.workspace.${key} { compileMode = "test"; })
+(builtins.attrNames rustPkgs.workspace);
+});

 in {
-releasePackages = {
+pkgs = {
-amd64 = build_release "x86_64-unknown-linux-musl";
+amd64 = build_debug_and_release "x86_64-unknown-linux-musl";
-i386 = build_release "i686-unknown-linux-musl";
+i386 = build_debug_and_release "i686-unknown-linux-musl";
-arm64 = build_release "aarch64-unknown-linux-musl";
+arm64 = build_debug_and_release "aarch64-unknown-linux-musl";
-arm = build_release "armv6l-unknown-linux-musleabihf";
+arm = build_debug_and_release "armv6l-unknown-linux-musleabihf";
+};
+test = {
+amd64 = test (compile {
+inherit system git_version pkgsSrc cargo2nixOverlay;
+target = "x86_64-unknown-linux-musl";
+features = [
+"garage/bundled-libs"
+"garage/k2v"
+"garage/lmdb"
+"garage/sqlite"
+];
+});
 };
-flakePackages = flake.packages.${system};
 }
@@ -17,7 +17,7 @@ Garage can also help you serve this content.

 ## Gitea

-You can use Garage with Gitea to store your [git LFS](https://git-lfs.github.com/) data, your users' avatar, and their attachments.
+You can use Garage with Gitea to store your [git LFS](https://git-lfs.github.com/) data, your users' avatar, and their attachements.
 You can configure a different target for each data type (check `[lfs]` and `[attachment]` sections of the Gitea documentation) and you can provide a default one through the `[storage]` section.

 Let's start by creating a key and a bucket (your key id and secret will be needed later, keep them somewhere):
@@ -21,14 +21,14 @@ data_dir = [
 ```

 Garage will automatically balance all blocks stored by the node
-among the different specified directories, proportionally to the
+among the different specified directories, proportionnally to the
 specified capacities.

 ## Updating the list of storage locations

 If you add new storage locations to your `data_dir`,
 Garage will not rebalance existing data between storage locations.
-Newly written blocks will be balanced proportionally to the specified capacities,
+Newly written blocks will be balanced proportionnally to the specified capacities,
 and existing data may be moved between drives to improve balancing,
 but only opportunistically when a data block is re-written (e.g. an object
 is re-uploaded, or an object with a duplicate block is uploaded).
70  flake.lock  (generated)
@@ -1,17 +1,28 @@
 {
 "nodes": {
-"crane": {
+"cargo2nix": {
+"inputs": {
+"flake-compat": [
+"flake-compat"
+],
+"flake-utils": "flake-utils",
+"nixpkgs": [
+"nixpkgs"
+],
+"rust-overlay": "rust-overlay"
+},
 "locked": {
-"lastModified": 1737689766,
+"lastModified": 1705129117,
-"narHash": "sha256-ivVXYaYlShxYoKfSo5+y5930qMKKJ8CLcAoIBPQfJ6s=",
+"narHash": "sha256-LgdDHibvimzYhxBK3kxCk2gAL7k4Hyigl5KI0X9cijA=",
-"owner": "ipetkov",
+"owner": "cargo2nix",
-"repo": "crane",
+"repo": "cargo2nix",
-"rev": "6fe74265bbb6d016d663b1091f015e2976c4a527",
+"rev": "ae19a9e1f8f0880c088ea155ab66cee1fa001f59",
 "type": "github"
 },
 "original": {
-"owner": "ipetkov",
+"owner": "cargo2nix",
-"repo": "crane",
+"repo": "cargo2nix",
+"rev": "ae19a9e1f8f0880c088ea155ab66cee1fa001f59",
 "type": "github"
 }
 },
@@ -31,15 +42,12 @@
 }
 },
 "flake-utils": {
-"inputs": {
-"systems": "systems"
-},
 "locked": {
-"lastModified": 1731533236,
+"lastModified": 1659877975,
-"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
+"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
 "owner": "numtide",
 "repo": "flake-utils",
-"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
 "type": "github"
 },
 "original": {
@@ -66,46 +74,34 @@
 },
 "root": {
 "inputs": {
-"crane": "crane",
+"cargo2nix": "cargo2nix",
 "flake-compat": "flake-compat",
-"flake-utils": "flake-utils",
+"flake-utils": [
-"nixpkgs": "nixpkgs",
+"cargo2nix",
-"rust-overlay": "rust-overlay"
+"flake-utils"
+],
+"nixpkgs": "nixpkgs"
 }
 },
 "rust-overlay": {
 "inputs": {
 "nixpkgs": [
+"cargo2nix",
 "nixpkgs"
 ]
 },
 "locked": {
-"lastModified": 1738549608,
+"lastModified": 1736649126,
-"narHash": "sha256-GdyT9QEUSx5k/n8kILuNy83vxxdyUfJ8jL5mMpQZWfw=",
+"narHash": "sha256-XCw5sv/ePsroqiF3lJM6Y2X9EhPdHeE47gr3Q8b0UQw=",
 "owner": "oxalica",
 "repo": "rust-overlay",
-"rev": "35c6f8c4352f995ecd53896200769f80a3e8f22d",
+"rev": "162ab0edc2936508470199b2e8e6c444a2535019",
 "type": "github"
 },
 "original": {
 "owner": "oxalica",
 "repo": "rust-overlay",
-"rev": "35c6f8c4352f995ecd53896200769f80a3e8f22d",
+"rev": "162ab0edc2936508470199b2e8e6c444a2535019",
-"type": "github"
-}
-},
-"systems": {
-"locked": {
-"lastModified": 1681028828,
-"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-"owner": "nix-systems",
-"repo": "default",
-"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-"type": "github"
-},
-"original": {
-"owner": "nix-systems",
-"repo": "default",
 "type": "github"
 }
 }
 }
104  flake.nix
@@ -2,84 +2,92 @@
 description =
 "Garage, an S3-compatible distributed object store for self-hosted deployments";

-# Nixpkgs 24.11 as of 2025-01-12
+# Nixpkgs 24.11 as of 2025-01-12 has rustc v1.82
 inputs.nixpkgs.url =
 "github:NixOS/nixpkgs/7c4869c47090dd7f9f1bdfb49a22aea026996815";

-# Rust overlay as of 2025-02-03
-inputs.rust-overlay.url =
-"github:oxalica/rust-overlay/35c6f8c4352f995ecd53896200769f80a3e8f22d";
-inputs.rust-overlay.inputs.nixpkgs.follows = "nixpkgs";
-
-inputs.crane.url = "github:ipetkov/crane";

 inputs.flake-compat.url = "github:nix-community/flake-compat";
-inputs.flake-utils.url = "github:numtide/flake-utils";

-outputs = { self, nixpkgs, flake-utils, crane, rust-overlay, ... }:
+inputs.cargo2nix = {
+# As of 2022-10-18: two small patches over unstable branch, one for clippy and one to fix feature detection
+#url = "github:Alexis211/cargo2nix/a7a61179b66054904ef6a195d8da736eaaa06c36";
+
+# As of 2023-04-25:
+# - my two patches were merged into unstable (one for clippy and one to "fix" feature detection)
+# - rustc v1.66
+# url = "github:cargo2nix/cargo2nix/8fb57a670f7993bfc24099c33eb9c5abb51f29a2";
+
+# Mainline cargo2nix as of of 2025-01-12 (branch release-0.11.0)
+url = "github:cargo2nix/cargo2nix/ae19a9e1f8f0880c088ea155ab66cee1fa001f59";
+
+# Rust overlay as of 2025-01-12
+inputs.rust-overlay.url =
+"github:oxalica/rust-overlay/162ab0edc2936508470199b2e8e6c444a2535019";
+
+inputs.nixpkgs.follows = "nixpkgs";
+inputs.flake-compat.follows = "flake-compat";
+};
+
+inputs.flake-utils.follows = "cargo2nix/flake-utils";
+
+outputs = { self, nixpkgs, cargo2nix, flake-utils, ... }:
 let
+git_version = self.lastModifiedDate;
 compile = import ./nix/compile.nix;
 in
 flake-utils.lib.eachDefaultSystem (system:
 let
 pkgs = nixpkgs.legacyPackages.${system};
-packageFor = target: release: (compile {
-inherit system target nixpkgs crane rust-overlay release;
-}).garage;
-testWith = extraTestEnv: (compile {
-inherit system nixpkgs crane rust-overlay extraTestEnv;
-release = false;
-}).garage-test;
 in
 {
-packages = {
+packages =
+let
+packageFor = target: (compile {
+inherit system git_version target;
+pkgsSrc = nixpkgs;
+cargo2nixOverlay = cargo2nix.overlays.default;
+release = true;
+}).workspace.garage { compileMode = "build"; };
+in
+{
 # default = native release build
-default = packageFor null true;
+default = packageFor null;
+# other = cross-compiled, statically-linked builds
-# <arch> = cross-compiled, statically-linked release builds
+amd64 = packageFor "x86_64-unknown-linux-musl";
-amd64 = packageFor "x86_64-unknown-linux-musl" true;
+i386 = packageFor "i686-unknown-linux-musl";
-i386 = packageFor "i686-unknown-linux-musl" true;
+arm64 = packageFor "aarch64-unknown-linux-musl";
-arm64 = packageFor "aarch64-unknown-linux-musl" true;
+arm = packageFor "armv6l-unknown-linux-musl";
-arm = packageFor "armv6l-unknown-linux-musl" true;

-# dev = native dev build
-dev = packageFor null false;
-
-# test = cargo test
-tests = testWith {};
-tests-lmdb = testWith {
-GARAGE_TEST_INTEGRATION_DB_ENGINE = "lmdb";
-};
-tests-sqlite = testWith {
-GARAGE_TEST_INTEGRATION_DB_ENGINE = "sqlite";
-};
 };

 # ---- developpment shell, for making native builds only ----
 devShells =
 let
-targets = compile {
+shellWithPackages = (packages: (compile {
-inherit system nixpkgs crane rust-overlay;
+inherit system git_version;
-};
+pkgsSrc = nixpkgs;
+cargo2nixOverlay = cargo2nix.overlays.default;
+}).workspaceShell { inherit packages; });
 in
 {
-default = targets.devShell;
+default = shellWithPackages
+(with pkgs; [
+rustfmt
+clang
+mold
+]);

 # import the full shell using `nix develop .#full`
-full = pkgs.mkShell {
+full = shellWithPackages (with pkgs; [
-buildInputs = with pkgs; [
+rustfmt
-targets.toolchain
+rust-analyzer
-protobuf
 clang
 mold
 # ---- extra packages for dev tasks ----
-rust-analyzer
 cargo-audit
 cargo-outdated
 cargo-machete
 nixpkgs-fmt
-];
+]);
-};
 };
 });
 }
@@ -2,7 +2,7 @@

 with import ./common.nix;
 let
-pkgs = import nixpkgs { };
+pkgs = import pkgsSrc { };
 lib = pkgs.lib;

 /* Converts a key list and a value list to a set

@@ -10,9 +10,9 @@ let

 flake = (import flake-compat { system = builtins.currentSystem; src = ../.; });
 in
+rec {
-{
+pkgsSrc = flake.defaultNix.inputs.nixpkgs;
-flake = flake.defaultNix;
+cargo2nix = flake.defaultNix.inputs.cargo2nix;
-nixpkgs = flake.defaultNix.inputs.nixpkgs;
+cargo2nixOverlay = cargo2nix.overlays.default;
-devShells = flake.defaultNix.devShells.${builtins.currentSystem};
+devShells = builtins.getAttr builtins.currentSystem flake.defaultNix.devShells;
 }
238  nix/compile.nix
@@ -1,64 +1,83 @@
-{
+{ system, target ? null, pkgsSrc, cargo2nixOverlay
-/* build inputs */
+, release ? false, git_version ? null, features ? null, }:
-nixpkgs,
-crane,
-rust-overlay,
-
-/* parameters */
-system,
-git_version ? null,
-target ? null,
-release ? false,
-features ? null,
-extraTestEnv ? {}
-}:

 let
 log = v: builtins.trace v v;

-# NixOS and Rust/Cargo triples do not match for ARM, fix it here.
-rustTarget = if target == "armv6l-unknown-linux-musleabihf" then
-"arm-unknown-linux-musleabihf"
-else
-target;
-
-rustTargetEnvMap = {
-"x86_64-unknown-linux-musl" = "X86_64_UNKNOWN_LINUX_MUSL";
-"aarch64-unknown-linux-musl" = "AARCH64_UNKNOWN_LINUX_MUSL";
-"i686-unknown-linux-musl" = "I686_UNKNOWN_LINUX_MUSL";
-"arm-unknown-linux-musleabihf" = "ARM_UNKNOWN_LINUX_MUSLEABIHF";
-};
-
-pkgsNative = import nixpkgs {
-inherit system;
-overlays = [ (import rust-overlay) ];
-};

 pkgs = if target != null then
-import nixpkgs {
+import pkgsSrc {
 inherit system;
 crossSystem = {
 config = target;
 isStatic = true;
 };
-overlays = [ (import rust-overlay) ];
+overlays = [ cargo2nixOverlay ];
 }
 else
-pkgsNative;
+import pkgsSrc {
+inherit system;
+overlays = [ cargo2nixOverlay ];
+};

-inherit (pkgs) lib stdenv;
+toolchainOptions = {
+rustVersion = "1.78.0";
+extraRustComponents = [ "clippy" ];
+};

-toolchainFn = (p: p.rust-bin.stable."1.82.0".default.override {
+/* Cargo2nix provides many overrides by default, you can take inspiration from them:
-targets = lib.optionals (target != null) [ rustTarget ];
+https://github.com/cargo2nix/cargo2nix/blob/master/overlay/overrides.nix
-extensions = [
-"rust-src"
+You can have a complete list of the available options by looking at the overriden object, mkcrate:
-"rustfmt"
+https://github.com/cargo2nix/cargo2nix/blob/master/overlay/mkcrate.nix
+*/
+packageOverrides = pkgs:
+pkgs.rustBuilder.overrides.all ++ [
+/* [1] We need to alter Nix hardening to make static binaries: PIE,
+Position Independent Executables seems to be supported only on amd64. Having
+this flag set either 1. make our executables crash or 2. compile as dynamic on some platforms.
+Here, we deactivate it. Later (find `codegenOpts`), we reactivate it for supported targets
+(only amd64 curently) through the `-static-pie` flag.
+PIE is a feature used by ASLR, which helps mitigate security issues.
+Learn more about Nix Hardening at: https://github.com/NixOS/nixpkgs/blob/master/pkgs/build-support/cc-wrapper/add-hardening.sh
+
+[2] We want to inject the git version while keeping the build deterministic.
+As we do not want to consider the .git folder as part of the input source,
+we ask the user (the CI often) to pass the value to Nix.
+
+[3] We don't want libsodium-sys and zstd-sys to try to use pkgconfig to build against a system library.
+However the features to do so get activated for some reason (due to a bug in cargo2nix?),
+so disable them manually here.
+*/
+(pkgs.rustBuilder.rustLib.makeOverride {
+name = "garage";
+overrideAttrs = drv:
+(if git_version != null then {
+# [2]
+preConfigure = ''
+${drv.preConfigure or ""}
+export GIT_VERSION="${git_version}"
+'';
+} else
+{ }) // {
+# [1]
+hardeningDisable = [ "pie" ];
+};
+})
+
+(pkgs.rustBuilder.rustLib.makeOverride {
+name = "libsodium-sys";
+overrideArgs = old: {
+features = [ ]; # [3]
+};
+})
+
+(pkgs.rustBuilder.rustLib.makeOverride {
+name = "zstd-sys";
+overrideArgs = old: {
+features = [ ]; # [3]
+};
+})
 ];
-});
-
-craneLib = (crane.mkLib pkgs).overrideToolchain toolchainFn;
-
-src = craneLib.cleanCargoSource ../.;

 /* We ship some parts of the code disabled by default by putting them behind a flag.
 It speeds up the compilation (when the feature is not required) and released crates have less dependency by default (less attack surface, disk space, etc.).
@@ -68,15 +87,16 @@ let
 rootFeatures = if features != null then
 features
 else
-([ "bundled-libs" "lmdb" "sqlite" "k2v" ] ++ (lib.optionals release [
+([ "garage/bundled-libs" "garage/lmdb" "garage/sqlite" "garage/k2v" ] ++ (if release then [
-"consul-discovery"
+"garage/consul-discovery"
-"kubernetes-discovery"
+"garage/kubernetes-discovery"
-"metrics"
+"garage/metrics"
-"telemetry-otlp"
+"garage/telemetry-otlp"
-"syslog"
+"garage/syslog"
-]));
+] else
+[ ]));

-featuresStr = lib.concatStringsSep "," rootFeatures;
+packageFun = import ../Cargo.nix;

 /* We compile fully static binaries with musl to simplify deployment on most systems.
 When possible, we reactivate PIE hardening (see above).
@@ -87,9 +107,12 @@ let
 For more information on static builds, please refer to Rust's RFC 1721.
 https://rust-lang.github.io/rfcs/1721-crt-static.html#specifying-dynamicstatic-c-runtime-linkage
 */
-codegenOptsMap = {
-"x86_64-unknown-linux-musl" =
+codegenOpts = {
-[ "target-feature=+crt-static" "link-arg=-static-pie" ];
+"armv6l-unknown-linux-musleabihf" = [
+"target-feature=+crt-static"
+"link-arg=-static"
+]; # compile as dynamic with static-pie
 "aarch64-unknown-linux-musl" = [
 "target-feature=+crt-static"
 "link-arg=-static"
@@ -98,95 +121,18 @@
 "target-feature=+crt-static"
 "link-arg=-static"
 ]; # segfault with static-pie
-"armv6l-unknown-linux-musleabihf" = [
+"x86_64-unknown-linux-musl" =
-"target-feature=+crt-static"
+[ "target-feature=+crt-static" "link-arg=-static-pie" ];
-"link-arg=-static"
-]; # compile as dynamic with static-pie
 };

-codegenOpts = if target != null then codegenOptsMap.${target} else [
+# NixOS and Rust/Cargo triples do not match for ARM, fix it here.
-"link-arg=-fuse-ld=mold"
+rustTarget = if target == "armv6l-unknown-linux-musleabihf" then
-];
+"arm-unknown-linux-musleabihf"
+else
+target;

-commonArgs =
+in pkgs.rustBuilder.makePackageSet ({
-{
+inherit release packageFun packageOverrides codegenOpts rootFeatures;
-inherit src;
+target = rustTarget;
-pname = "garage";
+workspaceSrc = pkgs.lib.cleanSource ../.;
-version = "dev";
+} // toolchainOptions)
-
-strictDeps = true;
-cargoExtraArgs = "--locked --features ${featuresStr}";
-cargoTestExtraArgs = "--workspace";
-
-nativeBuildInputs = [
-pkgsNative.protobuf
-pkgs.stdenv.cc
-] ++ lib.optionals (target == null) [
-pkgs.clang
-pkgs.mold
-];
-
-CARGO_PROFILE = if release then "release" else "dev";
-CARGO_BUILD_RUSTFLAGS =
-lib.concatStringsSep
-" "
-(builtins.map (flag: "-C ${flag}") codegenOpts);
-}
-//
-(if rustTarget != null then {
-CARGO_BUILD_TARGET = rustTarget;
-
-"CARGO_TARGET_${rustTargetEnvMap.${rustTarget}}_LINKER" = "${stdenv.cc.targetPrefix}cc";
-
-HOST_CC = "${stdenv.cc.nativePrefix}cc";
-TARGET_CC = "${stdenv.cc.targetPrefix}cc";
-} else {
-CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER = "clang";
-});
-
-in rec {
-toolchain = toolchainFn pkgs;
-
-devShell = pkgs.mkShell {
-buildInputs = [
-toolchain
-] ++ (with pkgs; [
-protobuf
-clang
-mold
-]);
-};
-
-# ---- building garage ----
-
-garage-deps = craneLib.buildDepsOnly commonArgs;
-
-garage = craneLib.buildPackage (commonArgs // {
-cargoArtifacts = garage-deps;
-
-doCheck = false;
-} //
-(if git_version != null then {
-version = git_version;
-GIT_VERSION = git_version;
-} else {}));
-
-# ---- testing garage ----
-
-garage-test-bin = craneLib.cargoBuild (commonArgs // {
-cargoArtifacts = garage-deps;
-
-pname = "garage-tests";
-
-CARGO_PROFILE = "test";
-cargoExtraArgs = "${commonArgs.cargoExtraArgs} --tests --workspace";
-doCheck = false;
-});
-
-garage-test = craneLib.cargoTest (commonArgs // {
-cargoArtifacts = garage-test-bin;
-nativeBuildInputs = commonArgs.nativeBuildInputs ++ [
-pkgs.cacert
-];
-} // extraTestEnv);
-}
@ -7,12 +7,7 @@ if [ "$#" -ne 1 ]; then
|
||||||
exit 2
|
exit 2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ ! -x "$1" ]; then
|
if file $1 | grep 'dynamically linked' 2>&1; then
|
||||||
echo "[fail] $1 does not exist or is not an executable"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if file "$1" | grep 'dynamically linked' 2>&1; then
|
|
||||||
echo "[fail] $1 is dynamic"
|
echo "[fail] $1 is dynamic"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
|
@@ -3,7 +3,7 @@
 with import ./nix/common.nix;

 let
-pkgs = import nixpkgs {
+pkgs = import pkgsSrc {
 inherit system;
 };
 winscp = (import ./nix/winscp.nix) pkgs;
@@ -39,7 +39,7 @@ in
 --endpoint-url https://garage.deuxfleurs.fr \
 --region garage \
 s3 cp \
-./result/bin/garage \
+./result-bin/bin/garage \
 s3://garagehq.deuxfleurs.fr/_releases/''${CI_COMMIT_TAG:-$CI_COMMIT_SHA}/''${TARGET}/garage
 }
@@ -1,5 +1,5 @@
 [package]
-name = "garage_api_s3"
+name = "garage_api"
 version = "1.0.1"
 authors = ["Alex Auvolat <alex@adnab.me>"]
 edition = "2018"
@@ -20,24 +20,30 @@ garage_block.workspace = true
 garage_net.workspace = true
 garage_util.workspace = true
 garage_rpc.workspace = true
-garage_api_common.workspace = true

 aes-gcm.workspace = true
+argon2.workspace = true
 async-compression.workspace = true
+async-trait.workspace = true
 base64.workspace = true
 bytes.workspace = true
 chrono.workspace = true
 crc32fast.workspace = true
 crc32c.workspace = true
+crypto-common.workspace = true
 err-derive.workspace = true
 hex.workspace = true
+hmac.workspace = true
+idna.workspace = true
 tracing.workspace = true
 md-5.workspace = true
+nom.workspace = true
 pin-project.workspace = true
 sha1.workspace = true
 sha2.workspace = true

 futures.workspace = true
+futures-util.workspace = true
 tokio.workspace = true
 tokio-stream.workspace = true
 tokio-util.workspace = true
@@ -48,13 +54,21 @@ httpdate.workspace = true
 http-range.workspace = true
 http-body-util.workspace = true
 hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
+hyper-util.workspace = true
 multer.workspace = true
 percent-encoding.workspace = true
 roxmltree.workspace = true
 url.workspace = true

 serde.workspace = true
+serde_bytes.workspace = true
 serde_json.workspace = true
 quick-xml.workspace = true

 opentelemetry.workspace = true
+opentelemetry-prometheus = { workspace = true, optional = true }
+prometheus = { workspace = true, optional = true }
+
+[features]
+k2v = [ "garage_util/k2v", "garage_model/k2v" ]
+metrics = [ "opentelemetry-prometheus", "prometheus" ]
@@ -1,43 +0,0 @@
-[package]
-name = "garage_api_admin"
-version = "1.0.1"
-authors = ["Alex Auvolat <alex@adnab.me>"]
-edition = "2018"
-license = "AGPL-3.0"
-description = "Admin API server crate for the Garage object store"
-repository = "https://git.deuxfleurs.fr/Deuxfleurs/garage"
-readme = "../../README.md"
-
-[lib]
-path = "lib.rs"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-garage_model.workspace = true
-garage_table.workspace = true
-garage_util.workspace = true
-garage_rpc.workspace = true
-garage_api_common.workspace = true
-
-argon2.workspace = true
-async-trait.workspace = true
-err-derive.workspace = true
-hex.workspace = true
-tracing.workspace = true
-
-futures.workspace = true
-tokio.workspace = true
-http.workspace = true
-hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
-url.workspace = true
-
-serde.workspace = true
-serde_json.workspace = true
-
-opentelemetry.workspace = true
-opentelemetry-prometheus = { workspace = true, optional = true }
-prometheus = { workspace = true, optional = true }
-
-[features]
-metrics = [ "opentelemetry-prometheus", "prometheus" ]
@@ -2,6 +2,7 @@ use std::collections::HashMap;
 use std::sync::Arc;

 use argon2::password_hash::PasswordHash;
+use async_trait::async_trait;

 use http::header::{ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ALLOW};
 use hyper::{body::Incoming as IncomingBody, Request, Response, StatusCode};
@@ -19,15 +20,15 @@ use garage_rpc::system::ClusterHealthStatus;
 use garage_util::error::Error as GarageError;
 use garage_util::socket_address::UnixOrTCPSocketAddress;

-use garage_api_common::generic_server::*;
+use crate::generic_server::*;
-use garage_api_common::helpers::*;

-use crate::bucket::*;
+use crate::admin::bucket::*;
-use crate::cluster::*;
+use crate::admin::cluster::*;
-use crate::error::*;
+use crate::admin::error::*;
-use crate::key::*;
+use crate::admin::key::*;
-use crate::router_v0;
+use crate::admin::router_v0;
-use crate::router_v1::{Authorization, Endpoint};
+use crate::admin::router_v1::{Authorization, Endpoint};
+use crate::helpers::*;

 pub type ResBody = BoxBody<Error>;

@@ -220,6 +221,7 @@ impl AdminApiServer {
 }
 }

+#[async_trait]
 impl ApiHandler for AdminApiServer {
 const API_NAME: &'static str = "admin";
 const API_NAME_DISPLAY: &'static str = "Admin";
@@ -17,12 +17,11 @@ use garage_model::permission::*;
 use garage_model::s3::mpu_table;
 use garage_model::s3::object_table::*;

-use garage_api_common::common_error::CommonError;
+use crate::admin::api_server::ResBody;
-use garage_api_common::helpers::*;
+use crate::admin::error::*;
+use crate::admin::key::ApiBucketKeyPerm;
-use crate::api_server::ResBody;
+use crate::common_error::CommonError;
-use crate::error::*;
+use crate::helpers::*;
-use crate::key::ApiBucketKeyPerm;

 pub async fn handle_list_buckets(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
 let buckets = garage
@@ -12,10 +12,9 @@ use garage_rpc::layout;

 use garage_model::garage::Garage;

-use garage_api_common::helpers::{json_ok_response, parse_json_body};
+use crate::admin::api_server::ResBody;
+use crate::admin::error::*;
-use crate::api_server::ResBody;
+use crate::helpers::{json_ok_response, parse_json_body};
-use crate::error::*;

 pub async fn handle_get_cluster_status(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
 let layout = garage.system.cluster_layout();
@@ -1,24 +1,20 @@
-use std::convert::TryFrom;
-
 use err_derive::Error;
 use hyper::header::HeaderValue;
 use hyper::{HeaderMap, StatusCode};

 pub use garage_model::helper::error::Error as HelperError;

-use garage_api_common::common_error::{commonErrorDerivative, CommonError};
+use crate::common_error::CommonError;
-pub use garage_api_common::common_error::{
+pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
-CommonErrorDerivative, OkOrBadRequest, OkOrInternalError,
+use crate::generic_server::ApiError;
-};
+use crate::helpers::*;
-use garage_api_common::generic_server::ApiError;
-use garage_api_common::helpers::*;

 /// Errors of this crate
 #[derive(Debug, Error)]
 pub enum Error {
 #[error(display = "{}", _0)]
 /// Error from common error
-Common(#[error(source)] CommonError),
+Common(CommonError),

 // Category: cannot process
 /// The API access key does not exist
@@ -33,21 +29,17 @@
 KeyAlreadyExists(String),
 }

-commonErrorDerivative!(Error);
+impl<T> From<T> for Error
+where
-/// FIXME: helper errors are transformed into their corresponding variants
+CommonError: From<T>,
-/// in the Error struct, but in many case a helper error should be considered
+{
-/// an internal error.
+fn from(err: T) -> Self {
-impl From<HelperError> for Error {
+Error::Common(CommonError::from(err))
-fn from(err: HelperError) -> Error {
-match CommonError::try_from(err) {
-Ok(ce) => Self::Common(ce),
-Err(HelperError::NoSuchAccessKey(k)) => Self::NoSuchAccessKey(k),
-Err(_) => unreachable!(),
-}
 }
 }

+impl CommonErrorDerivative for Error {}
+
 impl Error {
 fn code(&self) -> &'static str {
 match self {
@@ -9,10 +9,9 @@ use garage_table::*;
 use garage_model::garage::Garage;
 use garage_model::key_table::*;

-use garage_api_common::helpers::*;
+use crate::admin::api_server::ResBody;
+use crate::admin::error::*;
-use crate::api_server::ResBody;
+use crate::helpers::*;
-use crate::error::*;

 pub async fn handle_list_keys(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
 let res = garage
@@ -1,6 +1,3 @@
-#[macro_use]
-extern crate tracing;
-
 pub mod api_server;
 mod error;
 mod router_v0;
@@ -2,9 +2,8 @@ use std::borrow::Cow;

 use hyper::{Method, Request};

-use garage_api_common::router_macros::*;
+use crate::admin::error::*;
+use crate::router_macros::*;
-use crate::error::*;

 router_match! {@func

@@ -2,10 +2,9 @@ use std::borrow::Cow;

 use hyper::{Method, Request};

-use garage_api_common::router_macros::*;
+use crate::admin::error::*;
+use crate::admin::router_v0;
-use crate::error::*;
+use crate::router_macros::*;
-use crate::router_v0;

 pub enum Authorization {
 None,
@@ -1,44 +0,0 @@
-[package]
-name = "garage_api_common"
-version = "1.0.1"
-authors = ["Alex Auvolat <alex@adnab.me>"]
-edition = "2018"
-license = "AGPL-3.0"
-description = "Common functions for the API server crates for the Garage object store"
-repository = "https://git.deuxfleurs.fr/Deuxfleurs/garage"
-readme = "../../README.md"
-
-[lib]
-path = "lib.rs"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-garage_model.workspace = true
-garage_table.workspace = true
-garage_util.workspace = true
-
-bytes.workspace = true
-chrono.workspace = true
-crypto-common.workspace = true
-err-derive.workspace = true
-hex.workspace = true
-hmac.workspace = true
-idna.workspace = true
-tracing.workspace = true
-nom.workspace = true
-pin-project.workspace = true
-sha2.workspace = true
-
-futures.workspace = true
-tokio.workspace = true
-http.workspace = true
-http-body-util.workspace = true
-hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
-hyper-util.workspace = true
-url.workspace = true
-
-serde.workspace = true
-serde_json.workspace = true
-
-opentelemetry.workspace = true
@@ -1,170 +0,0 @@
-use std::sync::Arc;
-
-use http::header::{
-ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN,
-ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD,
-};
-use hyper::{body::Body, body::Incoming as IncomingBody, Request, Response, StatusCode};
-
-use garage_model::bucket_table::{BucketParams, CorsRule as GarageCorsRule};
-use garage_model::garage::Garage;
-
-use crate::common_error::{
-helper_error_as_internal, CommonError, OkOrBadRequest, OkOrInternalError,
-};
-use crate::helpers::*;
-
-pub fn find_matching_cors_rule<'a>(
-bucket_params: &'a BucketParams,
-req: &Request<impl Body>,
-) -> Result<Option<&'a GarageCorsRule>, CommonError> {
-if let Some(cors_config) = bucket_params.cors_config.get() {
-if let Some(origin) = req.headers().get("Origin") {
-let origin = origin.to_str()?;
-let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
-Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
-None => vec![],
-};
-return Ok(cors_config.iter().find(|rule| {
-cors_rule_matches(rule, origin, req.method().as_ref(), request_headers.iter())
-}));
-}
-}
-Ok(None)
-}
-
-pub fn cors_rule_matches<'a, HI, S>(
-rule: &GarageCorsRule,
-origin: &'a str,
-method: &'a str,
-mut request_headers: HI,
-) -> bool
-where
-HI: Iterator<Item = S>,
-S: AsRef<str>,
-{
-rule.allow_origins.iter().any(|x| x == "*" || x == origin)
-&& rule.allow_methods.iter().any(|x| x == "*" || x == method)
-&& request_headers.all(|h| {
-rule.allow_headers
-.iter()
-.any(|x| x == "*" || x == h.as_ref())
-})
-}
-
-pub fn add_cors_headers(
-resp: &mut Response<impl Body>,
-rule: &GarageCorsRule,
-) -> Result<(), http::header::InvalidHeaderValue> {
-let h = resp.headers_mut();
-h.insert(
-ACCESS_CONTROL_ALLOW_ORIGIN,
-rule.allow_origins.join(", ").parse()?,
-);
-h.insert(
-ACCESS_CONTROL_ALLOW_METHODS,
-rule.allow_methods.join(", ").parse()?,
-);
-h.insert(
-ACCESS_CONTROL_ALLOW_HEADERS,
-rule.allow_headers.join(", ").parse()?,
-);
-h.insert(
-ACCESS_CONTROL_EXPOSE_HEADERS,
-rule.expose_headers.join(", ").parse()?,
-);
-Ok(())
-}
-
-pub async fn handle_options_api(
-garage: Arc<Garage>,
-req: &Request<IncomingBody>,
-bucket_name: Option<String>,
-) -> Result<Response<EmptyBody>, CommonError> {
-// FIXME: CORS rules of buckets with local aliases are
-// not taken into account.
-
-// If the bucket name is a global bucket name,
-// we try to apply the CORS rules of that bucket.
-// If a user has a local bucket name that has
-// the same name, its CORS rules won't be applied
-// and will be shadowed by the rules of the globally
-// existing bucket (but this is inevitable because
-// OPTIONS calls are not auhtenticated).
-if let Some(bn) = bucket_name {
-let helper = garage.bucket_helper();
-let bucket_id = helper
-.resolve_global_bucket_name(&bn)
-.await
-.map_err(helper_error_as_internal)?;
-if let Some(id) = bucket_id {
-let bucket = garage
-.bucket_helper()
-.get_existing_bucket(id)
-.await
-.map_err(helper_error_as_internal)?;
-let bucket_params = bucket.state.into_option().unwrap();
-handle_options_for_bucket(req, &bucket_params)
-} else {
-// If there is a bucket name in the request, but that name
-// does not correspond to a global alias for a bucket,
-// then it's either a non-existing bucket or a local bucket.
-// We have no way of knowing, because the request is not
-// authenticated and thus we can't resolve local aliases.
-// We take the permissive approach of allowing everything,
-// because we don't want to prevent web apps that use
-// local bucket names from making API calls.
-Ok(Response::builder()
-.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
-.header(ACCESS_CONTROL_ALLOW_METHODS, "*")
-.status(StatusCode::OK)
-.body(EmptyBody::new())?)
-}
-} else {
-// If there is no bucket name in the request,
-// we are doing a ListBuckets call, which we want to allow
-// for all origins.
-Ok(Response::builder()
-.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
-.header(ACCESS_CONTROL_ALLOW_METHODS, "GET")
-.status(StatusCode::OK)
-.body(EmptyBody::new())?)
-}
-}
-
-pub fn handle_options_for_bucket(
-req: &Request<IncomingBody>,
-bucket_params: &BucketParams,
-) -> Result<Response<EmptyBody>, CommonError> {
-let origin = req
-.headers()
-.get("Origin")
-.ok_or_bad_request("Missing Origin header")?
-.to_str()?;
-let request_method = req
-.headers()
-.get(ACCESS_CONTROL_REQUEST_METHOD)
-.ok_or_bad_request("Missing Access-Control-Request-Method header")?
-.to_str()?;
-let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
-Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
-None => vec![],
-};
-
-if let Some(cors_config) = bucket_params.cors_config.get() {
-let matching_rule = cors_config
-.iter()
-.find(|rule| cors_rule_matches(rule, origin, request_method, request_headers.iter()));
-if let Some(rule) = matching_rule {
-let mut resp = Response::builder()
-.status(StatusCode::OK)
-.body(EmptyBody::new())?;
-add_cors_headers(&mut resp, rule).ok_or_internal_error("Invalid CORS configuration")?;
-return Ok(resp);
-}
-}
-
-Err(CommonError::Forbidden(
-"This CORS request is not allowed.".into(),
-))
-}
@@ -1,12 +0,0 @@
-//! Crate for serving a S3 compatible API
-#[macro_use]
-extern crate tracing;
-
-pub mod common_error;
-
-pub mod cors;
-pub mod encoding;
-pub mod generic_server;
-pub mod helpers;
-pub mod router_macros;
-pub mod signature;
@@ -1,5 +1,3 @@
-use std::convert::TryFrom;
-
 use err_derive::Error;
 use hyper::StatusCode;
 
@@ -57,35 +55,6 @@ pub enum CommonError {
 	InvalidBucketName(String),
 }
 
-#[macro_export]
-macro_rules! commonErrorDerivative {
-	( $error_struct: ident ) => {
-		impl From<garage_util::error::Error> for $error_struct {
-			fn from(err: garage_util::error::Error) -> Self {
-				Self::Common(CommonError::InternalError(err))
-			}
-		}
-		impl From<http::Error> for $error_struct {
-			fn from(err: http::Error) -> Self {
-				Self::Common(CommonError::Http(err))
-			}
-		}
-		impl From<hyper::Error> for $error_struct {
-			fn from(err: hyper::Error) -> Self {
-				Self::Common(CommonError::Hyper(err))
-			}
-		}
-		impl From<hyper::header::ToStrError> for $error_struct {
-			fn from(err: hyper::header::ToStrError) -> Self {
-				Self::Common(CommonError::InvalidHeader(err))
-			}
-		}
-		impl CommonErrorDerivative for $error_struct {}
-	};
-}
-
-pub use commonErrorDerivative;
-
 impl CommonError {
 	pub fn http_status_code(&self) -> StatusCode {
 		match self {
@@ -128,39 +97,18 @@ impl CommonError {
 	}
 }
 
-impl TryFrom<HelperError> for CommonError {
-	type Error = HelperError;
-
-	fn try_from(err: HelperError) -> Result<Self, HelperError> {
+impl From<HelperError> for CommonError {
+	fn from(err: HelperError) -> Self {
 		match err {
-			HelperError::Internal(i) => Ok(Self::InternalError(i)),
-			HelperError::BadRequest(b) => Ok(Self::BadRequest(b)),
-			HelperError::InvalidBucketName(n) => Ok(Self::InvalidBucketName(n)),
-			HelperError::NoSuchBucket(n) => Ok(Self::NoSuchBucket(n)),
-			e => Err(e),
+			HelperError::Internal(i) => Self::InternalError(i),
+			HelperError::BadRequest(b) => Self::BadRequest(b),
+			HelperError::InvalidBucketName(n) => Self::InvalidBucketName(n),
+			HelperError::NoSuchBucket(n) => Self::NoSuchBucket(n),
+			e => Self::bad_request(format!("{}", e)),
 		}
 	}
 }
 
-/// This function converts HelperErrors into CommonErrors,
-/// for variants that exist in CommonError.
-/// This is used for helper functions that might return InvalidBucketName
-/// or NoSuchBucket for instance, and we want to pass that error
-/// up to our caller.
-pub fn pass_helper_error(err: HelperError) -> CommonError {
-	match CommonError::try_from(err) {
-		Ok(e) => e,
-		Err(e) => panic!("Helper error `{}` should hot have happenned here", e),
-	}
-}
-
-pub fn helper_error_as_internal(err: HelperError) -> CommonError {
-	match err {
-		HelperError::Internal(e) => CommonError::InternalError(e),
-		e => CommonError::InternalError(GarageError::Message(e.to_string())),
-	}
-}
-
 pub trait CommonErrorDerivative: From<CommonError> {
 	fn internal_error<M: ToString>(msg: M) -> Self {
 		Self::from(CommonError::InternalError(GarageError::Message(
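The hunks above remove, on the snapshot side, the two helper-error conversion paths that main keeps in `common_error.rs`. The sketch below is not part of the diff: the function, the example variant payload, and the import list are invented, and it assumes it sits next to the definitions above in `garage_api_common`. It only contrasts what the two helpers do with the same `HelperError`.

```rust
use hyper::StatusCode;

use garage_model::helper::error::Error as HelperError;

use garage_api_common::common_error::{helper_error_as_internal, pass_helper_error, CommonError};

// Hypothetical helper: map a HelperError either by preserving its variant
// (pass_helper_error) or by hiding it behind an internal error
// (helper_error_as_internal), then ask CommonError for the HTTP status.
fn status_of(err: HelperError, preserve: bool) -> StatusCode {
	let common: CommonError = if preserve {
		// Keeps BadRequest / InvalidBucketName / NoSuchBucket as-is;
		// panics if called with a variant CommonError cannot represent.
		pass_helper_error(err)
	} else {
		// Always safe: anything that is not Internal becomes an
		// InternalError carrying the original message.
		helper_error_as_internal(err)
	};
	common.http_status_code()
}
```

In other words, `pass_helper_error` is only meant for call sites known to produce representable variants; everything else should go through `helper_error_as_internal`.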
@@ -4,6 +4,8 @@ use std::os::unix::fs::PermissionsExt;
 use std::sync::Arc;
 use std::time::Duration;
 
+use async_trait::async_trait;
+
 use futures::future::Future;
 use futures::stream::{futures_unordered::FuturesUnordered, StreamExt};
 
@@ -34,7 +36,7 @@ use garage_util::socket_address::UnixOrTCPSocketAddress;
 
 use crate::helpers::{BoxBody, ErrorBody};
 
-pub trait ApiEndpoint: Send + Sync + 'static {
+pub(crate) trait ApiEndpoint: Send + Sync + 'static {
 	fn name(&self) -> &'static str;
 	fn add_span_attributes(&self, span: SpanRef<'_>);
 }
@@ -45,7 +47,8 @@ pub trait ApiError: std::error::Error + Send + Sync + 'static {
 	fn http_body(&self, garage_region: &str, path: &str) -> ErrorBody;
 }
 
-pub trait ApiHandler: Send + Sync + 'static {
+#[async_trait]
+pub(crate) trait ApiHandler: Send + Sync + 'static {
 	const API_NAME: &'static str;
 	const API_NAME_DISPLAY: &'static str;
 
@@ -53,14 +56,14 @@ pub trait ApiHandler: Send + Sync + 'static {
 	type Error: ApiError;
 
 	fn parse_endpoint(&self, r: &Request<IncomingBody>) -> Result<Self::Endpoint, Self::Error>;
-	fn handle(
+	async fn handle(
 		&self,
 		req: Request<IncomingBody>,
 		endpoint: Self::Endpoint,
-	) -> impl Future<Output = Result<Response<BoxBody<Self::Error>>, Self::Error>> + Send;
+	) -> Result<Response<BoxBody<Self::Error>>, Self::Error>;
 }
 
-pub struct ApiServer<A: ApiHandler> {
+pub(crate) struct ApiServer<A: ApiHandler> {
 	region: String,
 	api_handler: A,
 
@@ -245,11 +248,13 @@ impl<A: ApiHandler> ApiServer<A> {
 
 // ==== helper functions ====
 
+#[async_trait]
 pub trait Accept: Send + Sync + 'static {
 	type Stream: AsyncRead + AsyncWrite + Send + Sync + 'static;
-	fn accept(&self) -> impl Future<Output = std::io::Result<(Self::Stream, String)>> + Send;
+	async fn accept(&self) -> std::io::Result<(Self::Stream, String)>;
 }
 
+#[async_trait]
 impl Accept for TcpListener {
 	type Stream = TcpStream;
 	async fn accept(&self) -> std::io::Result<(Self::Stream, String)> {
@@ -261,6 +266,7 @@ impl Accept for TcpListener {
 
 pub struct UnixListenerOn(pub UnixListener, pub String);
 
+#[async_trait]
 impl Accept for UnixListenerOn {
 	type Stream = UnixStream;
 	async fn accept(&self) -> std::io::Result<(Self::Stream, String)> {
@@ -363,9 +363,9 @@ mod tests {
 }
 
 #[derive(Serialize)]
-pub struct CustomApiErrorBody {
-	pub code: String,
-	pub message: String,
-	pub region: String,
-	pub path: String,
+pub(crate) struct CustomApiErrorBody {
+	pub(crate) code: String,
+	pub(crate) message: String,
+	pub(crate) region: String,
+	pub(crate) path: String,
 }
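The hunks above repeatedly swap between `#[async_trait]` methods (snapshot side) and trait methods returning `impl Future … + Send` (main side). The toy trait below is not Garage code and needs a recent toolchain (Rust 1.75+); it shows why implementors barely notice the difference: a plain `async fn` in the impl satisfies the `impl Future` signature directly, without the boxing that `#[async_trait]` inserts.

```rust
use std::future::Future;

// Style used on main: the trait method returns `impl Future + Send`,
// so an implementation can simply be written as an `async fn`.
trait Handler {
	fn handle(&self, x: u32) -> impl Future<Output = u32> + Send;
}

struct Doubler;

impl Handler for Doubler {
	// Desugars to `-> impl Future<Output = u32>`, and the produced future
	// is Send because it only captures &self and a u32.
	async fn handle(&self, x: u32) -> u32 {
		x * 2
	}
}

#[tokio::main]
async fn main() {
	assert_eq!(Doubler.handle(21).await, 42);
	println!("ok");
}
```

The snapshot side instead keeps `async fn` inside `#[async_trait]`, which rewrites every method to return a boxed `Pin<Box<dyn Future + Send>>`.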
@@ -1,37 +0,0 @@
-[package]
-name = "garage_api_k2v"
-version = "1.0.1"
-authors = ["Alex Auvolat <alex@adnab.me>"]
-edition = "2018"
-license = "AGPL-3.0"
-description = "K2V API server crate for the Garage object store"
-repository = "https://git.deuxfleurs.fr/Deuxfleurs/garage"
-readme = "../../README.md"
-
-[lib]
-path = "lib.rs"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-garage_model = { workspace = true, features = [ "k2v" ] }
-garage_table.workspace = true
-garage_util = { workspace = true, features = [ "k2v" ] }
-garage_api_common.workspace = true
-
-base64.workspace = true
-err-derive.workspace = true
-tracing.workspace = true
-
-futures.workspace = true
-tokio.workspace = true
-http.workspace = true
-http-body-util.workspace = true
-hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
-percent-encoding.workspace = true
-url.workspace = true
-
-serde.workspace = true
-serde_json.workspace = true
-
-opentelemetry.workspace = true
@@ -1,5 +1,7 @@
 use std::sync::Arc;
 
+use async_trait::async_trait;
+
 use hyper::{body::Incoming as IncomingBody, Method, Request, Response};
 use tokio::sync::watch;
 
@@ -10,25 +12,26 @@ use garage_util::socket_address::UnixOrTCPSocketAddress;
 
 use garage_model::garage::Garage;
 
-use garage_api_common::cors::*;
-use garage_api_common::generic_server::*;
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_request;
-
-use crate::batch::*;
-use crate::error::*;
-use crate::index::*;
-use crate::item::*;
-use crate::router::Endpoint;
-
-pub use garage_api_common::signature::streaming::ReqBody;
+use crate::generic_server::*;
+use crate::k2v::error::*;
+use crate::signature::verify_request;
+
+use crate::helpers::*;
+use crate::k2v::batch::*;
+use crate::k2v::index::*;
+use crate::k2v::item::*;
+use crate::k2v::router::Endpoint;
+use crate::s3::cors::*;
+
+pub use crate::signature::streaming::ReqBody;
 pub type ResBody = BoxBody<Error>;
 
 pub struct K2VApiServer {
 	garage: Arc<Garage>,
 }
 
-pub struct K2VApiEndpoint {
+pub(crate) struct K2VApiEndpoint {
 	bucket_name: String,
 	endpoint: Endpoint,
 }
@@ -46,6 +49,7 @@ impl K2VApiServer {
 	}
 }
 
+#[async_trait]
 impl ApiHandler for K2VApiServer {
 	const API_NAME: &'static str = "k2v";
 	const API_NAME_DISPLAY: &'static str = "K2V";
@@ -86,13 +90,11 @@ impl ApiHandler for K2VApiServer {
 		let bucket_id = garage
 			.bucket_helper()
 			.resolve_bucket(&bucket_name, &api_key)
-			.await
-			.map_err(pass_helper_error)?;
+			.await?;
 		let bucket = garage
 			.bucket_helper()
 			.get_existing_bucket(bucket_id)
-			.await
-			.map_err(helper_error_as_internal)?;
+			.await?;
 		let bucket_params = bucket.state.into_option().unwrap();
 
 		let allowed = match endpoint.authorization_type() {
@@ -4,14 +4,13 @@ use serde::{Deserialize, Serialize};
 
 use garage_table::{EnumerationOrder, TableSchema};
 
+use garage_model::k2v::causality::*;
 use garage_model::k2v::item_table::*;
 
-use garage_api_common::helpers::*;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::error::*;
-use crate::item::parse_causality_token;
-use crate::range::read_range;
+use crate::helpers::*;
+use crate::k2v::api_server::{ReqBody, ResBody};
+use crate::k2v::error::*;
+use crate::k2v::range::read_range;
 
 pub async fn handle_insert_batch(
 	ctx: ReqCtx,
@@ -24,7 +23,7 @@ pub async fn handle_insert_batch(
 
 	let mut items2 = vec![];
 	for it in items {
-		let ct = it.ct.map(|s| parse_causality_token(&s)).transpose()?;
+		let ct = it.ct.map(|s| CausalContext::parse_helper(&s)).transpose()?;
 		let v = match it.v {
 			Some(vs) => DvvsValue::Value(
 				BASE64_STANDARD
@@ -282,8 +281,7 @@ pub(crate) async fn handle_poll_range(
 			query.seen_marker,
 			timeout_msec,
 		)
-		.await
-		.map_err(pass_helper_error)?;
+		.await?;
 
 	if let Some((items, seen_marker)) = resp {
 		let resp = PollRangeResponse {
@@ -2,21 +2,18 @@ use err_derive::Error;
 use hyper::header::HeaderValue;
 use hyper::{HeaderMap, StatusCode};
 
-use garage_api_common::common_error::{commonErrorDerivative, CommonError};
-pub(crate) use garage_api_common::common_error::{helper_error_as_internal, pass_helper_error};
-pub use garage_api_common::common_error::{
-	CommonErrorDerivative, OkOrBadRequest, OkOrInternalError,
-};
-use garage_api_common::generic_server::ApiError;
-use garage_api_common::helpers::*;
-use garage_api_common::signature::error::Error as SignatureError;
+use crate::common_error::CommonError;
+pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
+use crate::generic_server::ApiError;
+use crate::helpers::*;
+use crate::signature::error::Error as SignatureError;
 
 /// Errors of this crate
 #[derive(Debug, Error)]
 pub enum Error {
 	#[error(display = "{}", _0)]
 	/// Error from common error
-	Common(#[error(source)] CommonError),
+	Common(CommonError),
 
 	// Category: cannot process
 	/// Authorization Header Malformed
@@ -31,10 +28,6 @@ pub enum Error {
 	#[error(display = "Invalid base64: {}", _0)]
 	InvalidBase64(#[error(source)] base64::DecodeError),
 
-	/// Invalid causality token
-	#[error(display = "Invalid causality token")]
-	InvalidCausalityToken,
-
 	/// The client asked for an invalid return format (invalid Accept header)
 	#[error(display = "Not acceptable: {}", _0)]
 	NotAcceptable(String),
@@ -44,7 +37,16 @@ pub enum Error {
 	InvalidUtf8Str(#[error(source)] std::str::Utf8Error),
 }
 
-commonErrorDerivative!(Error);
+impl<T> From<T> for Error
+where
+	CommonError: From<T>,
+{
+	fn from(err: T) -> Self {
+		Error::Common(CommonError::from(err))
+	}
+}
+
+impl CommonErrorDerivative for Error {}
 
 impl From<SignatureError> for Error {
 	fn from(err: SignatureError) -> Self {
@@ -70,7 +72,6 @@ impl Error {
 			Error::AuthorizationHeaderMalformed(_) => "AuthorizationHeaderMalformed",
 			Error::InvalidBase64(_) => "InvalidBase64",
 			Error::InvalidUtf8Str(_) => "InvalidUtf8String",
-			Error::InvalidCausalityToken => "CausalityToken",
 		}
 	}
 }
@@ -84,8 +85,7 @@ impl ApiError for Error {
 			Error::NotAcceptable(_) => StatusCode::NOT_ACCEPTABLE,
 			Error::AuthorizationHeaderMalformed(_)
 			| Error::InvalidBase64(_)
-			| Error::InvalidUtf8Str(_)
-			| Error::InvalidCausalityToken => StatusCode::BAD_REQUEST,
+			| Error::InvalidUtf8Str(_) => StatusCode::BAD_REQUEST,
 		}
 	}
 }
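The snapshot side reintroduces a blanket `impl<T> From<T> for Error where CommonError: From<T>`, which main replaced with the `commonErrorDerivative!` macro generating one concrete `From` impl per source type. The self-contained toy below (invented type names, not Garage's actual enums) mirrors that blanket impl and shows its main cost: no further hand-written `From` impl can be added for the same error type, because the coherence check treats it as overlapping.

```rust
// Toy sketch of the blanket-impl pattern used on the snapshot branch.
struct CommonError;
struct OtherError;

impl From<OtherError> for CommonError {
	fn from(_: OtherError) -> Self {
		CommonError
	}
}

enum ApiError {
	Common(CommonError),
}

// Blanket conversion: anything CommonError can absorb, ApiError can too.
impl<T> From<T> for ApiError
where
	CommonError: From<T>,
{
	fn from(err: T) -> Self {
		ApiError::Common(CommonError::from(err))
	}
}

// Uncommenting this is rejected by the coherence check (E0119), because the
// blanket impl above is considered to potentially cover std::io::Error as
// well -- which is why main switches to per-type generated impls instead:
//
// impl From<std::io::Error> for ApiError {
//     fn from(_: std::io::Error) -> Self { ApiError::Common(CommonError) }
// }

fn main() {
	let _e: ApiError = OtherError.into();
}
```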
@@ -5,11 +5,10 @@ use garage_table::util::*;
 
 use garage_model::k2v::item_table::{BYTES, CONFLICTS, ENTRIES, VALUES};
 
-use garage_api_common::helpers::*;
-
-use crate::api_server::ResBody;
-use crate::error::*;
-use crate::range::read_range;
+use crate::helpers::*;
+use crate::k2v::api_server::ResBody;
+use crate::k2v::error::*;
+use crate::k2v::range::read_range;
 
 pub async fn handle_read_index(
 	ctx: ReqCtx,
@@ -6,10 +6,9 @@ use hyper::{Request, Response, StatusCode};
 use garage_model::k2v::causality::*;
 use garage_model::k2v::item_table::*;
 
-use garage_api_common::helpers::*;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::error::*;
+use crate::helpers::*;
+use crate::k2v::api_server::{ReqBody, ResBody};
+use crate::k2v::error::*;
 
 pub const X_GARAGE_CAUSALITY_TOKEN: &str = "X-Garage-Causality-Token";
 
@@ -19,10 +18,6 @@ pub enum ReturnFormat {
 	Either,
 }
 
-pub(crate) fn parse_causality_token(s: &str) -> Result<CausalContext, Error> {
-	CausalContext::parse(s).ok_or(Error::InvalidCausalityToken)
-}
-
 impl ReturnFormat {
 	pub fn from(req: &Request<ReqBody>) -> Result<Self, Error> {
 		let accept = match req.headers().get(header::ACCEPT) {
@@ -141,7 +136,7 @@ pub async fn handle_insert_item(
 		.get(X_GARAGE_CAUSALITY_TOKEN)
 		.map(|s| s.to_str())
 		.transpose()?
-		.map(parse_causality_token)
+		.map(CausalContext::parse_helper)
 		.transpose()?;
 
 	let body = http_body_util::BodyExt::collect(req.into_body())
@@ -181,7 +176,7 @@ pub async fn handle_delete_item(
 		.get(X_GARAGE_CAUSALITY_TOKEN)
 		.map(|s| s.to_str())
 		.transpose()?
-		.map(parse_causality_token)
+		.map(CausalContext::parse_helper)
 		.transpose()?;
 
 	let value = DvvsValue::Deleted;
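`parse_causality_token` above is a thin wrapper that turns a malformed `X-Garage-Causality-Token` into the dedicated `Error::InvalidCausalityToken` variant (a 400 on the main side), where the snapshot branch calls `CausalContext::parse_helper` directly. A hedged sketch of how a handler in the same `item.rs` module consumes it, mirroring the header-reading chain from the hunks above; the surrounding function name is invented.

```rust
// Assumes it lives next to parse_causality_token and X_GARAGE_CAUSALITY_TOKEN,
// with CausalContext and Error in scope as in the file above.
fn causality_from_headers(headers: &hyper::HeaderMap) -> Result<Option<CausalContext>, Error> {
	headers
		.get(X_GARAGE_CAUSALITY_TOKEN)
		.map(|s| s.to_str())      // Option<Result<&str, ToStrError>>
		.transpose()?             // malformed header bytes -> InvalidHeader
		.map(parse_causality_token)
		.transpose()              // bad token -> Error::InvalidCausalityToken
}
```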
@@ -1,6 +1,3 @@
-#[macro_use]
-extern crate tracing;
-
 pub mod api_server;
 mod error;
 mod router;
@@ -7,9 +7,8 @@ use std::sync::Arc;
 use garage_table::replication::TableShardedReplication;
 use garage_table::*;
 
-use garage_api_common::helpers::key_after_prefix;
-
-use crate::error::*;
+use crate::helpers::key_after_prefix;
+use crate::k2v::error::*;
 
 /// Read range in a Garage table.
 /// Returns (entries, more?, nextStart)
@@ -1,11 +1,11 @@
-use crate::error::*;
+use crate::k2v::error::*;
 
 use std::borrow::Cow;
 
 use hyper::{Method, Request};
 
-use garage_api_common::helpers::Authorization;
-use garage_api_common::router_macros::{generateQueryParameters, router_match};
+use crate::helpers::Authorization;
+use crate::router_macros::{generateQueryParameters, router_match};
 
 router_match! {@func
 
17
src/api/lib.rs
Normal file
@@ -0,0 +1,17 @@
+//! Crate for serving a S3 compatible API
+#[macro_use]
+extern crate tracing;
+
+pub mod common_error;
+
+mod encoding;
+pub mod generic_server;
+pub mod helpers;
+mod router_macros;
+/// This mode is public only to help testing. Don't expect stability here
+pub mod signature;
+
+pub mod admin;
+#[cfg(feature = "k2v")]
+pub mod k2v;
+pub mod s3;
@@ -1,6 +1,5 @@
 /// This macro is used to generate very repetitive match {} blocks in this module
 /// It is _not_ made to be used anywhere else
-#[macro_export]
 macro_rules! router_match {
 	(@match $enum:expr , [ $($endpoint:ident,)* ]) => {{
 		// usage: router_match {@match my_enum, [ VariantWithField1, VariantWithField2 ..] }
@@ -134,7 +133,6 @@ macro_rules! router_match {
 
 /// This macro is used to generate part of the code in this module. It must be called only one, and
 /// is useless outside of this module.
-#[macro_export]
 macro_rules! generateQueryParameters {
 	(
 		keywords: [ $($kw_param:expr => $kw_name: ident),* ],
@@ -222,5 +220,5 @@ macro_rules! generateQueryParameters {
 	}
 }
 
-pub use generateQueryParameters;
-pub use router_match;
+pub(crate) use generateQueryParameters;
+pub(crate) use router_match;
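On the main side these macros carry `#[macro_export]` and a `pub use`, so the other API crates can invoke them; the snapshot side keeps them crate-private. The toy below (not Garage's actual macros) shows the export pattern in isolation.

```rust
mod macros {
	// #[macro_export] makes the macro reachable from outside the crate;
	// the pub use gives it a normal module path as well.
	#[macro_export]
	macro_rules! double {
		($x:expr) => {
			$x * 2
		};
	}
	pub use double;
}

fn main() {
	// Path-based invocation through the re-export:
	assert_eq!(macros::double!(21), 42);
	println!("ok");
}
```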
@@ -1,5 +1,7 @@
 use std::sync::Arc;
 
+use async_trait::async_trait;
+
 use hyper::header;
 use hyper::{body::Incoming as IncomingBody, Request, Response};
 use tokio::sync::watch;
@@ -12,33 +14,33 @@ use garage_util::socket_address::UnixOrTCPSocketAddress;
 use garage_model::garage::Garage;
 use garage_model::key_table::Key;
 
-use garage_api_common::cors::*;
-use garage_api_common::generic_server::*;
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_request;
-
-use crate::bucket::*;
-use crate::copy::*;
-use crate::cors::*;
-use crate::delete::*;
-use crate::error::*;
-use crate::get::*;
-use crate::lifecycle::*;
-use crate::list::*;
-use crate::multipart::*;
-use crate::post_object::handle_post_object;
-use crate::put::*;
-use crate::router::Endpoint;
-use crate::website::*;
-
-pub use garage_api_common::signature::streaming::ReqBody;
+use crate::generic_server::*;
+use crate::s3::error::*;
+use crate::signature::verify_request;
+
+use crate::helpers::*;
+use crate::s3::bucket::*;
+use crate::s3::copy::*;
+use crate::s3::cors::*;
+use crate::s3::delete::*;
+use crate::s3::get::*;
+use crate::s3::lifecycle::*;
+use crate::s3::list::*;
+use crate::s3::multipart::*;
+use crate::s3::post_object::handle_post_object;
+use crate::s3::put::*;
+use crate::s3::router::Endpoint;
+use crate::s3::website::*;
+
+pub use crate::signature::streaming::ReqBody;
 pub type ResBody = BoxBody<Error>;
 
 pub struct S3ApiServer {
 	garage: Arc<Garage>,
 }
 
-pub struct S3ApiEndpoint {
+pub(crate) struct S3ApiEndpoint {
 	bucket_name: Option<String>,
 	endpoint: Endpoint,
 }
@@ -68,6 +70,7 @@ impl S3ApiServer {
 	}
 }
 
+#[async_trait]
 impl ApiHandler for S3ApiServer {
 	const API_NAME: &'static str = "s3";
 	const API_NAME_DISPLAY: &'static str = "S3";
@@ -147,8 +150,7 @@ impl ApiHandler for S3ApiServer {
 		let bucket_id = garage
 			.bucket_helper()
 			.resolve_bucket(&bucket_name, &api_key)
-			.await
-			.map_err(pass_helper_error)?;
+			.await?;
 		let bucket = garage
 			.bucket_helper()
 			.get_existing_bucket(bucket_id)
@@ -13,13 +13,12 @@ use garage_util::crdt::*;
 use garage_util::data::*;
 use garage_util::time::*;
 
-use garage_api_common::common_error::CommonError;
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_signed_content;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::error::*;
-use crate::xml as s3_xml;
+use crate::common_error::CommonError;
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::error::*;
+use crate::s3::xml as s3_xml;
+use crate::signature::verify_signed_content;
 
 pub fn handle_get_bucket_location(ctx: ReqCtx) -> Result<Response<ResBody>, Error> {
 	let ReqCtx { garage, .. } = ctx;
@@ -15,7 +15,7 @@ use garage_util::error::OkOrMessage;
 
 use garage_model::s3::object_table::*;
 
-use crate::error::*;
+use crate::s3::error::*;
 
 pub const X_AMZ_CHECKSUM_ALGORITHM: HeaderName =
 	HeaderName::from_static("x-amz-checksum-algorithm");
@@ -20,16 +20,15 @@ use garage_model::s3::mpu_table::*;
 use garage_model::s3::object_table::*;
 use garage_model::s3::version_table::*;
 
-use garage_api_common::helpers::*;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::checksum::*;
-use crate::encryption::EncryptionParams;
-use crate::error::*;
-use crate::get::full_object_byte_stream;
-use crate::multipart;
-use crate::put::{get_headers, save_stream, ChecksumMode, SaveStreamResult};
-use crate::xml::{self as s3_xml, xmlns_tag};
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::checksum::*;
+use crate::s3::encryption::EncryptionParams;
+use crate::s3::error::*;
+use crate::s3::get::full_object_byte_stream;
+use crate::s3::multipart;
+use crate::s3::put::{get_headers, save_stream, ChecksumMode, SaveStreamResult};
+use crate::s3::xml::{self as s3_xml, xmlns_tag};
 
 // -------- CopyObject ---------
 
@@ -656,8 +655,7 @@ async fn get_copy_source(ctx: &ReqCtx, req: &Request<ReqBody>) -> Result<Object,
 	let source_bucket_id = garage
 		.bucket_helper()
 		.resolve_bucket(&source_bucket.to_string(), api_key)
-		.await
-		.map_err(pass_helper_error)?;
+		.await?;
 
 	if !api_key.allow_read(&source_bucket_id) {
 		return Err(Error::forbidden(format!(
@@ -863,7 +861,7 @@ pub struct CopyPartResult {
 #[cfg(test)]
 mod tests {
 	use super::*;
-	use crate::xml::to_xml_with_header;
+	use crate::s3::xml::to_xml_with_header;
 
 	#[test]
 	fn copy_object_result() -> Result<(), Error> {
@@ -1,21 +1,30 @@
 use quick_xml::de::from_reader;
+use std::sync::Arc;
 
-use hyper::{header::HeaderName, Method, Request, Response, StatusCode};
+use http::header::{
+	ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN,
+	ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD,
+};
+use hyper::{
+	body::Body, body::Incoming as IncomingBody, header::HeaderName, Method, Request, Response,
+	StatusCode,
+};
 
 use http_body_util::BodyExt;
 
 use serde::{Deserialize, Serialize};
 
-use garage_model::bucket_table::{Bucket, CorsRule as GarageCorsRule};
+use crate::common_error::CommonError;
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::error::*;
+use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
+use crate::signature::verify_signed_content;
+
+use garage_model::bucket_table::{Bucket, BucketParams, CorsRule as GarageCorsRule};
+use garage_model::garage::Garage;
 use garage_util::data::*;
 
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_signed_content;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::error::*;
-use crate::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
-
 pub async fn handle_get_cors(ctx: ReqCtx) -> Result<Response<ResBody>, Error> {
 	let ReqCtx { bucket_params, .. } = ctx;
 	if let Some(cors) = bucket_params.cors_config.get() {
@@ -90,6 +99,154 @@ pub async fn handle_put_cors(
 		.body(empty_body())?)
 }
 
+pub async fn handle_options_api(
+	garage: Arc<Garage>,
+	req: &Request<IncomingBody>,
+	bucket_name: Option<String>,
+) -> Result<Response<EmptyBody>, CommonError> {
+	// FIXME: CORS rules of buckets with local aliases are
+	// not taken into account.
+
+	// If the bucket name is a global bucket name,
+	// we try to apply the CORS rules of that bucket.
+	// If a user has a local bucket name that has
+	// the same name, its CORS rules won't be applied
+	// and will be shadowed by the rules of the globally
+	// existing bucket (but this is inevitable because
+	// OPTIONS calls are not auhtenticated).
+	if let Some(bn) = bucket_name {
+		let helper = garage.bucket_helper();
+		let bucket_id = helper.resolve_global_bucket_name(&bn).await?;
+		if let Some(id) = bucket_id {
+			let bucket = garage.bucket_helper().get_existing_bucket(id).await?;
+			let bucket_params = bucket.state.into_option().unwrap();
+			handle_options_for_bucket(req, &bucket_params)
+		} else {
+			// If there is a bucket name in the request, but that name
+			// does not correspond to a global alias for a bucket,
+			// then it's either a non-existing bucket or a local bucket.
+			// We have no way of knowing, because the request is not
+			// authenticated and thus we can't resolve local aliases.
+			// We take the permissive approach of allowing everything,
+			// because we don't want to prevent web apps that use
+			// local bucket names from making API calls.
+			Ok(Response::builder()
+				.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
+				.header(ACCESS_CONTROL_ALLOW_METHODS, "*")
+				.status(StatusCode::OK)
+				.body(EmptyBody::new())?)
+		}
+	} else {
+		// If there is no bucket name in the request,
+		// we are doing a ListBuckets call, which we want to allow
+		// for all origins.
+		Ok(Response::builder()
+			.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
+			.header(ACCESS_CONTROL_ALLOW_METHODS, "GET")
+			.status(StatusCode::OK)
+			.body(EmptyBody::new())?)
+	}
+}
+
+pub fn handle_options_for_bucket(
+	req: &Request<IncomingBody>,
+	bucket_params: &BucketParams,
+) -> Result<Response<EmptyBody>, CommonError> {
+	let origin = req
+		.headers()
+		.get("Origin")
+		.ok_or_bad_request("Missing Origin header")?
+		.to_str()?;
+	let request_method = req
+		.headers()
+		.get(ACCESS_CONTROL_REQUEST_METHOD)
+		.ok_or_bad_request("Missing Access-Control-Request-Method header")?
+		.to_str()?;
+	let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
+		Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
+		None => vec![],
+	};
+
+	if let Some(cors_config) = bucket_params.cors_config.get() {
+		let matching_rule = cors_config
+			.iter()
+			.find(|rule| cors_rule_matches(rule, origin, request_method, request_headers.iter()));
+		if let Some(rule) = matching_rule {
+			let mut resp = Response::builder()
+				.status(StatusCode::OK)
+				.body(EmptyBody::new())?;
+			add_cors_headers(&mut resp, rule).ok_or_internal_error("Invalid CORS configuration")?;
+			return Ok(resp);
+		}
+	}
+
+	Err(CommonError::Forbidden(
+		"This CORS request is not allowed.".into(),
+	))
+}
+
+pub fn find_matching_cors_rule<'a>(
+	bucket_params: &'a BucketParams,
+	req: &Request<impl Body>,
+) -> Result<Option<&'a GarageCorsRule>, Error> {
+	if let Some(cors_config) = bucket_params.cors_config.get() {
+		if let Some(origin) = req.headers().get("Origin") {
+			let origin = origin.to_str()?;
+			let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
+				Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
+				None => vec![],
+			};
+			return Ok(cors_config.iter().find(|rule| {
+				cors_rule_matches(rule, origin, req.method().as_ref(), request_headers.iter())
+			}));
+		}
+	}
+	Ok(None)
+}
+
+fn cors_rule_matches<'a, HI, S>(
+	rule: &GarageCorsRule,
+	origin: &'a str,
+	method: &'a str,
+	mut request_headers: HI,
+) -> bool
+where
+	HI: Iterator<Item = S>,
+	S: AsRef<str>,
+{
+	rule.allow_origins.iter().any(|x| x == "*" || x == origin)
+		&& rule.allow_methods.iter().any(|x| x == "*" || x == method)
+		&& request_headers.all(|h| {
+			rule.allow_headers
+				.iter()
+				.any(|x| x == "*" || x == h.as_ref())
+		})
+}
+
+pub fn add_cors_headers(
+	resp: &mut Response<impl Body>,
+	rule: &GarageCorsRule,
+) -> Result<(), http::header::InvalidHeaderValue> {
+	let h = resp.headers_mut();
+	h.insert(
+		ACCESS_CONTROL_ALLOW_ORIGIN,
+		rule.allow_origins.join(", ").parse()?,
+	);
+	h.insert(
+		ACCESS_CONTROL_ALLOW_METHODS,
+		rule.allow_methods.join(", ").parse()?,
+	);
+	h.insert(
+		ACCESS_CONTROL_ALLOW_HEADERS,
+		rule.allow_headers.join(", ").parse()?,
+	);
+	h.insert(
+		ACCESS_CONTROL_EXPOSE_HEADERS,
+		rule.expose_headers.join(", ").parse()?,
+	);
+	Ok(())
+}
+
 // ---- SERIALIZATION AND DESERIALIZATION TO/FROM S3 XML ----
 
 #[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
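`cors_rule_matches` above accepts a preflight when every checked element (origin, method, each requested header) is either listed explicitly in the rule or covered by a `*` wildcard, and `find_matching_cors_rule` returns the first rule that passes. The standalone re-implementation below (toy struct, not Garage's `CorsRule`) reproduces just that matching logic so it can be tested in isolation.

```rust
struct ToyCorsRule {
	allow_origins: Vec<String>,
	allow_methods: Vec<String>,
	allow_headers: Vec<String>,
}

// Same matching rule as cors_rule_matches, on simplified types.
fn matches(rule: &ToyCorsRule, origin: &str, method: &str, request_headers: &[&str]) -> bool {
	rule.allow_origins.iter().any(|x| x == "*" || x == origin)
		&& rule.allow_methods.iter().any(|x| x == "*" || x == method)
		&& request_headers
			.iter()
			.all(|h| rule.allow_headers.iter().any(|x| x == "*" || x == h))
}

fn main() {
	let rule = ToyCorsRule {
		allow_origins: vec!["https://app.example.com".into()],
		allow_methods: vec!["GET".into(), "PUT".into()],
		allow_headers: vec!["*".into()],
	};
	// A preflight for PUT from the allowed origin matches ...
	assert!(matches(&rule, "https://app.example.com", "PUT", &["content-type"]));
	// ... another origin does not, and falls through to the
	// "This CORS request is not allowed." error above.
	assert!(!matches(&rule, "https://evil.example.com", "PUT", &[]));
	println!("ok");
}
```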
@@ -5,13 +5,12 @@ use garage_util::data::*;
 
 use garage_model::s3::object_table::*;
 
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_signed_content;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::error::*;
-use crate::put::next_timestamp;
-use crate::xml as s3_xml;
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::error::*;
+use crate::s3::put::next_timestamp;
+use crate::s3::xml as s3_xml;
+use crate::signature::verify_signed_content;
 
 async fn handle_delete_internal(ctx: &ReqCtx, key: &str) -> Result<(Uuid, Uuid), Error> {
 	let ReqCtx {
@@ -28,10 +28,9 @@ use garage_util::migrate::Migrate;
 use garage_model::garage::Garage;
 use garage_model::s3::object_table::{ObjectVersionEncryption, ObjectVersionMetaInner};
 
-use garage_api_common::common_error::*;
-
-use crate::checksum::Md5Checksum;
-use crate::error::Error;
+use crate::common_error::*;
+use crate::s3::checksum::Md5Checksum;
+use crate::s3::error::Error;
 
 const X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_ALGORITHM: HeaderName =
 	HeaderName::from_static("x-amz-server-side-encryption-customer-algorithm");
@@ -4,30 +4,19 @@ use err_derive::Error;
 use hyper::header::HeaderValue;
 use hyper::{HeaderMap, StatusCode};
 
-use garage_model::helper::error::Error as HelperError;
-
-pub(crate) use garage_api_common::common_error::pass_helper_error;
-use garage_api_common::common_error::{
-	commonErrorDerivative, helper_error_as_internal, CommonError,
-};
-
-pub use garage_api_common::common_error::{
-	CommonErrorDerivative, OkOrBadRequest, OkOrInternalError,
-};
-
-use garage_api_common::generic_server::ApiError;
-use garage_api_common::helpers::*;
-use garage_api_common::signature::error::Error as SignatureError;
-
-use crate::xml as s3_xml;
+use crate::common_error::CommonError;
+pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
+use crate::generic_server::ApiError;
+use crate::helpers::*;
+use crate::s3::xml as s3_xml;
+use crate::signature::error::Error as SignatureError;
 
 /// Errors of this crate
 #[derive(Debug, Error)]
 pub enum Error {
 	#[error(display = "{}", _0)]
 	/// Error from common error
-	Common(#[error(source)] CommonError),
+	Common(CommonError),
 
 	// Category: cannot process
 	/// Authorization Header Malformed
@@ -89,16 +78,17 @@ pub enum Error {
 	NotImplemented(String),
 }
 
-commonErrorDerivative!(Error);
-
-// Helper errors are always passed as internal errors by default.
-// To pass the specific error code back to the client, use `pass_helper_error`.
-impl From<HelperError> for Error {
-	fn from(err: HelperError) -> Error {
-		Error::Common(helper_error_as_internal(err))
+impl<T> From<T> for Error
+where
+	CommonError: From<T>,
+{
+	fn from(err: T) -> Self {
+		Error::Common(CommonError::from(err))
 	}
 }
 
+impl CommonErrorDerivative for Error {}
+
 impl From<roxmltree::Error> for Error {
 	fn from(err: roxmltree::Error) -> Self {
 		Self::InvalidXml(format!("{}", err))
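With the `From<HelperError>` impl above, main deliberately degrades helper errors to internal errors unless a call site opts in with `pass_helper_error`, whereas the snapshot side's blanket impl forwards them through `CommonError`. The sketch below is hedged: the `find_bucket` helper is invented, it assumes `HelperError::NoSuchBucket` carries the bucket name as a `String`, and it is written as if it lived inside the S3 API crate on main.

```rust
use garage_model::helper::error::Error as HelperError;

use crate::error::{pass_helper_error, Error};

// Made-up helper standing in for a bucket_helper() call.
fn find_bucket(name: &str) -> Result<u64, HelperError> {
	Err(HelperError::NoSuchBucket(name.to_string()))
}

fn handler_default(name: &str) -> Result<u64, Error> {
	// Plain `?` goes through the From<HelperError> impl above:
	// the client only sees an InternalError.
	Ok(find_bucket(name)?)
}

fn handler_explicit(name: &str) -> Result<u64, Error> {
	// Opt in to forwarding the precise variant, so CommonError can map it
	// to the matching S3 error code instead of a generic 500.
	Ok(find_bucket(name).map_err(pass_helper_error)?)
}
```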
@@ -25,12 +25,11 @@ use garage_model::garage::Garage;
 use garage_model::s3::object_table::*;
 use garage_model::s3::version_table::*;
 
-use garage_api_common::helpers::*;
-
-use crate::api_server::ResBody;
-use crate::checksum::{add_checksum_response_headers, X_AMZ_CHECKSUM_MODE};
-use crate::encryption::EncryptionParams;
-use crate::error::*;
+use crate::helpers::*;
+use crate::s3::api_server::ResBody;
+use crate::s3::checksum::{add_checksum_response_headers, X_AMZ_CHECKSUM_MODE};
+use crate::s3::encryption::EncryptionParams;
+use crate::s3::error::*;
 
 const X_AMZ_MP_PARTS_COUNT: &str = "x-amz-mp-parts-count";
 
@@ -69,11 +68,14 @@ fn object_headers(
 	// See: https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html
 	let mut headers_by_name = BTreeMap::new();
 	for (name, value) in meta_inner.headers.iter() {
-		let name_lower = name.to_ascii_lowercase();
-		headers_by_name
-			.entry(name_lower)
-			.or_insert(vec![])
-			.push(value.as_str());
+		match headers_by_name.get_mut(name) {
+			None => {
+				headers_by_name.insert(name, vec![value.as_str()]);
+			}
+			Some(headers) => {
+				headers.push(value.as_str());
+			}
+		}
 	}
 
 	for (name, values) in headers_by_name {
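The `object_headers` hunk above changes how user metadata is grouped before being returned: main keys the map on the lower-cased name, so stored duplicates that differ only in case are merged into one header with several values, while the snapshot side groups by the name exactly as stored. A self-contained illustration of that grouping (toy input, not Garage's `HeaderList` type):

```rust
use std::collections::BTreeMap;

// Group values by lower-cased header name, as the main-side code does.
fn group_lowercased<'a>(headers: &[(&'a str, &'a str)]) -> BTreeMap<String, Vec<&'a str>> {
	let mut headers_by_name = BTreeMap::new();
	for (name, value) in headers {
		headers_by_name
			.entry(name.to_ascii_lowercase())
			.or_insert(vec![])
			.push(*value);
	}
	headers_by_name
}

fn main() {
	let grouped = group_lowercased(&[
		("X-Amz-Meta-Foo", "a"),
		("x-amz-meta-foo", "b"),
		("x-amz-meta-bar", "c"),
	]);
	// Both casings of "foo" end up under a single key with two values.
	assert_eq!(grouped["x-amz-meta-foo"], vec!["a", "b"]);
	assert_eq!(grouped.len(), 2);
	println!("{:?}", grouped);
}
```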
@@ -5,12 +5,11 @@ use hyper::{Request, Response, StatusCode};
 
 use serde::{Deserialize, Serialize};
 
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_signed_content;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::error::*;
-use crate::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::error::*;
+use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
+use crate::signature::verify_signed_content;
 
 use garage_model::bucket_table::{
 	parse_lifecycle_date, Bucket, LifecycleExpiration as GarageLifecycleExpiration,
@@ -13,14 +13,13 @@ use garage_model::s3::object_table::*;
 
 use garage_table::EnumerationOrder;
 
-use garage_api_common::encoding::*;
-use garage_api_common::helpers::*;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::encryption::EncryptionParams;
-use crate::error::*;
-use crate::multipart as s3_multipart;
-use crate::xml as s3_xml;
+use crate::encoding::*;
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::encryption::EncryptionParams;
+use crate::s3::error::*;
+use crate::s3::multipart as s3_multipart;
+use crate::s3::xml as s3_xml;
 
 const DUMMY_NAME: &str = "Dummy Key";
 const DUMMY_KEY: &str = "GKDummyKey";
@@ -1,6 +1,3 @@
-#[macro_use]
-extern crate tracing;
-
 pub mod api_server;
 pub mod error;
 
@@ -15,15 +15,14 @@ use garage_model::s3::mpu_table::*;
 use garage_model::s3::object_table::*;
 use garage_model::s3::version_table::*;
 
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_signed_content;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::checksum::*;
-use crate::encryption::EncryptionParams;
-use crate::error::*;
-use crate::put::*;
-use crate::xml as s3_xml;
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::checksum::*;
+use crate::s3::encryption::EncryptionParams;
+use crate::s3::error::*;
+use crate::s3::put::*;
+use crate::s3::xml as s3_xml;
+use crate::signature::verify_signed_content;
 
 // ----
 
@@ -16,16 +16,15 @@ use serde::Deserialize;
 use garage_model::garage::Garage;
 use garage_model::s3::object_table::*;
 
-use garage_api_common::cors::*;
-use garage_api_common::helpers::*;
-use garage_api_common::signature::payload::{verify_v4, Authorization};
-
-use crate::api_server::ResBody;
-use crate::checksum::*;
-use crate::encryption::EncryptionParams;
-use crate::error::*;
-use crate::put::{get_headers, save_stream, ChecksumMode};
-use crate::xml as s3_xml;
+use crate::helpers::*;
+use crate::s3::api_server::ResBody;
+use crate::s3::checksum::*;
+use crate::s3::cors::*;
+use crate::s3::encryption::EncryptionParams;
+use crate::s3::error::*;
+use crate::s3::put::{get_headers, save_stream, ChecksumMode};
+use crate::s3::xml as s3_xml;
+use crate::signature::payload::{verify_v4, Authorization};
 
 pub async fn handle_post_object(
 	garage: Arc<Garage>,
@@ -108,8 +107,7 @@ pub async fn handle_post_object(
 	let bucket_id = garage
 		.bucket_helper()
 		.resolve_bucket(&bucket_name, &api_key)
-		.await
-		.map_err(pass_helper_error)?;
+		.await?;
 
 	if !api_key.allow_write(&bucket_id) {
 		return Err(Error::forbidden("Operation is not allowed for this key."));
@@ -30,12 +30,11 @@ use garage_model::s3::block_ref_table::*;
 use garage_model::s3::object_table::*;
 use garage_model::s3::version_table::*;
 
-use garage_api_common::helpers::*;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::checksum::*;
-use crate::encryption::EncryptionParams;
-use crate::error::*;
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::checksum::*;
+use crate::s3::encryption::EncryptionParams;
+use crate::s3::error::*;
 
 const PUT_BLOCKS_MAX_PARALLEL: usize = 3;
 
@@ -623,7 +622,7 @@ pub(crate) fn get_headers(headers: &HeaderMap<HeaderValue>) -> Result<HeaderList
 	for (name, value) in headers.iter() {
 		if name.as_str().starts_with("x-amz-meta-") {
 			ret.push((
-				name.as_str().to_ascii_lowercase(),
+				name.to_string(),
 				std::str::from_utf8(value.as_bytes())?.to_string(),
 			));
 		}
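The `get_headers` change above is the write-side counterpart of the GET-side grouping shown earlier: main stores `x-amz-meta-*` names lower-cased at save time, while the snapshot side keeps whatever casing the client sent. A small, hedged sketch of that normalization step (toy function, not the Garage signature):

```rust
// Decide how a client-supplied header name would be stored on main: only
// x-amz-meta-* names are kept, always in their lower-cased form.
fn store_meta_key(client_header_name: &str) -> Option<String> {
	let lower = client_header_name.to_ascii_lowercase();
	if lower.starts_with("x-amz-meta-") {
		Some(lower)
	} else {
		None
	}
}

fn main() {
	assert_eq!(
		store_meta_key("X-Amz-Meta-Camera"),
		Some("x-amz-meta-camera".to_string())
	);
	assert_eq!(store_meta_key("Content-Type"), None);
	println!("ok");
}
```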
@@ -3,10 +3,9 @@ use std::borrow::Cow;
 use hyper::header::HeaderValue;
 use hyper::{HeaderMap, Method, Request};
 
-use garage_api_common::helpers::Authorization;
-use garage_api_common::router_macros::{generateQueryParameters, router_match};
-
-use crate::error::*;
+use crate::helpers::Authorization;
+use crate::router_macros::{generateQueryParameters, router_match};
+use crate::s3::error::*;
 
 router_match! {@func
 
@@ -4,16 +4,15 @@ use http_body_util::BodyExt;
 use hyper::{Request, Response, StatusCode};
 use serde::{Deserialize, Serialize};
 
+use crate::helpers::*;
+use crate::s3::api_server::{ReqBody, ResBody};
+use crate::s3::error::*;
+use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
+use crate::signature::verify_signed_content;
+
 use garage_model::bucket_table::*;
 use garage_util::data::*;
 
-use garage_api_common::helpers::*;
-use garage_api_common::signature::verify_signed_content;
-
-use crate::api_server::{ReqBody, ResBody};
-use crate::error::*;
-use crate::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
-
 pub async fn handle_get_website(ctx: ReqCtx) -> Result<Response<ResBody>, Error> {
 	let ReqCtx { bucket_params, .. } = ctx;
 	if let Some(website) = bucket_params.website_config.get() {
@@ -1,7 +1,7 @@
 use quick_xml::se::to_string;
 use serde::{Deserialize, Serialize, Serializer};
 
-use crate::error::Error as ApiError;
+use crate::s3::error::Error as ApiError;
 
 pub fn to_xml_with_header<T: Serialize>(x: &T) -> Result<String, ApiError> {
 	let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string();
@@ -518,7 +518,7 @@ impl Authorization {
 		})
 	}
 
-	pub fn parse_form(params: &HeaderMap) -> Result<Self, Error> {
+	pub(crate) fn parse_form(params: &HeaderMap) -> Result<Self, Error> {
 		let algorithm = params
 			.get(X_AMZ_ALGORITHM)
 			.ok_or_bad_request("Missing X-Amz-Algorithm header")?
@@ -34,8 +34,10 @@ async-compression.workspace = true
 zstd.workspace = true
 
 serde.workspace = true
+serde_bytes.workspace = true
 
 futures.workspace = true
+futures-util.workspace = true
 tokio.workspace = true
 tokio-util.workspace = true
 
@@ -4,6 +4,7 @@ use std::sync::Arc;
 use std::time::Duration;
 
 use arc_swap::{ArcSwap, ArcSwapOption};
+use async_trait::async_trait;
 use bytes::Bytes;
 use rand::prelude::*;
 use serde::{Deserialize, Serialize};
@@ -687,6 +688,7 @@ impl BlockManager {
 	}
 }
 
+#[async_trait]
 impl StreamingEndpointHandler<BlockRpc> for BlockManager {
 	async fn handle(self: &Arc<Self>, mut message: Req<BlockRpc>, _from: NodeID) -> Resp<BlockRpc> {
 		match message.msg() {
@@ -13,6 +13,7 @@ path = "lib.rs"

 [dependencies]
 err-derive.workspace = true
+hexdump.workspace = true
 tracing.workspace = true

 heed = { workspace = true, optional = true }
@@ -23,9 +23,7 @@ path = "tests/lib.rs"
 [dependencies]
 format_table.workspace = true
 garage_db.workspace = true
-garage_api_admin.workspace = true
-garage_api_s3.workspace = true
-garage_api_k2v = { workspace = true, optional = true }
+garage_api.workspace = true
 garage_block.workspace = true
 garage_model.workspace = true
 garage_net.workspace = true
@@ -42,6 +40,7 @@ parse_duration.workspace = true
 hex.workspace = true
 tracing.workspace = true
 tracing-subscriber.workspace = true
+rand.workspace = true
 async-trait.workspace = true
 sha1.workspace = true
 sodiumoxide.workspace = true
@@ -49,18 +48,21 @@ structopt.workspace = true
 git-version.workspace = true

 serde.workspace = true
+serde_bytes.workspace = true
+toml.workspace = true

 futures.workspace = true
+futures-util.workspace = true
 tokio.workspace = true

 opentelemetry.workspace = true
 opentelemetry-prometheus = { workspace = true, optional = true }
 opentelemetry-otlp = { workspace = true, optional = true }
+prometheus = { workspace = true, optional = true }
 syslog-tracing = { workspace = true, optional = true }

 [dev-dependencies]
-garage_api_common.workspace = true
+aws-config.workspace = true

 aws-sdk-s3.workspace = true
 chrono.workspace = true
 http.workspace = true
@@ -82,7 +84,7 @@ k2v-client.workspace = true
 [features]
 default = [ "bundled-libs", "metrics", "lmdb", "sqlite", "k2v" ]

-k2v = [ "garage_util/k2v", "garage_api_k2v" ]
+k2v = [ "garage_util/k2v", "garage_api/k2v" ]

 # Database engines
 lmdb = [ "garage_model/lmdb" ]
@@ -93,7 +95,7 @@ consul-discovery = [ "garage_rpc/consul-discovery" ]
 # Automatic registration and discovery via Kubernetes API
 kubernetes-discovery = [ "garage_rpc/kubernetes-discovery" ]
 # Prometheus exporter (/metrics endpoint).
-metrics = [ "garage_api_admin/metrics", "opentelemetry-prometheus" ]
+metrics = [ "garage_api/metrics", "opentelemetry-prometheus", "prometheus" ]
 # Exporter for the OpenTelemetry Collector.
 telemetry-otlp = [ "opentelemetry-otlp" ]
 # Logging to syslog
@@ -4,11 +4,9 @@ mod key;

 use std::collections::HashMap;
 use std::fmt::Write;
-use std::future::Future;
 use std::sync::Arc;

-use futures::future::FutureExt;
+use async_trait::async_trait;

 use serde::{Deserialize, Serialize};

 use format_table::format_table_to_string;
@@ -507,25 +505,22 @@ impl AdminRpcHandler {
 }
 }

+#[async_trait]
 impl EndpointHandler<AdminRpc> for AdminRpcHandler {
-fn handle(
+async fn handle(
 self: &Arc<Self>,
 message: &AdminRpc,
 _from: NodeID,
-) -> impl Future<Output = Result<AdminRpc, Error>> + Send {
-let self2 = self.clone();
-async move {
+) -> Result<AdminRpc, Error> {
 match message {
-AdminRpc::BucketOperation(bo) => self2.handle_bucket_cmd(bo).await,
-AdminRpc::KeyOperation(ko) => self2.handle_key_cmd(ko).await,
-AdminRpc::LaunchRepair(opt) => self2.handle_launch_repair(opt.clone()).await,
-AdminRpc::Stats(opt) => self2.handle_stats(opt.clone()).await,
-AdminRpc::Worker(wo) => self2.handle_worker_cmd(wo).await,
-AdminRpc::BlockOperation(bo) => self2.handle_block_cmd(bo).await,
-AdminRpc::MetaOperation(mo) => self2.handle_meta_cmd(mo).await,
+AdminRpc::BucketOperation(bo) => self.handle_bucket_cmd(bo).await,
+AdminRpc::KeyOperation(ko) => self.handle_key_cmd(ko).await,
+AdminRpc::LaunchRepair(opt) => self.handle_launch_repair(opt.clone()).await,
+AdminRpc::Stats(opt) => self.handle_stats(opt.clone()).await,
+AdminRpc::Worker(wo) => self.handle_worker_cmd(wo).await,
+AdminRpc::BlockOperation(bo) => self.handle_block_cmd(bo).await,
+AdminRpc::MetaOperation(mo) => self.handle_meta_cmd(mo).await,
 m => Err(GarageError::unexpected_rpc_message(m).into()),
 }
 }
-.boxed()
-}
 }
@@ -1,4 +1,3 @@
-use std::future::Future;
 use std::sync::Arc;
 use std::time::Duration;

@@ -94,16 +93,17 @@ pub async fn launch_online_repair(

 // ----

+#[async_trait]
 trait TableRepair: Send + Sync + 'static {
 type T: TableSchema;

 fn table(garage: &Garage) -> &Table<Self::T, TableShardedReplication>;

-fn process(
+async fn process(
 &mut self,
 garage: &Garage,
 entry: <<Self as TableRepair>::T as TableSchema>::E,
-) -> impl Future<Output = Result<bool, Error>> + Send;
+) -> Result<bool, Error>;
 }

 struct TableRepairWorker<T: TableRepair> {
@@ -174,6 +174,7 @@ impl<R: TableRepair> Worker for TableRepairWorker<R> {

 struct RepairVersions;

+#[async_trait]
 impl TableRepair for RepairVersions {
 type T = VersionTable;

@@ -220,6 +221,7 @@ impl TableRepair for RepairVersions {

 struct RepairBlockRefs;

+#[async_trait]
 impl TableRepair for RepairBlockRefs {
 type T = BlockRefTable;

@@ -255,6 +257,7 @@ impl TableRepair for RepairBlockRefs {

 struct RepairMpu;

+#[async_trait]
 impl TableRepair for RepairMpu {
 type T = MultipartUploadTable;

@@ -6,13 +6,13 @@ use garage_util::background::*;
 use garage_util::config::*;
 use garage_util::error::Error;

-use garage_api_admin::api_server::AdminApiServer;
-use garage_api_s3::api_server::S3ApiServer;
+use garage_api::admin::api_server::AdminApiServer;
+use garage_api::s3::api_server::S3ApiServer;
 use garage_model::garage::Garage;
 use garage_web::WebServer;

 #[cfg(feature = "k2v")]
-use garage_api_k2v::api_server::K2VApiServer;
+use garage_api::k2v::api_server::K2VApiServer;

 use crate::admin::*;
 use crate::secrets::{fill_secrets, Secrets};

@@ -15,7 +15,7 @@ use hyper_util::client::legacy::{connect::HttpConnector, Client};
 use hyper_util::rt::TokioExecutor;

 use super::garage::{Instance, Key};
-use garage_api_common::signature;
+use garage_api::signature;

 pub type Body = FullBody<hyper::body::Bytes>;

@@ -29,11 +29,12 @@ tokio.workspace = true
 # cli deps
 clap = { workspace = true, optional = true }
 format_table = { workspace = true, optional = true }
+tracing = { workspace = true, optional = true }
 tracing-subscriber = { workspace = true, optional = true }


 [features]
-cli = ["clap", "tokio/fs", "tokio/io-std", "tracing-subscriber", "format_table"]
+cli = ["clap", "tokio/fs", "tokio/io-std", "tracing", "tracing-subscriber", "format_table"]

 [lib]
 path = "lib.rs"
@@ -22,6 +22,7 @@ garage_util.workspace = true
 garage_net.workspace = true

 async-trait.workspace = true
+arc-swap.workspace = true
 blake2.workspace = true
 chrono.workspace = true
 err-derive.workspace = true
@@ -37,7 +38,9 @@ serde.workspace = true
 serde_bytes.workspace = true

 futures.workspace = true
+futures-util.workspace = true
 tokio.workspace = true
+opentelemetry.workspace = true

 [features]
 default = [ "lmdb", "sqlite" ]
@@ -16,6 +16,8 @@ use serde::{Deserialize, Serialize};

 use garage_util::data::*;

+use crate::helper::error::{Error as HelperError, OkOrBadRequest};
+
 /// Node IDs used in K2V are u64 integers that are the abbreviation
 /// of full Garage node IDs which are 256-bit UUIDs.
 pub type K2VNodeId = u64;
@@ -97,6 +99,10 @@ impl CausalContext {
 Some(ret)
 }

+pub fn parse_helper(s: &str) -> Result<Self, HelperError> {
+Self::parse(s).ok_or_bad_request("Invalid causality token")
+}
+
 /// Check if this causal context contains newer items than another one
 pub fn is_newer_than(&self, other: &Self) -> bool {
 vclock_gt(&self.vector_clock, &other.vector_clock)
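The doc comment above says K2V node IDs are u64 abbreviations of Garage's 256-bit node UUIDs, but the hunk does not show how the abbreviation is computed. As a hedged, standalone sketch, one plausible scheme is to keep the first 8 bytes of the full ID; the real conversion used by Garage may differ:

```rust
// Standalone sketch, not the actual garage_model code: abbreviate a 256-bit
// node ID (32 bytes) into a u64 by keeping its first 8 bytes.
type K2VNodeId = u64;

fn abbreviate_node_id(full_id: &[u8; 32]) -> K2VNodeId {
    let mut first8 = [0u8; 8];
    first8.copy_from_slice(&full_id[..8]);
    u64::from_be_bytes(first8)
}

fn main() {
    let mut id = [0u8; 32];
    id[0] = 0xab;
    id[7] = 0xcd;
    // Prints the abbreviation in hex; only the first 8 bytes matter.
    println!("{:016x}", abbreviate_node_id(&id));
}
```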
@@ -10,6 +10,7 @@ use std::convert::TryInto;
 use std::sync::{Arc, Mutex, MutexGuard};
 use std::time::{Duration, Instant};

+use async_trait::async_trait;
 use futures::stream::FuturesUnordered;
 use futures::StreamExt;
 use serde::{Deserialize, Serialize};
@@ -536,6 +537,7 @@ impl K2VRpcHandler {
 }
 }

+#[async_trait]
 impl EndpointHandler<K2VRpc> for K2VRpcHandler {
 async fn handle(self: &Arc<Self>, message: &K2VRpc, _from: NodeID) -> Result<K2VRpc, Error> {
 match message {
@@ -30,6 +30,7 @@ rand.workspace = true

 log.workspace = true
 arc-swap.workspace = true
+async-trait.workspace = true
 err-derive.workspace = true
 bytes.workspace = true
 cfg-if.workspace = true
@@ -6,6 +6,7 @@ use std::sync::{Arc, Mutex};
 use std::task::Poll;

 use arc_swap::ArcSwapOption;
+use async_trait::async_trait;
 use bytes::Bytes;
 use log::{debug, error, trace};

@@ -219,6 +220,7 @@ impl ClientConn {

 impl SendLoop for ClientConn {}

+#[async_trait]
 impl RecvLoop for ClientConn {
 fn recv_handler(self: &Arc<Self>, id: RequestID, stream: ByteStream) {
 trace!("ClientConn recv_handler {}", id);
@@ -1,9 +1,8 @@
-use std::future::Future;
 use std::marker::PhantomData;
 use std::sync::Arc;

 use arc_swap::ArcSwapOption;
-use futures::future::{BoxFuture, FutureExt};
+use async_trait::async_trait;

 use crate::error::Error;
 use crate::message::*;
@@ -15,17 +14,19 @@ use crate::netapp::*;
 /// attached to the response..
 ///
 /// The handler object should be in an Arc, see `Endpoint::set_handler`
+#[async_trait]
 pub trait StreamingEndpointHandler<M>: Send + Sync
 where
 M: Message,
 {
-fn handle(self: &Arc<Self>, m: Req<M>, from: NodeID) -> impl Future<Output = Resp<M>> + Send;
+async fn handle(self: &Arc<Self>, m: Req<M>, from: NodeID) -> Resp<M>;
 }

 /// If one simply wants to use an endpoint in a client fashion,
 /// without locally serving requests to that endpoint,
 /// use the unit type `()` as the handler type:
 /// it will panic if it is ever made to handle request.
+#[async_trait]
 impl<M: Message> EndpointHandler<M> for () {
 async fn handle(self: &Arc<()>, _m: &M, _from: NodeID) -> M::Response {
 panic!("This endpoint should not have a local handler.");
@@ -37,13 +38,15 @@ impl<M: Message> EndpointHandler<M> for () {
 /// This trait should be implemented by an object of your application
 /// that can handle a message of type `M`, in the cases where it doesn't
 /// care about attached stream in the request nor in the response.
+#[async_trait]
 pub trait EndpointHandler<M>: Send + Sync
 where
 M: Message,
 {
-fn handle(self: &Arc<Self>, m: &M, from: NodeID) -> impl Future<Output = M::Response> + Send;
+async fn handle(self: &Arc<Self>, m: &M, from: NodeID) -> M::Response;
 }

+#[async_trait]
 impl<T, M> StreamingEndpointHandler<M> for T
 where
 T: EndpointHandler<M>,
@@ -158,8 +161,9 @@ where

 pub(crate) type DynEndpoint = Box<dyn GenericEndpoint + Send + Sync>;

+#[async_trait]
 pub(crate) trait GenericEndpoint {
-fn handle(&self, req_enc: ReqEnc, from: NodeID) -> BoxFuture<Result<RespEnc, Error>>;
+async fn handle(&self, req_enc: ReqEnc, from: NodeID) -> Result<RespEnc, Error>;
 fn drop_handler(&self);
 fn clone_endpoint(&self) -> DynEndpoint;
 }
@@ -170,13 +174,13 @@ where
 M: Message,
 H: StreamingEndpointHandler<M>;

+#[async_trait]
 impl<M, H> GenericEndpoint for EndpointArc<M, H>
 where
 M: Message,
 H: StreamingEndpointHandler<M> + 'static,
 {
-fn handle(&self, req_enc: ReqEnc, from: NodeID) -> BoxFuture<Result<RespEnc, Error>> {
-async move {
+async fn handle(&self, req_enc: ReqEnc, from: NodeID) -> Result<RespEnc, Error> {
 match self.0.handler.load_full() {
 None => Err(Error::NoHandler),
 Some(h) => {
@@ -186,8 +190,6 @@ where
 }
 }
 }
-.boxed()
-}

 fn drop_handler(&self) {
 self.0.handler.swap(None);
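These hunks are the core of the difference between the two sides: one side's handler traits return `impl Future<...> + Send` directly, while the other uses the `async-trait` crate with plain `async fn` methods (the same change explains the extra `self2` clone and the `.boxed()` calls in the `AdminRpcHandler` and `EndpointArc` hunks). A standalone, hedged sketch of the two shapes, using a toy trait rather than garage_net's actual `Message` and `NodeID` types, and assuming `async-trait` and `tokio` are available as dependencies:

```rust
// Toy illustration of the two trait shapes seen above; none of these names
// come from garage_net.
use std::future::Future;
use std::sync::Arc;

// One shape: async fn in a trait via the async-trait macro, which boxes the
// returned future behind the scenes.
#[async_trait::async_trait]
trait HandlerWithMacro {
    async fn handle(self: &Arc<Self>, msg: &str) -> String;
}

// Other shape: no macro, the method returns `impl Future + Send`, which is
// why the `#[async_trait]` attributes disappear on that side of the diff.
trait HandlerWithRpit {
    fn handle(self: &Arc<Self>, msg: &str) -> impl Future<Output = String> + Send;
}

struct Echo;

#[async_trait::async_trait]
impl HandlerWithMacro for Echo {
    async fn handle(self: &Arc<Self>, msg: &str) -> String {
        format!("echo: {msg}")
    }
}

impl HandlerWithRpit for Echo {
    fn handle(self: &Arc<Self>, msg: &str) -> impl Future<Output = String> + Send {
        // Cloning what the future needs up front avoids borrowing `self`,
        // mirroring the `let self2 = self.clone()` pattern in the hunks above.
        let msg = msg.to_string();
        async move { format!("echo: {msg}") }
    }
}

#[tokio::main]
async fn main() {
    let h = Arc::new(Echo);
    println!("{}", HandlerWithMacro::handle(&h, "a").await);
    println!("{}", HandlerWithRpit::handle(&h, "b").await);
}
```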
@@ -5,6 +5,7 @@ use std::sync::{Arc, RwLock};
 use log::{debug, error, info, trace, warn};

 use arc_swap::ArcSwapOption;
+use async_trait::async_trait;

 use serde::{Deserialize, Serialize};
 use sodiumoxide::crypto::auth;
@@ -456,6 +457,7 @@ impl NetApp {
 }
 }

+#[async_trait]
 impl EndpointHandler<HelloMessage> for NetApp {
 async fn handle(self: &Arc<Self>, msg: &HelloMessage, from: NodeID) {
 debug!("Hello from {:?}: {:?}", hex::encode(&from[..8]), msg);
@@ -5,6 +5,7 @@ use std::sync::{Arc, RwLock};
 use std::time::{Duration, Instant};

 use arc_swap::ArcSwap;
+use async_trait::async_trait;
 use log::{debug, info, trace, warn};
 use serde::{Deserialize, Serialize};

@@ -591,6 +592,7 @@ impl PeeringManager {
 }
 }

+#[async_trait]
 impl EndpointHandler<PingMessage> for PeeringManager {
 async fn handle(self: &Arc<Self>, ping: &PingMessage, from: NodeID) -> PingMessage {
 let ping_resp = PingMessage {
@@ -602,6 +604,7 @@ impl EndpointHandler<PingMessage> for PeeringManager {
 }
 }

+#[async_trait]
 impl EndpointHandler<PeerListMessage> for PeeringManager {
 async fn handle(
 self: &Arc<Self>,
@@ -1,6 +1,7 @@
 use std::collections::HashMap;
 use std::sync::Arc;

+use async_trait::async_trait;
 use bytes::Bytes;
 use log::*;

@@ -49,6 +50,7 @@ impl Drop for Sender {
 /// according to the protocol defined above: chunks of message in progress of being
 /// received are stored in a buffer, and when the last chunk of a message is received,
 /// the full message is passed to the receive handler.
+#[async_trait]
 pub(crate) trait RecvLoop: Sync + 'static {
 fn recv_handler(self: &Arc<Self>, id: RequestID, stream: ByteStream);
 fn cancel_handler(self: &Arc<Self>, _id: RequestID) {}
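The doc comment on `RecvLoop` describes chunk reassembly: partial messages are buffered per request, and only a complete message is handed to the receive handler. As a standalone, hedged sketch of that idea (the framing, the RequestID type, and the real buffer layout in garage_net are not shown here):

```rust
// Standalone sketch, not garage_net code: buffer incoming chunks per request
// id and emit the full message only when the last chunk arrives.
use std::collections::HashMap;

type RequestId = u32;

#[derive(Default)]
struct Reassembler {
    partial: HashMap<RequestId, Vec<u8>>,
}

impl Reassembler {
    /// Feed one chunk; returns Some(full_message) when `last` is true.
    fn push_chunk(&mut self, id: RequestId, chunk: &[u8], last: bool) -> Option<Vec<u8>> {
        let buf = self.partial.entry(id).or_default();
        buf.extend_from_slice(chunk);
        if last {
            self.partial.remove(&id)
        } else {
            None
        }
    }
}

fn main() {
    let mut r = Reassembler::default();
    assert_eq!(r.push_chunk(7, b"hel", false), None);
    assert_eq!(r.push_chunk(7, b"lo", true), Some(b"hello".to_vec()));
    println!("reassembly ok");
}
```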
@@ -3,6 +3,7 @@ use std::pin::Pin;
 use std::sync::Arc;
 use std::task::{Context, Poll};

+use async_trait::async_trait;
 use bytes::{BufMut, Bytes, BytesMut};
 use log::*;

@@ -272,6 +273,7 @@ impl DataFrame {
 ///
 /// The `.send_loop()` exits when the sending end of the channel is closed,
 /// or if there is an error at any time writing to the async writer.
+#[async_trait]
 pub(crate) trait SendLoop: Sync {
 async fn send_loop<W>(
 self: Arc<Self>,
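The `SendLoop` doc comment pins down the two exit conditions: the sending side of the channel is closed, or a write fails. A standalone, hedged sketch of that loop shape, using a plain std channel and `io::Write` instead of garage_net's async queue and writer:

```rust
// Standalone sketch, not garage_net code: drain a channel into a writer,
// stopping when the channel is closed or when a write returns an error.
use std::io::{self, Write};
use std::sync::mpsc::Receiver;

fn send_loop<W: Write>(rx: Receiver<Vec<u8>>, mut writer: W) -> io::Result<()> {
    // `recv()` returns Err once every Sender has been dropped: first exit condition.
    while let Ok(frame) = rx.recv() {
        // A failed write ends the loop with an error: second exit condition.
        writer.write_all(&frame)?;
        writer.flush()?;
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let (tx, rx) = std::sync::mpsc::channel();
    tx.send(b"frame-1\n".to_vec()).unwrap();
    tx.send(b"frame-2\n".to_vec()).unwrap();
    drop(tx); // close the sending side so the loop terminates
    send_loop(rx, io::stdout())
}
```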
@@ -3,6 +3,7 @@ use std::net::SocketAddr;
 use std::sync::{Arc, Mutex};

 use arc_swap::ArcSwapOption;
+use async_trait::async_trait;
 use log::*;

 use futures::io::{AsyncReadExt, AsyncWriteExt};
@@ -173,6 +174,7 @@ impl ServerConn {

 impl SendLoop for ServerConn {}

+#[async_trait]
 impl RecvLoop for ServerConn {
 fn recv_handler(self: &Arc<Self>, id: RequestID, stream: ByteStream) {
 let resp_send = match self.resp_send.load_full() {
@@ -15,10 +15,12 @@ path = "lib.rs"

 [dependencies]
 format_table.workspace = true
+garage_db.workspace = true
 garage_util.workspace = true
 garage_net.workspace = true

 arc-swap.workspace = true
+bytes.workspace = true
 bytesize.workspace = true
 gethostname.workspace = true
 hex.workspace = true
@@ -44,7 +46,9 @@ reqwest = { workspace = true, optional = true }
 pnet_datalink.workspace = true

 futures.workspace = true
+futures-util.workspace = true
 tokio.workspace = true
+tokio-stream.workspace = true
 opentelemetry.workspace = true

 [features]
@@ -7,6 +7,7 @@ use std::sync::{Arc, RwLock, RwLockReadGuard};
 use std::time::{Duration, Instant};

 use arc_swap::ArcSwapOption;
+use async_trait::async_trait;
 use futures::join;
 use serde::{Deserialize, Serialize};
 use sodiumoxide::crypto::sign::ed25519;
@@ -748,6 +749,7 @@ impl System {
 }
 }

+#[async_trait]
 impl EndpointHandler<SystemRpc> for System {
 async fn handle(self: &Arc<Self>, msg: &SystemRpc, from: NodeID) -> Result<SystemRpc, Error> {
 match msg {
@@ -22,6 +22,7 @@ opentelemetry.workspace = true

 async-trait.workspace = true
 arc-swap.workspace = true
+bytes.workspace = true
 hex.workspace = true
 hexdump.workspace = true
 tracing.workspace = true
@@ -4,7 +4,6 @@ use std::sync::Arc;
 use std::time::Duration;

 use async_trait::async_trait;

 use serde::{Deserialize, Serialize};
 use serde_bytes::ByteBuf;

@@ -273,6 +272,7 @@ impl<F: TableSchema, R: TableReplication> TableGc<F, R> {
 }
 }

+#[async_trait]
 impl<F: TableSchema, R: TableReplication> EndpointHandler<GcRpc> for TableGc<F, R> {
 async fn handle(self: &Arc<Self>, message: &GcRpc, _from: NodeID) -> Result<GcRpc, Error> {
 match message {
@@ -444,6 +444,7 @@ impl<F: TableSchema, R: TableReplication> TableSyncer<F, R> {

 // ======= SYNCHRONIZATION PROCEDURE -- RECEIVER SIDE ======

+#[async_trait]
 impl<F: TableSchema, R: TableReplication> EndpointHandler<SyncRpc> for TableSyncer<F, R> {
 async fn handle(self: &Arc<Self>, message: &SyncRpc, from: NodeID) -> Result<SyncRpc, Error> {
 match message {
@@ -2,6 +2,7 @@ use std::borrow::Borrow;
 use std::collections::{BTreeMap, BTreeSet, HashMap};
 use std::sync::Arc;

+use async_trait::async_trait;
 use futures::stream::*;
 use serde::{Deserialize, Serialize};
 use serde_bytes::ByteBuf;
@@ -203,10 +204,6 @@ impl<F: TableSchema, R: TableReplication> Table<F, R> {
 entries_vec.push((write_sets, e_enc));
 }

-if entries_vec.is_empty() {
-return Ok(());
-}
-
 // Compute a deduplicated list of all of the write sets,
 // and compute an index from each node to the position of the sets in which
 // it takes part, to optimize the detection of a quorum.
@@ -499,6 +496,7 @@ impl<F: TableSchema, R: TableReplication> Table<F, R> {
 }
 }

+#[async_trait]
 impl<F: TableSchema, R: TableReplication> EndpointHandler<TableRpc<F>> for Table<F, R> {
 async fn handle(
 self: &Arc<Self>,
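The comment in the middle hunk describes the write-path optimization: deduplicate the write sets, then index each node by the positions of the sets it belongs to, so an incoming ACK only touches the relevant per-set counters. A standalone, hedged sketch of that bookkeeping (node IDs, quorum value, and data layout here are illustrative, not garage_table's actual types):

```rust
// Standalone sketch, not garage_table code: deduplicate write sets and build
// a node -> set-positions index, then count ACKs per set until every set
// reaches the write quorum.
use std::collections::HashMap;

type NodeId = u64; // stand-in for Garage's UUID-based node identifiers

fn dedup_and_index(write_sets: &[Vec<NodeId>]) -> (Vec<Vec<NodeId>>, HashMap<NodeId, Vec<usize>>) {
    let mut dedup: Vec<Vec<NodeId>> = Vec::new();
    for set in write_sets {
        if !dedup.contains(set) {
            dedup.push(set.clone());
        }
    }
    let mut index: HashMap<NodeId, Vec<usize>> = HashMap::new();
    for (pos, set) in dedup.iter().enumerate() {
        for node in set {
            index.entry(*node).or_default().push(pos);
        }
    }
    (dedup, index)
}

fn main() {
    let write_sets = vec![vec![1, 2, 3], vec![2, 3, 4], vec![1, 2, 3]];
    let quorum = 2;
    let (dedup, index) = dedup_and_index(&write_sets);

    // Simulate ACKs from nodes 2 and 3: only the sets those nodes belong to
    // have their counters bumped, which is the point of the index.
    let mut acks = vec![0usize; dedup.len()];
    for node in [2u64, 3u64] {
        if let Some(positions) = index.get(&node) {
            for &pos in positions {
                acks[pos] += 1;
            }
        }
    }
    let all_quorums_reached = acks.iter().all(|&n| n >= quorum);
    println!("sets: {:?}, acks: {:?}, done: {}", dedup, acks, all_quorums_reached);
}
```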
@@ -20,7 +20,9 @@ garage_net.workspace = true
 arc-swap.workspace = true
 async-trait.workspace = true
 blake2.workspace = true
+bytes.workspace = true
 bytesize.workspace = true
+digest.workspace = true
 err-derive.workspace = true
 hexdump.workspace = true
 xxhash-rust.workspace = true
@@ -14,8 +14,7 @@ path = "lib.rs"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-garage_api_common.workspace = true
-garage_api_s3.workspace = true
+garage_api.workspace = true
 garage_model.workspace = true
 garage_util.workspace = true
 garage_table.workspace = true
@@ -24,9 +23,12 @@ err-derive.workspace = true
 tracing.workspace = true
 percent-encoding.workspace = true

+futures.workspace = true
+
 http.workspace = true
 http-body-util.workspace = true
 hyper.workspace = true
+hyper-util.workspace = true

 tokio.workspace = true

@@ -2,14 +2,14 @@ use err_derive::Error;
 use hyper::header::HeaderValue;
 use hyper::{HeaderMap, StatusCode};

-use garage_api_common::generic_server::ApiError;
+use garage_api::generic_server::ApiError;

 /// Errors of this crate
 #[derive(Debug, Error)]
 pub enum Error {
 /// An error received from the API crate
 #[error(display = "API error: {}", _0)]
-ApiError(garage_api_s3::error::Error),
+ApiError(garage_api::s3::error::Error),

 /// The file does not exist
 #[error(display = "Not found")]
@@ -22,10 +22,10 @@ pub enum Error {

 impl<T> From<T> for Error
 where
-garage_api_s3::error::Error: From<T>,
+garage_api::s3::error::Error: From<T>,
 {
 fn from(err: T) -> Self {
-Error::ApiError(garage_api_s3::error::Error::from(err))
+Error::ApiError(garage_api::s3::error::Error::from(err))
 }
 }

@@ -20,15 +20,13 @@ use opentelemetry::{

 use crate::error::*;

-use garage_api_common::cors::{
-add_cors_headers, find_matching_cors_rule, handle_options_for_bucket,
-};
-use garage_api_common::generic_server::{server_loop, UnixListenerOn};
-use garage_api_common::helpers::*;
-use garage_api_s3::error::{
+use garage_api::generic_server::{server_loop, UnixListenerOn};
+use garage_api::helpers::*;
+use garage_api::s3::cors::{add_cors_headers, find_matching_cors_rule, handle_options_for_bucket};
+use garage_api::s3::error::{
 CommonErrorDerivative, Error as ApiError, OkOrBadRequest, OkOrInternalError,
 };
-use garage_api_s3::get::{handle_get_without_ctx, handle_head_without_ctx};
+use garage_api::s3::get::{handle_get_without_ctx, handle_head_without_ctx};

 use garage_model::garage::Garage;
