Compare commits


2 commits

Baptiste Jonglez
8ff2aa729b db-snapshot: propagate any node snapshot error through RPC call
In particular, it means that "garage meta snapshot --all" will get an exit
code of 1 if any node fails to snapshot.

This makes sure that any external tool trying to snapshot nodes (e.g. from
cron) will be aware of the failure.

Fix #920
2025-01-27 18:33:36 +01:00
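With this change, an external snapshot job only needs to check the command's exit status. A minimal cron-style wrapper along those lines, sketched here with a placeholder alerting step and assuming `garage` is in the PATH:

```
#!/bin/sh
# "garage meta snapshot --all" now exits non-zero if any node fails to snapshot,
# so a plain exit-code check is enough to detect partial failures.
if ! garage meta snapshot --all; then
    # Placeholder alert: replace with whatever notification mechanism you use.
    echo "garage metadata snapshot failed on at least one node" >&2
    exit 1
fi
```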
Baptiste Jonglez
1d5142dcea db-snapshot: Fix error reporting when using "garage meta snapshot --all"
Snapshot errors on remote nodes were not reported at all.

We now get proper error output such as:

    0fa0f35be69528ab  error: Internal error: DB error: LMDB: No space left on device (os error 28)
    88d92e2971d14bae  ok

Fix #920
2025-01-27 18:33:36 +01:00
88 changed files with 8557 additions and 1997 deletions

.cargo/config.toml (new file)

@ -0,0 +1,3 @@
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]


@ -16,21 +16,32 @@ steps:
- name: build
image: nixpkgs/nix:nixos-22.05
commands:
- nix-build -j4 --attr flakePackages.dev
- nix-build --no-build-output --attr pkgs.amd64.debug --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
- name: unit + func tests (lmdb)
- name: unit + func tests
image: nixpkgs/nix:nixos-22.05
environment:
GARAGE_TEST_INTEGRATION_EXE: result-bin/bin/garage
GARAGE_TEST_INTEGRATION_PATH: tmp-garage-integration
commands:
- nix-build -j4 --attr flakePackages.tests-lmdb
- name: unit + func tests (sqlite)
image: nixpkgs/nix:nixos-22.05
commands:
- nix-build -j4 --attr flakePackages.tests-sqlite
- nix-build --no-build-output --attr pkgs.amd64.debug --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
- nix-build --no-build-output --attr test.amd64
- ./result/bin/garage_db-*
- ./result/bin/garage_api-*
- ./result/bin/garage_model-*
- ./result/bin/garage_rpc-*
- ./result/bin/garage_table-*
- ./result/bin/garage_util-*
- ./result/bin/garage_web-*
- ./result/bin/garage-*
- GARAGE_TEST_INTEGRATION_DB_ENGINE=lmdb ./result/bin/integration-* || (cat tmp-garage-integration/stderr.log; false)
- nix-shell --attr ci --run "killall -9 garage" || true
- GARAGE_TEST_INTEGRATION_DB_ENGINE=sqlite ./result/bin/integration-* || (cat tmp-garage-integration/stderr.log; false)
- rm result
- rm -rv tmp-garage-integration
- name: integration tests
image: nixpkgs/nix:nixos-22.05
commands:
- nix-build -j4 --attr flakePackages.dev
- nix-build --no-build-output --attr pkgs.amd64.debug --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
- nix-shell --attr ci --run ./script/test-smoke.sh || (cat /tmp/garage.log; false)
depends_on: [ build ]


@ -18,12 +18,13 @@ steps:
- name: build
image: nixpkgs/nix:nixos-22.05
commands:
- nix-build --attr releasePackages.${ARCH} --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
- nix-build --no-build-output --attr pkgs.${ARCH}.release --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
- name: check is static binary
image: nixpkgs/nix:nixos-22.05
commands:
- nix-shell --attr ci --run "./script/not-dynamic.sh result/bin/garage"
- nix-build --no-build-output --attr pkgs.${ARCH}.release --argstr git_version ${CI_COMMIT_TAG:-$CI_COMMIT_SHA}
- nix-shell --attr ci --run "./script/not-dynamic.sh result-bin/bin/garage"
- name: integration tests
image: nixpkgs/nix:nixos-22.05

Cargo.lock (generated): diff suppressed because it is too large.

Cargo.nix (new file): diff suppressed because it is too large.


@ -8,10 +8,7 @@ members = [
"src/table",
"src/block",
"src/model",
"src/api/common",
"src/api/s3",
"src/api/k2v",
"src/api/admin",
"src/api",
"src/web",
"src/garage",
"src/k2v-client",
@ -24,10 +21,7 @@ default-members = ["src/garage"]
# Internal Garage crates
format_table = { version = "0.1.1", path = "src/format-table" }
garage_api_common = { version = "1.0.1", path = "src/api/common" }
garage_api_admin = { version = "1.0.1", path = "src/api/admin" }
garage_api_s3 = { version = "1.0.1", path = "src/api/s3" }
garage_api_k2v = { version = "1.0.1", path = "src/api/k2v" }
garage_api = { version = "1.0.1", path = "src/api" }
garage_block = { version = "1.0.1", path = "src/block" }
garage_db = { version = "1.0.1", path = "src/db", default-features = false }
garage_model = { version = "1.0.1", path = "src/model", default-features = false }
@ -52,6 +46,7 @@ chrono = "0.4"
crc32fast = "1.4"
crc32c = "0.6"
crypto-common = "0.1"
digest = "0.10"
err-derive = "0.3"
gethostname = "0.4"
git-version = "0.3.4"
@ -64,7 +59,7 @@ ipnet = "2.9.0"
lazy_static = "1.4"
md-5 = "0.10"
mktemp = "0.5"
nix = { version = "0.29", default-features = false, features = ["fs"] }
nix = { version = "0.27", default-features = false, features = ["fs"] }
nom = "7.1"
parse_duration = "2.1"
pin-project = "1.0.12"
@ -141,6 +136,8 @@ thiserror = "1.0"
assert-json-diff = "2.0"
rustc_version = "0.4.0"
static_init = "1.0"
aws-config = "1.1.4"
aws-sdk-config = "1.13"
aws-sdk-s3 = "1.14"


@ -3,5 +3,5 @@ FROM scratch
ENV RUST_BACKTRACE=1
ENV RUST_LOG=garage=info
COPY result/bin/garage /
COPY result-bin/bin/garage /
CMD [ "/garage", "server"]


@ -1,10 +1,13 @@
.PHONY: doc all run1 run2 run3
.PHONY: doc all release shell run1 run2 run3
all:
clear
cargo build \
--config 'target.x86_64-unknown-linux-gnu.linker="clang"' \
--config 'target.x86_64-unknown-linux-gnu.rustflags=["-C", "link-arg=-fuse-ld=mold"]' \
clear; cargo build
release:
nix-build --attr pkgs.amd64.release --no-build-output
shell:
nix-shell
# ----


@ -3,22 +3,46 @@
with import ./nix/common.nix;
let
pkgs = import nixpkgs { };
pkgs = import pkgsSrc { };
compile = import ./nix/compile.nix;
build_release = target: (compile {
inherit target system git_version nixpkgs;
crane = flake.inputs.crane;
rust-overlay = flake.inputs.rust-overlay;
release = true;
}).garage;
build_debug_and_release = (target: {
debug = (compile {
inherit system target git_version pkgsSrc cargo2nixOverlay;
release = false;
}).workspace.garage { compileMode = "build"; };
release = (compile {
inherit system target git_version pkgsSrc cargo2nixOverlay;
release = true;
}).workspace.garage { compileMode = "build"; };
});
test = (rustPkgs:
pkgs.symlinkJoin {
name = "garage-tests";
paths =
builtins.map (key: rustPkgs.workspace.${key} { compileMode = "test"; })
(builtins.attrNames rustPkgs.workspace);
});
in {
releasePackages = {
amd64 = build_release "x86_64-unknown-linux-musl";
i386 = build_release "i686-unknown-linux-musl";
arm64 = build_release "aarch64-unknown-linux-musl";
arm = build_release "armv6l-unknown-linux-musleabihf";
pkgs = {
amd64 = build_debug_and_release "x86_64-unknown-linux-musl";
i386 = build_debug_and_release "i686-unknown-linux-musl";
arm64 = build_debug_and_release "aarch64-unknown-linux-musl";
arm = build_debug_and_release "armv6l-unknown-linux-musleabihf";
};
test = {
amd64 = test (compile {
inherit system git_version pkgsSrc cargo2nixOverlay;
target = "x86_64-unknown-linux-musl";
features = [
"garage/bundled-libs"
"garage/k2v"
"garage/lmdb"
"garage/sqlite"
];
});
};
flakePackages = flake.packages.${system};
}


@ -17,7 +17,7 @@ Garage can also help you serve this content.
## Gitea
You can use Garage with Gitea to store your [git LFS](https://git-lfs.github.com/) data, your users' avatar, and their attachments.
You can use Garage with Gitea to store your [git LFS](https://git-lfs.github.com/) data, your users' avatar, and their attachements.
You can configure a different target for each data type (check `[lfs]` and `[attachment]` sections of the Gitea documentation) and you can provide a default one through the `[storage]` section.
Let's start by creating a key and a bucket (your key id and secret will be needed later, keep them somewhere):
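The quoted documentation continues beyond this diff hunk with the actual commands; as a rough sketch only of the kind of setup it describes (bucket and key names here are placeholders, and the subcommands assume a recent `garage` CLI):

```
# Placeholder names; adapt to your deployment.
garage bucket create gitea-data
garage key create gitea-key
garage bucket allow --read --write gitea-data --key gitea-key
```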


@ -21,14 +21,14 @@ data_dir = [
```
Garage will automatically balance all blocks stored by the node
among the different specified directories, proportionally to the
among the different specified directories, proportionnally to the
specified capacities.
## Updating the list of storage locations
If you add new storage locations to your `data_dir`,
Garage will not rebalance existing data between storage locations.
Newly written blocks will be balanced proportionally to the specified capacities,
Newly written blocks will be balanced proportionnally to the specified capacities,
and existing data may be moved between drives to improve balancing,
but only opportunistically when a data block is re-written (e.g. an object
is re-uploaded, or an object with a duplicate block is uploaded).


@ -13,7 +13,6 @@ consistency_mode = "consistent"
metadata_dir = "/var/lib/garage/meta"
data_dir = "/var/lib/garage/data"
metadata_snapshots_dir = "/var/lib/garage/snapshots"
metadata_fsync = true
data_fsync = false
disable_scrub = false
@ -106,7 +105,6 @@ Top-level configuration options:
[`metadata_auto_snapshot_interval`](#metadata_auto_snapshot_interval),
[`metadata_dir`](#metadata_dir),
[`metadata_fsync`](#metadata_fsync),
[`metadata_snapshots_dir`](#metadata_snapshots_dir),
[`replication_factor`](#replication_factor),
[`consistency_mode`](#consistency_mode),
[`rpc_bind_addr`](#rpc_bind_addr),
@ -277,7 +275,6 @@ as the index of all objects, object version and object blocks.
Store this folder on a fast SSD drive if possible to maximize Garage's performance.
#### `data_dir` {#data_dir}
The directory in which Garage will store the data blocks of objects.
@ -298,25 +295,6 @@ data_dir = [
See [the dedicated documentation page](@/documentation/operations/multi-hdd.md)
on how to operate Garage in such a setup.
#### `metadata_snapshots_dir` (since Garage `v1.0.2`) {#metadata_snapshots_dir}
The directory in which Garage will store metadata snapshots when it
performs a snapshot of the metadata database, either when instructed to do
so from a RPC call or regularly through
[`metadata_auto_snapshot_interval`](#metadata_auto_snapshot_interval).
By default, Garage will store snapshots into a `snapshots/` subdirectory
of [`metadata_dir`](#metadata_dir). This might quickly fill up your
metadata storage space if you use snapshots, because Garage will need up
to 4x the space of the existing metadata database: each snapshot requires
roughly as much space as the original database, and Garage temporarily
needs to store up to three different snapshots before it cleans up the oldest
snapshot to go back to two stored snapshots.
To prevent filling your disk, you might to change this setting to a
directory with ample available space, e.g. on the same storage space as
[`data_dir`](#data_dir).
#### `db_engine` (since `v0.8.0`) {#db_engine}
Since `v0.8.0`, Garage can use alternative storage backends as follows:

flake.lock (generated)

@ -1,17 +1,28 @@
{
"nodes": {
"crane": {
"cargo2nix": {
"inputs": {
"flake-compat": [
"flake-compat"
],
"flake-utils": "flake-utils",
"nixpkgs": [
"nixpkgs"
],
"rust-overlay": "rust-overlay"
},
"locked": {
"lastModified": 1737689766,
"narHash": "sha256-ivVXYaYlShxYoKfSo5+y5930qMKKJ8CLcAoIBPQfJ6s=",
"owner": "ipetkov",
"repo": "crane",
"rev": "6fe74265bbb6d016d663b1091f015e2976c4a527",
"lastModified": 1705129117,
"narHash": "sha256-LgdDHibvimzYhxBK3kxCk2gAL7k4Hyigl5KI0X9cijA=",
"owner": "cargo2nix",
"repo": "cargo2nix",
"rev": "ae19a9e1f8f0880c088ea155ab66cee1fa001f59",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"owner": "cargo2nix",
"repo": "cargo2nix",
"rev": "ae19a9e1f8f0880c088ea155ab66cee1fa001f59",
"type": "github"
}
},
@ -31,15 +42,12 @@
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"lastModified": 1659877975,
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
"type": "github"
},
"original": {
@ -66,46 +74,34 @@
},
"root": {
"inputs": {
"crane": "crane",
"cargo2nix": "cargo2nix",
"flake-compat": "flake-compat",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
"flake-utils": [
"cargo2nix",
"flake-utils"
],
"nixpkgs": "nixpkgs"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"cargo2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1738549608,
"narHash": "sha256-GdyT9QEUSx5k/n8kILuNy83vxxdyUfJ8jL5mMpQZWfw=",
"lastModified": 1736649126,
"narHash": "sha256-XCw5sv/ePsroqiF3lJM6Y2X9EhPdHeE47gr3Q8b0UQw=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "35c6f8c4352f995ecd53896200769f80a3e8f22d",
"rev": "162ab0edc2936508470199b2e8e6c444a2535019",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "35c6f8c4352f995ecd53896200769f80a3e8f22d",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"rev": "162ab0edc2936508470199b2e8e6c444a2535019",
"type": "github"
}
}

flake.nix

@ -2,84 +2,92 @@
description =
"Garage, an S3-compatible distributed object store for self-hosted deployments";
# Nixpkgs 24.11 as of 2025-01-12
# Nixpkgs 24.11 as of 2025-01-12 has rustc v1.82
inputs.nixpkgs.url =
"github:NixOS/nixpkgs/7c4869c47090dd7f9f1bdfb49a22aea026996815";
# Rust overlay as of 2025-02-03
inputs.rust-overlay.url =
"github:oxalica/rust-overlay/35c6f8c4352f995ecd53896200769f80a3e8f22d";
inputs.rust-overlay.inputs.nixpkgs.follows = "nixpkgs";
inputs.crane.url = "github:ipetkov/crane";
inputs.flake-compat.url = "github:nix-community/flake-compat";
inputs.flake-utils.url = "github:numtide/flake-utils";
outputs = { self, nixpkgs, flake-utils, crane, rust-overlay, ... }:
inputs.cargo2nix = {
# As of 2022-10-18: two small patches over unstable branch, one for clippy and one to fix feature detection
#url = "github:Alexis211/cargo2nix/a7a61179b66054904ef6a195d8da736eaaa06c36";
# As of 2023-04-25:
# - my two patches were merged into unstable (one for clippy and one to "fix" feature detection)
# - rustc v1.66
# url = "github:cargo2nix/cargo2nix/8fb57a670f7993bfc24099c33eb9c5abb51f29a2";
# Mainline cargo2nix as of of 2025-01-12 (branch release-0.11.0)
url = "github:cargo2nix/cargo2nix/ae19a9e1f8f0880c088ea155ab66cee1fa001f59";
# Rust overlay as of 2025-01-12
inputs.rust-overlay.url =
"github:oxalica/rust-overlay/162ab0edc2936508470199b2e8e6c444a2535019";
inputs.nixpkgs.follows = "nixpkgs";
inputs.flake-compat.follows = "flake-compat";
};
inputs.flake-utils.follows = "cargo2nix/flake-utils";
outputs = { self, nixpkgs, cargo2nix, flake-utils, ... }:
let
git_version = self.lastModifiedDate;
compile = import ./nix/compile.nix;
in
flake-utils.lib.eachDefaultSystem (system:
let
pkgs = nixpkgs.legacyPackages.${system};
packageFor = target: release: (compile {
inherit system target nixpkgs crane rust-overlay release;
}).garage;
testWith = extraTestEnv: (compile {
inherit system nixpkgs crane rust-overlay extraTestEnv;
release = false;
}).garage-test;
in
{
packages = {
# default = native release build
default = packageFor null true;
# <arch> = cross-compiled, statically-linked release builds
amd64 = packageFor "x86_64-unknown-linux-musl" true;
i386 = packageFor "i686-unknown-linux-musl" true;
arm64 = packageFor "aarch64-unknown-linux-musl" true;
arm = packageFor "armv6l-unknown-linux-musl" true;
# dev = native dev build
dev = packageFor null false;
# test = cargo test
tests = testWith {};
tests-lmdb = testWith {
GARAGE_TEST_INTEGRATION_DB_ENGINE = "lmdb";
packages =
let
packageFor = target: (compile {
inherit system git_version target;
pkgsSrc = nixpkgs;
cargo2nixOverlay = cargo2nix.overlays.default;
release = true;
}).workspace.garage { compileMode = "build"; };
in
{
# default = native release build
default = packageFor null;
# other = cross-compiled, statically-linked builds
amd64 = packageFor "x86_64-unknown-linux-musl";
i386 = packageFor "i686-unknown-linux-musl";
arm64 = packageFor "aarch64-unknown-linux-musl";
arm = packageFor "armv6l-unknown-linux-musl";
};
tests-sqlite = testWith {
GARAGE_TEST_INTEGRATION_DB_ENGINE = "sqlite";
};
};
# ---- developpment shell, for making native builds only ----
devShells =
let
targets = compile {
inherit system nixpkgs crane rust-overlay;
};
shellWithPackages = (packages: (compile {
inherit system git_version;
pkgsSrc = nixpkgs;
cargo2nixOverlay = cargo2nix.overlays.default;
}).workspaceShell { inherit packages; });
in
{
default = targets.devShell;
# import the full shell using `nix develop .#full`
full = pkgs.mkShell {
buildInputs = with pkgs; [
targets.toolchain
protobuf
default = shellWithPackages
(with pkgs; [
rustfmt
clang
mold
# ---- extra packages for dev tasks ----
rust-analyzer
cargo-audit
cargo-outdated
cargo-machete
nixpkgs-fmt
];
};
]);
# import the full shell using `nix develop .#full`
full = shellWithPackages (with pkgs; [
rustfmt
rust-analyzer
clang
mold
# ---- extra packages for dev tasks ----
cargo-audit
cargo-outdated
cargo-machete
nixpkgs-fmt
]);
};
});
}
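For orientation, both sides of this flake.nix diff expose per-architecture packages plus a default and `full` dev shell, so they are consumed with the standard flake commands, roughly as sketched below (attribute names taken from the diff above):

```
nix build .#amd64        # statically linked x86_64 release build
nix develop              # default development shell
nix develop .#full       # shell with extra tools (rust-analyzer, cargo-audit, ...)
```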


@ -2,7 +2,7 @@
with import ./common.nix;
let
pkgs = import nixpkgs { };
pkgs = import pkgsSrc { };
lib = pkgs.lib;
/* Converts a key list and a value list to a set


@ -10,9 +10,9 @@ let
flake = (import flake-compat { system = builtins.currentSystem; src = ../.; });
in
{
flake = flake.defaultNix;
nixpkgs = flake.defaultNix.inputs.nixpkgs;
devShells = flake.defaultNix.devShells.${builtins.currentSystem};
rec {
pkgsSrc = flake.defaultNix.inputs.nixpkgs;
cargo2nix = flake.defaultNix.inputs.cargo2nix;
cargo2nixOverlay = cargo2nix.overlays.default;
devShells = builtins.getAttr builtins.currentSystem flake.defaultNix.devShells;
}


@ -1,64 +1,83 @@
{
/* build inputs */
nixpkgs,
crane,
rust-overlay,
/* parameters */
system,
git_version ? null,
target ? null,
release ? false,
features ? null,
extraTestEnv ? {}
}:
{ system, target ? null, pkgsSrc, cargo2nixOverlay
, release ? false, git_version ? null, features ? null, }:
let
log = v: builtins.trace v v;
# NixOS and Rust/Cargo triples do not match for ARM, fix it here.
rustTarget = if target == "armv6l-unknown-linux-musleabihf" then
"arm-unknown-linux-musleabihf"
else
target;
rustTargetEnvMap = {
"x86_64-unknown-linux-musl" = "X86_64_UNKNOWN_LINUX_MUSL";
"aarch64-unknown-linux-musl" = "AARCH64_UNKNOWN_LINUX_MUSL";
"i686-unknown-linux-musl" = "I686_UNKNOWN_LINUX_MUSL";
"arm-unknown-linux-musleabihf" = "ARM_UNKNOWN_LINUX_MUSLEABIHF";
};
pkgsNative = import nixpkgs {
inherit system;
overlays = [ (import rust-overlay) ];
};
pkgs = if target != null then
import nixpkgs {
import pkgsSrc {
inherit system;
crossSystem = {
config = target;
isStatic = true;
};
overlays = [ (import rust-overlay) ];
overlays = [ cargo2nixOverlay ];
}
else
pkgsNative;
import pkgsSrc {
inherit system;
overlays = [ cargo2nixOverlay ];
};
inherit (pkgs) lib stdenv;
toolchainOptions = {
rustVersion = "1.78.0";
extraRustComponents = [ "clippy" ];
};
toolchainFn = (p: p.rust-bin.stable."1.82.0".default.override {
targets = lib.optionals (target != null) [ rustTarget ];
extensions = [
"rust-src"
"rustfmt"
/* Cargo2nix provides many overrides by default, you can take inspiration from them:
https://github.com/cargo2nix/cargo2nix/blob/master/overlay/overrides.nix
You can have a complete list of the available options by looking at the overriden object, mkcrate:
https://github.com/cargo2nix/cargo2nix/blob/master/overlay/mkcrate.nix
*/
packageOverrides = pkgs:
pkgs.rustBuilder.overrides.all ++ [
/* [1] We need to alter Nix hardening to make static binaries: PIE,
Position Independent Executables seems to be supported only on amd64. Having
this flag set either 1. make our executables crash or 2. compile as dynamic on some platforms.
Here, we deactivate it. Later (find `codegenOpts`), we reactivate it for supported targets
(only amd64 curently) through the `-static-pie` flag.
PIE is a feature used by ASLR, which helps mitigate security issues.
Learn more about Nix Hardening at: https://github.com/NixOS/nixpkgs/blob/master/pkgs/build-support/cc-wrapper/add-hardening.sh
[2] We want to inject the git version while keeping the build deterministic.
As we do not want to consider the .git folder as part of the input source,
we ask the user (the CI often) to pass the value to Nix.
[3] We don't want libsodium-sys and zstd-sys to try to use pkgconfig to build against a system library.
However the features to do so get activated for some reason (due to a bug in cargo2nix?),
so disable them manually here.
*/
(pkgs.rustBuilder.rustLib.makeOverride {
name = "garage";
overrideAttrs = drv:
(if git_version != null then {
# [2]
preConfigure = ''
${drv.preConfigure or ""}
export GIT_VERSION="${git_version}"
'';
} else
{ }) // {
# [1]
hardeningDisable = [ "pie" ];
};
})
(pkgs.rustBuilder.rustLib.makeOverride {
name = "libsodium-sys";
overrideArgs = old: {
features = [ ]; # [3]
};
})
(pkgs.rustBuilder.rustLib.makeOverride {
name = "zstd-sys";
overrideArgs = old: {
features = [ ]; # [3]
};
})
];
});
craneLib = (crane.mkLib pkgs).overrideToolchain toolchainFn;
src = craneLib.cleanCargoSource ../.;
/* We ship some parts of the code disabled by default by putting them behind a flag.
It speeds up the compilation (when the feature is not required) and released crates have less dependency by default (less attack surface, disk space, etc.).
@ -68,15 +87,16 @@ let
rootFeatures = if features != null then
features
else
([ "bundled-libs" "lmdb" "sqlite" "k2v" ] ++ (lib.optionals release [
"consul-discovery"
"kubernetes-discovery"
"metrics"
"telemetry-otlp"
"syslog"
]));
([ "garage/bundled-libs" "garage/lmdb" "garage/sqlite" "garage/k2v" ] ++ (if release then [
"garage/consul-discovery"
"garage/kubernetes-discovery"
"garage/metrics"
"garage/telemetry-otlp"
"garage/syslog"
] else
[ ]));
featuresStr = lib.concatStringsSep "," rootFeatures;
packageFun = import ../Cargo.nix;
/* We compile fully static binaries with musl to simplify deployment on most systems.
When possible, we reactivate PIE hardening (see above).
@ -87,9 +107,12 @@ let
For more information on static builds, please refer to Rust's RFC 1721.
https://rust-lang.github.io/rfcs/1721-crt-static.html#specifying-dynamicstatic-c-runtime-linkage
*/
codegenOptsMap = {
"x86_64-unknown-linux-musl" =
[ "target-feature=+crt-static" "link-arg=-static-pie" ];
codegenOpts = {
"armv6l-unknown-linux-musleabihf" = [
"target-feature=+crt-static"
"link-arg=-static"
]; # compile as dynamic with static-pie
"aarch64-unknown-linux-musl" = [
"target-feature=+crt-static"
"link-arg=-static"
@ -98,95 +121,17 @@ let
"target-feature=+crt-static"
"link-arg=-static"
]; # segfault with static-pie
"armv6l-unknown-linux-musleabihf" = [
"target-feature=+crt-static"
"link-arg=-static"
]; # compile as dynamic with static-pie
"x86_64-unknown-linux-musl" =
[ "target-feature=+crt-static" "link-arg=-static-pie" ];
};
codegenOpts = if target != null then codegenOptsMap.${target} else [
"link-arg=-fuse-ld=mold"
];
# NixOS and Rust/Cargo triples do not match for ARM, fix it here.
rustTarget = if target == "armv6l-unknown-linux-musleabihf" then
"arm-unknown-linux-musleabihf"
else
target;
commonArgs =
{
inherit src;
pname = "garage";
version = "dev";
strictDeps = true;
cargoExtraArgs = "--locked --features ${featuresStr}";
cargoTestExtraArgs = "--workspace";
nativeBuildInputs = [
pkgsNative.protobuf
pkgs.stdenv.cc
] ++ lib.optionals (target == null) [
pkgs.clang
pkgs.mold
];
CARGO_PROFILE = if release then "release" else "dev";
CARGO_BUILD_RUSTFLAGS =
lib.concatStringsSep
" "
(builtins.map (flag: "-C ${flag}") codegenOpts);
}
//
(if rustTarget != null then {
CARGO_BUILD_TARGET = rustTarget;
"CARGO_TARGET_${rustTargetEnvMap.${rustTarget}}_LINKER" = "${stdenv.cc.targetPrefix}cc";
HOST_CC = "${stdenv.cc.nativePrefix}cc";
TARGET_CC = "${stdenv.cc.targetPrefix}cc";
} else {
CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER = "clang";
});
in rec {
toolchain = toolchainFn pkgs;
devShell = pkgs.mkShell {
buildInputs = [
toolchain
] ++ (with pkgs; [
protobuf
clang
mold
]);
};
# ---- building garage ----
garage-deps = craneLib.buildDepsOnly commonArgs;
garage = craneLib.buildPackage (commonArgs // {
cargoArtifacts = garage-deps;
doCheck = false;
} //
(if git_version != null then {
version = git_version;
GIT_VERSION = git_version;
} else {}));
# ---- testing garage ----
garage-test-bin = craneLib.cargoBuild (commonArgs // {
cargoArtifacts = garage-deps;
pname = "garage-tests";
CARGO_PROFILE = "test";
cargoExtraArgs = "${commonArgs.cargoExtraArgs} --tests --workspace";
doCheck = false;
});
garage-test = craneLib.cargoTest (commonArgs // {
cargoArtifacts = garage-test-bin;
nativeBuildInputs = commonArgs.nativeBuildInputs ++ [
pkgs.cacert
];
} // extraTestEnv);
}
in pkgs.rustBuilder.makePackageSet ({
inherit release packageFun packageOverrides codegenOpts rootFeatures;
target = rustTarget;
} // toolchainOptions)


@ -7,12 +7,7 @@ if [ "$#" -ne 1 ]; then
exit 2
fi
if [ ! -x "$1" ]; then
echo "[fail] $1 does not exist or is not an executable"
exit 1
fi
if file "$1" | grep 'dynamically linked' 2>&1; then
if file $1 | grep 'dynamically linked' 2>&1; then
echo "[fail] $1 is dynamic"
exit 1
fi


@ -3,7 +3,7 @@
with import ./nix/common.nix;
let
pkgs = import nixpkgs {
pkgs = import pkgsSrc {
inherit system;
};
winscp = (import ./nix/winscp.nix) pkgs;
@ -39,7 +39,7 @@ in
--endpoint-url https://garage.deuxfleurs.fr \
--region garage \
s3 cp \
./result/bin/garage \
./result-bin/bin/garage \
s3://garagehq.deuxfleurs.fr/_releases/''${CI_COMMIT_TAG:-$CI_COMMIT_SHA}/''${TARGET}/garage
}


@ -1,5 +1,5 @@
[package]
name = "garage_api_s3"
name = "garage_api"
version = "1.0.1"
authors = ["Alex Auvolat <alex@adnab.me>"]
edition = "2018"
@ -20,9 +20,9 @@ garage_block.workspace = true
garage_net.workspace = true
garage_util.workspace = true
garage_rpc.workspace = true
garage_api_common.workspace = true
aes-gcm.workspace = true
argon2.workspace = true
async-compression.workspace = true
async-trait.workspace = true
base64.workspace = true
@ -30,15 +30,20 @@ bytes.workspace = true
chrono.workspace = true
crc32fast.workspace = true
crc32c.workspace = true
crypto-common.workspace = true
err-derive.workspace = true
hex.workspace = true
hmac.workspace = true
idna.workspace = true
tracing.workspace = true
md-5.workspace = true
nom.workspace = true
pin-project.workspace = true
sha1.workspace = true
sha2.workspace = true
futures.workspace = true
futures-util.workspace = true
tokio.workspace = true
tokio-stream.workspace = true
tokio-util.workspace = true
@ -49,13 +54,21 @@ httpdate.workspace = true
http-range.workspace = true
http-body-util.workspace = true
hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
hyper-util.workspace = true
multer.workspace = true
percent-encoding.workspace = true
roxmltree.workspace = true
url.workspace = true
serde.workspace = true
serde_bytes.workspace = true
serde_json.workspace = true
quick-xml.workspace = true
opentelemetry.workspace = true
opentelemetry-prometheus = { workspace = true, optional = true }
prometheus = { workspace = true, optional = true }
[features]
k2v = [ "garage_util/k2v", "garage_model/k2v" ]
metrics = [ "opentelemetry-prometheus", "prometheus" ]


@ -1,43 +0,0 @@
[package]
name = "garage_api_admin"
version = "1.0.1"
authors = ["Alex Auvolat <alex@adnab.me>"]
edition = "2018"
license = "AGPL-3.0"
description = "Admin API server crate for the Garage object store"
repository = "https://git.deuxfleurs.fr/Deuxfleurs/garage"
readme = "../../README.md"
[lib]
path = "lib.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
garage_model.workspace = true
garage_table.workspace = true
garage_util.workspace = true
garage_rpc.workspace = true
garage_api_common.workspace = true
argon2.workspace = true
async-trait.workspace = true
err-derive.workspace = true
hex.workspace = true
tracing.workspace = true
futures.workspace = true
tokio.workspace = true
http.workspace = true
hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
url.workspace = true
serde.workspace = true
serde_json.workspace = true
opentelemetry.workspace = true
opentelemetry-prometheus = { workspace = true, optional = true }
prometheus = { workspace = true, optional = true }
[features]
metrics = [ "opentelemetry-prometheus", "prometheus" ]


@ -20,15 +20,15 @@ use garage_rpc::system::ClusterHealthStatus;
use garage_util::error::Error as GarageError;
use garage_util::socket_address::UnixOrTCPSocketAddress;
use garage_api_common::generic_server::*;
use garage_api_common::helpers::*;
use crate::generic_server::*;
use crate::bucket::*;
use crate::cluster::*;
use crate::error::*;
use crate::key::*;
use crate::router_v0;
use crate::router_v1::{Authorization, Endpoint};
use crate::admin::bucket::*;
use crate::admin::cluster::*;
use crate::admin::error::*;
use crate::admin::key::*;
use crate::admin::router_v0;
use crate::admin::router_v1::{Authorization, Endpoint};
use crate::helpers::*;
pub type ResBody = BoxBody<Error>;


@ -17,12 +17,11 @@ use garage_model::permission::*;
use garage_model::s3::mpu_table;
use garage_model::s3::object_table::*;
use garage_api_common::common_error::CommonError;
use garage_api_common::helpers::*;
use crate::api_server::ResBody;
use crate::error::*;
use crate::key::ApiBucketKeyPerm;
use crate::admin::api_server::ResBody;
use crate::admin::error::*;
use crate::admin::key::ApiBucketKeyPerm;
use crate::common_error::CommonError;
use crate::helpers::*;
pub async fn handle_list_buckets(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
let buckets = garage


@ -12,10 +12,9 @@ use garage_rpc::layout;
use garage_model::garage::Garage;
use garage_api_common::helpers::{json_ok_response, parse_json_body};
use crate::api_server::ResBody;
use crate::error::*;
use crate::admin::api_server::ResBody;
use crate::admin::error::*;
use crate::helpers::{json_ok_response, parse_json_body};
pub async fn handle_get_cluster_status(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
let layout = garage.system.cluster_layout();


@ -1,24 +1,20 @@
use std::convert::TryFrom;
use err_derive::Error;
use hyper::header::HeaderValue;
use hyper::{HeaderMap, StatusCode};
pub use garage_model::helper::error::Error as HelperError;
use garage_api_common::common_error::{commonErrorDerivative, CommonError};
pub use garage_api_common::common_error::{
CommonErrorDerivative, OkOrBadRequest, OkOrInternalError,
};
use garage_api_common::generic_server::ApiError;
use garage_api_common::helpers::*;
use crate::common_error::CommonError;
pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
use crate::generic_server::ApiError;
use crate::helpers::*;
/// Errors of this crate
#[derive(Debug, Error)]
pub enum Error {
#[error(display = "{}", _0)]
/// Error from common error
Common(#[error(source)] CommonError),
Common(CommonError),
// Category: cannot process
/// The API access key does not exist
@ -33,21 +29,17 @@ pub enum Error {
KeyAlreadyExists(String),
}
commonErrorDerivative!(Error);
/// FIXME: helper errors are transformed into their corresponding variants
/// in the Error struct, but in many case a helper error should be considered
/// an internal error.
impl From<HelperError> for Error {
fn from(err: HelperError) -> Error {
match CommonError::try_from(err) {
Ok(ce) => Self::Common(ce),
Err(HelperError::NoSuchAccessKey(k)) => Self::NoSuchAccessKey(k),
Err(_) => unreachable!(),
}
impl<T> From<T> for Error
where
CommonError: From<T>,
{
fn from(err: T) -> Self {
Error::Common(CommonError::from(err))
}
}
impl CommonErrorDerivative for Error {}
impl Error {
fn code(&self) -> &'static str {
match self {


@ -9,10 +9,9 @@ use garage_table::*;
use garage_model::garage::Garage;
use garage_model::key_table::*;
use garage_api_common::helpers::*;
use crate::api_server::ResBody;
use crate::error::*;
use crate::admin::api_server::ResBody;
use crate::admin::error::*;
use crate::helpers::*;
pub async fn handle_list_keys(garage: &Arc<Garage>) -> Result<Response<ResBody>, Error> {
let res = garage


@ -1,6 +1,3 @@
#[macro_use]
extern crate tracing;
pub mod api_server;
mod error;
mod router_v0;


@ -2,9 +2,8 @@ use std::borrow::Cow;
use hyper::{Method, Request};
use garage_api_common::router_macros::*;
use crate::error::*;
use crate::admin::error::*;
use crate::router_macros::*;
router_match! {@func


@ -2,10 +2,9 @@ use std::borrow::Cow;
use hyper::{Method, Request};
use garage_api_common::router_macros::*;
use crate::error::*;
use crate::router_v0;
use crate::admin::error::*;
use crate::admin::router_v0;
use crate::router_macros::*;
pub enum Authorization {
None,


@ -1,45 +0,0 @@
[package]
name = "garage_api_common"
version = "1.0.1"
authors = ["Alex Auvolat <alex@adnab.me>"]
edition = "2018"
license = "AGPL-3.0"
description = "Common functions for the API server crates for the Garage object store"
repository = "https://git.deuxfleurs.fr/Deuxfleurs/garage"
readme = "../../README.md"
[lib]
path = "lib.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
garage_model.workspace = true
garage_table.workspace = true
garage_util.workspace = true
async-trait.workspace = true
bytes.workspace = true
chrono.workspace = true
crypto-common.workspace = true
err-derive.workspace = true
hex.workspace = true
hmac.workspace = true
idna.workspace = true
tracing.workspace = true
nom.workspace = true
pin-project.workspace = true
sha2.workspace = true
futures.workspace = true
tokio.workspace = true
http.workspace = true
http-body-util.workspace = true
hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
hyper-util.workspace = true
url.workspace = true
serde.workspace = true
serde_json.workspace = true
opentelemetry.workspace = true


@ -1,170 +0,0 @@
use std::sync::Arc;
use http::header::{
ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN,
ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD,
};
use hyper::{body::Body, body::Incoming as IncomingBody, Request, Response, StatusCode};
use garage_model::bucket_table::{BucketParams, CorsRule as GarageCorsRule};
use garage_model::garage::Garage;
use crate::common_error::{
helper_error_as_internal, CommonError, OkOrBadRequest, OkOrInternalError,
};
use crate::helpers::*;
pub fn find_matching_cors_rule<'a>(
bucket_params: &'a BucketParams,
req: &Request<impl Body>,
) -> Result<Option<&'a GarageCorsRule>, CommonError> {
if let Some(cors_config) = bucket_params.cors_config.get() {
if let Some(origin) = req.headers().get("Origin") {
let origin = origin.to_str()?;
let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
None => vec![],
};
return Ok(cors_config.iter().find(|rule| {
cors_rule_matches(rule, origin, req.method().as_ref(), request_headers.iter())
}));
}
}
Ok(None)
}
pub fn cors_rule_matches<'a, HI, S>(
rule: &GarageCorsRule,
origin: &'a str,
method: &'a str,
mut request_headers: HI,
) -> bool
where
HI: Iterator<Item = S>,
S: AsRef<str>,
{
rule.allow_origins.iter().any(|x| x == "*" || x == origin)
&& rule.allow_methods.iter().any(|x| x == "*" || x == method)
&& request_headers.all(|h| {
rule.allow_headers
.iter()
.any(|x| x == "*" || x == h.as_ref())
})
}
pub fn add_cors_headers(
resp: &mut Response<impl Body>,
rule: &GarageCorsRule,
) -> Result<(), http::header::InvalidHeaderValue> {
let h = resp.headers_mut();
h.insert(
ACCESS_CONTROL_ALLOW_ORIGIN,
rule.allow_origins.join(", ").parse()?,
);
h.insert(
ACCESS_CONTROL_ALLOW_METHODS,
rule.allow_methods.join(", ").parse()?,
);
h.insert(
ACCESS_CONTROL_ALLOW_HEADERS,
rule.allow_headers.join(", ").parse()?,
);
h.insert(
ACCESS_CONTROL_EXPOSE_HEADERS,
rule.expose_headers.join(", ").parse()?,
);
Ok(())
}
pub async fn handle_options_api(
garage: Arc<Garage>,
req: &Request<IncomingBody>,
bucket_name: Option<String>,
) -> Result<Response<EmptyBody>, CommonError> {
// FIXME: CORS rules of buckets with local aliases are
// not taken into account.
// If the bucket name is a global bucket name,
// we try to apply the CORS rules of that bucket.
// If a user has a local bucket name that has
// the same name, its CORS rules won't be applied
// and will be shadowed by the rules of the globally
// existing bucket (but this is inevitable because
// OPTIONS calls are not auhtenticated).
if let Some(bn) = bucket_name {
let helper = garage.bucket_helper();
let bucket_id = helper
.resolve_global_bucket_name(&bn)
.await
.map_err(helper_error_as_internal)?;
if let Some(id) = bucket_id {
let bucket = garage
.bucket_helper()
.get_existing_bucket(id)
.await
.map_err(helper_error_as_internal)?;
let bucket_params = bucket.state.into_option().unwrap();
handle_options_for_bucket(req, &bucket_params)
} else {
// If there is a bucket name in the request, but that name
// does not correspond to a global alias for a bucket,
// then it's either a non-existing bucket or a local bucket.
// We have no way of knowing, because the request is not
// authenticated and thus we can't resolve local aliases.
// We take the permissive approach of allowing everything,
// because we don't want to prevent web apps that use
// local bucket names from making API calls.
Ok(Response::builder()
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
.header(ACCESS_CONTROL_ALLOW_METHODS, "*")
.status(StatusCode::OK)
.body(EmptyBody::new())?)
}
} else {
// If there is no bucket name in the request,
// we are doing a ListBuckets call, which we want to allow
// for all origins.
Ok(Response::builder()
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
.header(ACCESS_CONTROL_ALLOW_METHODS, "GET")
.status(StatusCode::OK)
.body(EmptyBody::new())?)
}
}
pub fn handle_options_for_bucket(
req: &Request<IncomingBody>,
bucket_params: &BucketParams,
) -> Result<Response<EmptyBody>, CommonError> {
let origin = req
.headers()
.get("Origin")
.ok_or_bad_request("Missing Origin header")?
.to_str()?;
let request_method = req
.headers()
.get(ACCESS_CONTROL_REQUEST_METHOD)
.ok_or_bad_request("Missing Access-Control-Request-Method header")?
.to_str()?;
let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
None => vec![],
};
if let Some(cors_config) = bucket_params.cors_config.get() {
let matching_rule = cors_config
.iter()
.find(|rule| cors_rule_matches(rule, origin, request_method, request_headers.iter()));
if let Some(rule) = matching_rule {
let mut resp = Response::builder()
.status(StatusCode::OK)
.body(EmptyBody::new())?;
add_cors_headers(&mut resp, rule).ok_or_internal_error("Invalid CORS configuration")?;
return Ok(resp);
}
}
Err(CommonError::Forbidden(
"This CORS request is not allowed.".into(),
))
}


@ -1,12 +0,0 @@
//! Crate for serving a S3 compatible API
#[macro_use]
extern crate tracing;
pub mod common_error;
pub mod cors;
pub mod encoding;
pub mod generic_server;
pub mod helpers;
pub mod router_macros;
pub mod signature;


@ -1,5 +1,3 @@
use std::convert::TryFrom;
use err_derive::Error;
use hyper::StatusCode;
@ -57,35 +55,6 @@ pub enum CommonError {
InvalidBucketName(String),
}
#[macro_export]
macro_rules! commonErrorDerivative {
( $error_struct: ident ) => {
impl From<garage_util::error::Error> for $error_struct {
fn from(err: garage_util::error::Error) -> Self {
Self::Common(CommonError::InternalError(err))
}
}
impl From<http::Error> for $error_struct {
fn from(err: http::Error) -> Self {
Self::Common(CommonError::Http(err))
}
}
impl From<hyper::Error> for $error_struct {
fn from(err: hyper::Error) -> Self {
Self::Common(CommonError::Hyper(err))
}
}
impl From<hyper::header::ToStrError> for $error_struct {
fn from(err: hyper::header::ToStrError) -> Self {
Self::Common(CommonError::InvalidHeader(err))
}
}
impl CommonErrorDerivative for $error_struct {}
};
}
pub use commonErrorDerivative;
impl CommonError {
pub fn http_status_code(&self) -> StatusCode {
match self {
@ -128,39 +97,18 @@ impl CommonError {
}
}
impl TryFrom<HelperError> for CommonError {
type Error = HelperError;
fn try_from(err: HelperError) -> Result<Self, HelperError> {
impl From<HelperError> for CommonError {
fn from(err: HelperError) -> Self {
match err {
HelperError::Internal(i) => Ok(Self::InternalError(i)),
HelperError::BadRequest(b) => Ok(Self::BadRequest(b)),
HelperError::InvalidBucketName(n) => Ok(Self::InvalidBucketName(n)),
HelperError::NoSuchBucket(n) => Ok(Self::NoSuchBucket(n)),
e => Err(e),
HelperError::Internal(i) => Self::InternalError(i),
HelperError::BadRequest(b) => Self::BadRequest(b),
HelperError::InvalidBucketName(n) => Self::InvalidBucketName(n),
HelperError::NoSuchBucket(n) => Self::NoSuchBucket(n),
e => Self::bad_request(format!("{}", e)),
}
}
}
/// This function converts HelperErrors into CommonErrors,
/// for variants that exist in CommonError.
/// This is used for helper functions that might return InvalidBucketName
/// or NoSuchBucket for instance, and we want to pass that error
/// up to our caller.
pub fn pass_helper_error(err: HelperError) -> CommonError {
match CommonError::try_from(err) {
Ok(e) => e,
Err(e) => panic!("Helper error `{}` should hot have happenned here", e),
}
}
pub fn helper_error_as_internal(err: HelperError) -> CommonError {
match err {
HelperError::Internal(e) => CommonError::InternalError(e),
e => CommonError::InternalError(GarageError::Message(e.to_string())),
}
}
pub trait CommonErrorDerivative: From<CommonError> {
fn internal_error<M: ToString>(msg: M) -> Self {
Self::from(CommonError::InternalError(GarageError::Message(


@ -36,7 +36,7 @@ use garage_util::socket_address::UnixOrTCPSocketAddress;
use crate::helpers::{BoxBody, ErrorBody};
pub trait ApiEndpoint: Send + Sync + 'static {
pub(crate) trait ApiEndpoint: Send + Sync + 'static {
fn name(&self) -> &'static str;
fn add_span_attributes(&self, span: SpanRef<'_>);
}
@ -48,7 +48,7 @@ pub trait ApiError: std::error::Error + Send + Sync + 'static {
}
#[async_trait]
pub trait ApiHandler: Send + Sync + 'static {
pub(crate) trait ApiHandler: Send + Sync + 'static {
const API_NAME: &'static str;
const API_NAME_DISPLAY: &'static str;
@ -63,7 +63,7 @@ pub trait ApiHandler: Send + Sync + 'static {
) -> Result<Response<BoxBody<Self::Error>>, Self::Error>;
}
pub struct ApiServer<A: ApiHandler> {
pub(crate) struct ApiServer<A: ApiHandler> {
region: String,
api_handler: A,


@ -363,9 +363,9 @@ mod tests {
}
#[derive(Serialize)]
pub struct CustomApiErrorBody {
pub code: String,
pub message: String,
pub region: String,
pub path: String,
pub(crate) struct CustomApiErrorBody {
pub(crate) code: String,
pub(crate) message: String,
pub(crate) region: String,
pub(crate) path: String,
}


@ -1,38 +0,0 @@
[package]
name = "garage_api_k2v"
version = "1.0.1"
authors = ["Alex Auvolat <alex@adnab.me>"]
edition = "2018"
license = "AGPL-3.0"
description = "K2V API server crate for the Garage object store"
repository = "https://git.deuxfleurs.fr/Deuxfleurs/garage"
readme = "../../README.md"
[lib]
path = "lib.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
garage_model = { workspace = true, features = [ "k2v" ] }
garage_table.workspace = true
garage_util = { workspace = true, features = [ "k2v" ] }
garage_api_common.workspace = true
async-trait.workspace = true
base64.workspace = true
err-derive.workspace = true
tracing.workspace = true
futures.workspace = true
tokio.workspace = true
http.workspace = true
http-body-util.workspace = true
hyper = { workspace = true, default-features = false, features = ["server", "http1"] }
percent-encoding.workspace = true
url.workspace = true
serde.workspace = true
serde_json.workspace = true
opentelemetry.workspace = true


@ -12,25 +12,26 @@ use garage_util::socket_address::UnixOrTCPSocketAddress;
use garage_model::garage::Garage;
use garage_api_common::cors::*;
use garage_api_common::generic_server::*;
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_request;
use crate::generic_server::*;
use crate::k2v::error::*;
use crate::batch::*;
use crate::error::*;
use crate::index::*;
use crate::item::*;
use crate::router::Endpoint;
use crate::signature::verify_request;
pub use garage_api_common::signature::streaming::ReqBody;
use crate::helpers::*;
use crate::k2v::batch::*;
use crate::k2v::index::*;
use crate::k2v::item::*;
use crate::k2v::router::Endpoint;
use crate::s3::cors::*;
pub use crate::signature::streaming::ReqBody;
pub type ResBody = BoxBody<Error>;
pub struct K2VApiServer {
garage: Arc<Garage>,
}
pub struct K2VApiEndpoint {
pub(crate) struct K2VApiEndpoint {
bucket_name: String,
endpoint: Endpoint,
}
@ -89,13 +90,11 @@ impl ApiHandler for K2VApiServer {
let bucket_id = garage
.bucket_helper()
.resolve_bucket(&bucket_name, &api_key)
.await
.map_err(pass_helper_error)?;
.await?;
let bucket = garage
.bucket_helper()
.get_existing_bucket(bucket_id)
.await
.map_err(helper_error_as_internal)?;
.await?;
let bucket_params = bucket.state.into_option().unwrap();
let allowed = match endpoint.authorization_type() {


@ -4,14 +4,13 @@ use serde::{Deserialize, Serialize};
use garage_table::{EnumerationOrder, TableSchema};
use garage_model::k2v::causality::*;
use garage_model::k2v::item_table::*;
use garage_api_common::helpers::*;
use crate::api_server::{ReqBody, ResBody};
use crate::error::*;
use crate::item::parse_causality_token;
use crate::range::read_range;
use crate::helpers::*;
use crate::k2v::api_server::{ReqBody, ResBody};
use crate::k2v::error::*;
use crate::k2v::range::read_range;
pub async fn handle_insert_batch(
ctx: ReqCtx,
@ -24,7 +23,7 @@ pub async fn handle_insert_batch(
let mut items2 = vec![];
for it in items {
let ct = it.ct.map(|s| parse_causality_token(&s)).transpose()?;
let ct = it.ct.map(|s| CausalContext::parse_helper(&s)).transpose()?;
let v = match it.v {
Some(vs) => DvvsValue::Value(
BASE64_STANDARD
@ -282,8 +281,7 @@ pub(crate) async fn handle_poll_range(
query.seen_marker,
timeout_msec,
)
.await
.map_err(pass_helper_error)?;
.await?;
if let Some((items, seen_marker)) = resp {
let resp = PollRangeResponse {


@ -2,21 +2,18 @@ use err_derive::Error;
use hyper::header::HeaderValue;
use hyper::{HeaderMap, StatusCode};
use garage_api_common::common_error::{commonErrorDerivative, CommonError};
pub(crate) use garage_api_common::common_error::{helper_error_as_internal, pass_helper_error};
pub use garage_api_common::common_error::{
CommonErrorDerivative, OkOrBadRequest, OkOrInternalError,
};
use garage_api_common::generic_server::ApiError;
use garage_api_common::helpers::*;
use garage_api_common::signature::error::Error as SignatureError;
use crate::common_error::CommonError;
pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
use crate::generic_server::ApiError;
use crate::helpers::*;
use crate::signature::error::Error as SignatureError;
/// Errors of this crate
#[derive(Debug, Error)]
pub enum Error {
#[error(display = "{}", _0)]
/// Error from common error
Common(#[error(source)] CommonError),
Common(CommonError),
// Category: cannot process
/// Authorization Header Malformed
@ -31,10 +28,6 @@ pub enum Error {
#[error(display = "Invalid base64: {}", _0)]
InvalidBase64(#[error(source)] base64::DecodeError),
/// Invalid causality token
#[error(display = "Invalid causality token")]
InvalidCausalityToken,
/// The client asked for an invalid return format (invalid Accept header)
#[error(display = "Not acceptable: {}", _0)]
NotAcceptable(String),
@ -44,7 +37,16 @@ pub enum Error {
InvalidUtf8Str(#[error(source)] std::str::Utf8Error),
}
commonErrorDerivative!(Error);
impl<T> From<T> for Error
where
CommonError: From<T>,
{
fn from(err: T) -> Self {
Error::Common(CommonError::from(err))
}
}
impl CommonErrorDerivative for Error {}
impl From<SignatureError> for Error {
fn from(err: SignatureError) -> Self {
@ -70,7 +72,6 @@ impl Error {
Error::AuthorizationHeaderMalformed(_) => "AuthorizationHeaderMalformed",
Error::InvalidBase64(_) => "InvalidBase64",
Error::InvalidUtf8Str(_) => "InvalidUtf8String",
Error::InvalidCausalityToken => "CausalityToken",
}
}
}
@ -84,8 +85,7 @@ impl ApiError for Error {
Error::NotAcceptable(_) => StatusCode::NOT_ACCEPTABLE,
Error::AuthorizationHeaderMalformed(_)
| Error::InvalidBase64(_)
| Error::InvalidUtf8Str(_)
| Error::InvalidCausalityToken => StatusCode::BAD_REQUEST,
| Error::InvalidUtf8Str(_) => StatusCode::BAD_REQUEST,
}
}


@ -5,11 +5,10 @@ use garage_table::util::*;
use garage_model::k2v::item_table::{BYTES, CONFLICTS, ENTRIES, VALUES};
use garage_api_common::helpers::*;
use crate::api_server::ResBody;
use crate::error::*;
use crate::range::read_range;
use crate::helpers::*;
use crate::k2v::api_server::ResBody;
use crate::k2v::error::*;
use crate::k2v::range::read_range;
pub async fn handle_read_index(
ctx: ReqCtx,


@ -6,10 +6,9 @@ use hyper::{Request, Response, StatusCode};
use garage_model::k2v::causality::*;
use garage_model::k2v::item_table::*;
use garage_api_common::helpers::*;
use crate::api_server::{ReqBody, ResBody};
use crate::error::*;
use crate::helpers::*;
use crate::k2v::api_server::{ReqBody, ResBody};
use crate::k2v::error::*;
pub const X_GARAGE_CAUSALITY_TOKEN: &str = "X-Garage-Causality-Token";
@ -19,10 +18,6 @@ pub enum ReturnFormat {
Either,
}
pub(crate) fn parse_causality_token(s: &str) -> Result<CausalContext, Error> {
CausalContext::parse(s).ok_or(Error::InvalidCausalityToken)
}
impl ReturnFormat {
pub fn from(req: &Request<ReqBody>) -> Result<Self, Error> {
let accept = match req.headers().get(header::ACCEPT) {
@ -141,7 +136,7 @@ pub async fn handle_insert_item(
.get(X_GARAGE_CAUSALITY_TOKEN)
.map(|s| s.to_str())
.transpose()?
.map(parse_causality_token)
.map(CausalContext::parse_helper)
.transpose()?;
let body = http_body_util::BodyExt::collect(req.into_body())
@ -181,7 +176,7 @@ pub async fn handle_delete_item(
.get(X_GARAGE_CAUSALITY_TOKEN)
.map(|s| s.to_str())
.transpose()?
.map(parse_causality_token)
.map(CausalContext::parse_helper)
.transpose()?;
let value = DvvsValue::Deleted;


@ -1,6 +1,3 @@
#[macro_use]
extern crate tracing;
pub mod api_server;
mod error;
mod router;


@ -7,9 +7,8 @@ use std::sync::Arc;
use garage_table::replication::TableShardedReplication;
use garage_table::*;
use garage_api_common::helpers::key_after_prefix;
use crate::error::*;
use crate::helpers::key_after_prefix;
use crate::k2v::error::*;
/// Read range in a Garage table.
/// Returns (entries, more?, nextStart)


@ -1,11 +1,11 @@
use crate::error::*;
use crate::k2v::error::*;
use std::borrow::Cow;
use hyper::{Method, Request};
use garage_api_common::helpers::Authorization;
use garage_api_common::router_macros::{generateQueryParameters, router_match};
use crate::helpers::Authorization;
use crate::router_macros::{generateQueryParameters, router_match};
router_match! {@func

src/api/lib.rs (new file)

@ -0,0 +1,17 @@
//! Crate for serving a S3 compatible API
#[macro_use]
extern crate tracing;
pub mod common_error;
mod encoding;
pub mod generic_server;
pub mod helpers;
mod router_macros;
/// This mode is public only to help testing. Don't expect stability here
pub mod signature;
pub mod admin;
#[cfg(feature = "k2v")]
pub mod k2v;
pub mod s3;


@ -1,6 +1,5 @@
/// This macro is used to generate very repetitive match {} blocks in this module
/// It is _not_ made to be used anywhere else
#[macro_export]
macro_rules! router_match {
(@match $enum:expr , [ $($endpoint:ident,)* ]) => {{
// usage: router_match {@match my_enum, [ VariantWithField1, VariantWithField2 ..] }
@ -134,7 +133,6 @@ macro_rules! router_match {
/// This macro is used to generate part of the code in this module. It must be called only one, and
/// is useless outside of this module.
#[macro_export]
macro_rules! generateQueryParameters {
(
keywords: [ $($kw_param:expr => $kw_name: ident),* ],
@ -222,5 +220,5 @@ macro_rules! generateQueryParameters {
}
}
pub use generateQueryParameters;
pub use router_match;
pub(crate) use generateQueryParameters;
pub(crate) use router_match;


@ -14,33 +14,33 @@ use garage_util::socket_address::UnixOrTCPSocketAddress;
use garage_model::garage::Garage;
use garage_model::key_table::Key;
use garage_api_common::cors::*;
use garage_api_common::generic_server::*;
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_request;
use crate::generic_server::*;
use crate::s3::error::*;
use crate::bucket::*;
use crate::copy::*;
use crate::cors::*;
use crate::delete::*;
use crate::error::*;
use crate::get::*;
use crate::lifecycle::*;
use crate::list::*;
use crate::multipart::*;
use crate::post_object::handle_post_object;
use crate::put::*;
use crate::router::Endpoint;
use crate::website::*;
use crate::signature::verify_request;
pub use garage_api_common::signature::streaming::ReqBody;
use crate::helpers::*;
use crate::s3::bucket::*;
use crate::s3::copy::*;
use crate::s3::cors::*;
use crate::s3::delete::*;
use crate::s3::get::*;
use crate::s3::lifecycle::*;
use crate::s3::list::*;
use crate::s3::multipart::*;
use crate::s3::post_object::handle_post_object;
use crate::s3::put::*;
use crate::s3::router::Endpoint;
use crate::s3::website::*;
pub use crate::signature::streaming::ReqBody;
pub type ResBody = BoxBody<Error>;
pub struct S3ApiServer {
garage: Arc<Garage>,
}
pub struct S3ApiEndpoint {
pub(crate) struct S3ApiEndpoint {
bucket_name: Option<String>,
endpoint: Endpoint,
}
@ -150,8 +150,7 @@ impl ApiHandler for S3ApiServer {
let bucket_id = garage
.bucket_helper()
.resolve_bucket(&bucket_name, &api_key)
.await
.map_err(pass_helper_error)?;
.await?;
let bucket = garage
.bucket_helper()
.get_existing_bucket(bucket_id)


@ -13,13 +13,12 @@ use garage_util::crdt::*;
use garage_util::data::*;
use garage_util::time::*;
use garage_api_common::common_error::CommonError;
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_signed_content;
use crate::api_server::{ReqBody, ResBody};
use crate::error::*;
use crate::xml as s3_xml;
use crate::common_error::CommonError;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*;
use crate::s3::xml as s3_xml;
use crate::signature::verify_signed_content;
pub fn handle_get_bucket_location(ctx: ReqCtx) -> Result<Response<ResBody>, Error> {
let ReqCtx { garage, .. } = ctx;


@ -15,7 +15,7 @@ use garage_util::error::OkOrMessage;
use garage_model::s3::object_table::*;
use crate::error::*;
use crate::s3::error::*;
pub const X_AMZ_CHECKSUM_ALGORITHM: HeaderName =
HeaderName::from_static("x-amz-checksum-algorithm");


@ -20,16 +20,15 @@ use garage_model::s3::mpu_table::*;
use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*;
use garage_api_common::helpers::*;
use crate::api_server::{ReqBody, ResBody};
use crate::checksum::*;
use crate::encryption::EncryptionParams;
use crate::error::*;
use crate::get::full_object_byte_stream;
use crate::multipart;
use crate::put::{get_headers, save_stream, ChecksumMode, SaveStreamResult};
use crate::xml::{self as s3_xml, xmlns_tag};
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::checksum::*;
use crate::s3::encryption::EncryptionParams;
use crate::s3::error::*;
use crate::s3::get::full_object_byte_stream;
use crate::s3::multipart;
use crate::s3::put::{get_headers, save_stream, ChecksumMode, SaveStreamResult};
use crate::s3::xml::{self as s3_xml, xmlns_tag};
// -------- CopyObject ---------
@ -656,8 +655,7 @@ async fn get_copy_source(ctx: &ReqCtx, req: &Request<ReqBody>) -> Result<Object,
let source_bucket_id = garage
.bucket_helper()
.resolve_bucket(&source_bucket.to_string(), api_key)
.await
.map_err(pass_helper_error)?;
.await?;
if !api_key.allow_read(&source_bucket_id) {
return Err(Error::forbidden(format!(
@ -863,7 +861,7 @@ pub struct CopyPartResult {
#[cfg(test)]
mod tests {
use super::*;
use crate::xml::to_xml_with_header;
use crate::s3::xml::to_xml_with_header;
#[test]
fn copy_object_result() -> Result<(), Error> {


@ -1,21 +1,30 @@
use quick_xml::de::from_reader;
use std::sync::Arc;
use hyper::{header::HeaderName, Method, Request, Response, StatusCode};
use http::header::{
ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN,
ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD,
};
use hyper::{
body::Body, body::Incoming as IncomingBody, header::HeaderName, Method, Request, Response,
StatusCode,
};
use http_body_util::BodyExt;
use serde::{Deserialize, Serialize};
use garage_model::bucket_table::{Bucket, CorsRule as GarageCorsRule};
use crate::common_error::CommonError;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*;
use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
use crate::signature::verify_signed_content;
use garage_model::bucket_table::{Bucket, BucketParams, CorsRule as GarageCorsRule};
use garage_model::garage::Garage;
use garage_util::data::*;
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_signed_content;
use crate::api_server::{ReqBody, ResBody};
use crate::error::*;
use crate::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
pub async fn handle_get_cors(ctx: ReqCtx) -> Result<Response<ResBody>, Error> {
let ReqCtx { bucket_params, .. } = ctx;
if let Some(cors) = bucket_params.cors_config.get() {
@ -90,6 +99,154 @@ pub async fn handle_put_cors(
.body(empty_body())?)
}
pub async fn handle_options_api(
garage: Arc<Garage>,
req: &Request<IncomingBody>,
bucket_name: Option<String>,
) -> Result<Response<EmptyBody>, CommonError> {
// FIXME: CORS rules of buckets with local aliases are
// not taken into account.
// If the bucket name is a global bucket name,
// we try to apply the CORS rules of that bucket.
// If a user has a local bucket name that has
// the same name, its CORS rules won't be applied
// and will be shadowed by the rules of the globally
// existing bucket (but this is inevitable because
// OPTIONS calls are not authenticated).
if let Some(bn) = bucket_name {
let helper = garage.bucket_helper();
let bucket_id = helper.resolve_global_bucket_name(&bn).await?;
if let Some(id) = bucket_id {
let bucket = garage.bucket_helper().get_existing_bucket(id).await?;
let bucket_params = bucket.state.into_option().unwrap();
handle_options_for_bucket(req, &bucket_params)
} else {
// If there is a bucket name in the request, but that name
// does not correspond to a global alias for a bucket,
// then it's either a non-existing bucket or a local bucket.
// We have no way of knowing, because the request is not
// authenticated and thus we can't resolve local aliases.
// We take the permissive approach of allowing everything,
// because we don't want to prevent web apps that use
// local bucket names from making API calls.
Ok(Response::builder()
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
.header(ACCESS_CONTROL_ALLOW_METHODS, "*")
.status(StatusCode::OK)
.body(EmptyBody::new())?)
}
} else {
// If there is no bucket name in the request,
// we are doing a ListBuckets call, which we want to allow
// for all origins.
Ok(Response::builder()
.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
.header(ACCESS_CONTROL_ALLOW_METHODS, "GET")
.status(StatusCode::OK)
.body(EmptyBody::new())?)
}
}
pub fn handle_options_for_bucket(
req: &Request<IncomingBody>,
bucket_params: &BucketParams,
) -> Result<Response<EmptyBody>, CommonError> {
let origin = req
.headers()
.get("Origin")
.ok_or_bad_request("Missing Origin header")?
.to_str()?;
let request_method = req
.headers()
.get(ACCESS_CONTROL_REQUEST_METHOD)
.ok_or_bad_request("Missing Access-Control-Request-Method header")?
.to_str()?;
let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
None => vec![],
};
if let Some(cors_config) = bucket_params.cors_config.get() {
let matching_rule = cors_config
.iter()
.find(|rule| cors_rule_matches(rule, origin, request_method, request_headers.iter()));
if let Some(rule) = matching_rule {
let mut resp = Response::builder()
.status(StatusCode::OK)
.body(EmptyBody::new())?;
add_cors_headers(&mut resp, rule).ok_or_internal_error("Invalid CORS configuration")?;
return Ok(resp);
}
}
Err(CommonError::Forbidden(
"This CORS request is not allowed.".into(),
))
}
pub fn find_matching_cors_rule<'a>(
bucket_params: &'a BucketParams,
req: &Request<impl Body>,
) -> Result<Option<&'a GarageCorsRule>, Error> {
if let Some(cors_config) = bucket_params.cors_config.get() {
if let Some(origin) = req.headers().get("Origin") {
let origin = origin.to_str()?;
let request_headers = match req.headers().get(ACCESS_CONTROL_REQUEST_HEADERS) {
Some(h) => h.to_str()?.split(',').map(|h| h.trim()).collect::<Vec<_>>(),
None => vec![],
};
return Ok(cors_config.iter().find(|rule| {
cors_rule_matches(rule, origin, req.method().as_ref(), request_headers.iter())
}));
}
}
Ok(None)
}
fn cors_rule_matches<'a, HI, S>(
rule: &GarageCorsRule,
origin: &'a str,
method: &'a str,
mut request_headers: HI,
) -> bool
where
HI: Iterator<Item = S>,
S: AsRef<str>,
{
rule.allow_origins.iter().any(|x| x == "*" || x == origin)
&& rule.allow_methods.iter().any(|x| x == "*" || x == method)
&& request_headers.all(|h| {
rule.allow_headers
.iter()
.any(|x| x == "*" || x == h.as_ref())
})
}
pub fn add_cors_headers(
resp: &mut Response<impl Body>,
rule: &GarageCorsRule,
) -> Result<(), http::header::InvalidHeaderValue> {
let h = resp.headers_mut();
h.insert(
ACCESS_CONTROL_ALLOW_ORIGIN,
rule.allow_origins.join(", ").parse()?,
);
h.insert(
ACCESS_CONTROL_ALLOW_METHODS,
rule.allow_methods.join(", ").parse()?,
);
h.insert(
ACCESS_CONTROL_ALLOW_HEADERS,
rule.allow_headers.join(", ").parse()?,
);
h.insert(
ACCESS_CONTROL_EXPOSE_HEADERS,
rule.expose_headers.join(", ").parse()?,
);
Ok(())
}
// ---- SERIALIZATION AND DESERIALIZATION TO/FROM S3 XML ----
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
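
A rule in the matching logic above accepts a request when the origin, the method, and every requested header are each covered, with "*" acting as a wildcard. The following standalone sketch reproduces that check against a stripped-down rule type that keeps only the fields read here (the real CorsRule in garage_model::bucket_table carries more fields, so this is an illustration, not the actual type):

// Minimal stand-in for the bucket CORS rule, keeping only the
// fields that the matching logic reads.
struct Rule {
    allow_origins: Vec<String>,
    allow_methods: Vec<String>,
    allow_headers: Vec<String>,
}

// Same shape as cors_rule_matches above: origin and method must match
// exactly or via "*", and every requested header must be allowed.
fn rule_matches(rule: &Rule, origin: &str, method: &str, request_headers: &[&str]) -> bool {
    rule.allow_origins.iter().any(|x| x == "*" || x == origin)
        && rule.allow_methods.iter().any(|x| x == "*" || x == method)
        && request_headers
            .iter()
            .all(|h| rule.allow_headers.iter().any(|x| x == "*" || x.as_str() == *h))
}

fn main() {
    let rule = Rule {
        allow_origins: vec!["https://app.example.com".into()],
        allow_methods: vec!["GET".into(), "PUT".into()],
        allow_headers: vec!["*".into()],
    };
    assert!(rule_matches(&rule, "https://app.example.com", "PUT", &["content-type"]));
    assert!(!rule_matches(&rule, "https://other.example", "PUT", &[]));
    assert!(!rule_matches(&rule, "https://app.example.com", "DELETE", &[]));
}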


@ -5,13 +5,12 @@ use garage_util::data::*;
use garage_model::s3::object_table::*;
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_signed_content;
use crate::api_server::{ReqBody, ResBody};
use crate::error::*;
use crate::put::next_timestamp;
use crate::xml as s3_xml;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*;
use crate::s3::put::next_timestamp;
use crate::s3::xml as s3_xml;
use crate::signature::verify_signed_content;
async fn handle_delete_internal(ctx: &ReqCtx, key: &str) -> Result<(Uuid, Uuid), Error> {
let ReqCtx {


@ -28,10 +28,9 @@ use garage_util::migrate::Migrate;
use garage_model::garage::Garage;
use garage_model::s3::object_table::{ObjectVersionEncryption, ObjectVersionMetaInner};
use garage_api_common::common_error::*;
use crate::checksum::Md5Checksum;
use crate::error::Error;
use crate::common_error::*;
use crate::s3::checksum::Md5Checksum;
use crate::s3::error::Error;
const X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_ALGORITHM: HeaderName =
HeaderName::from_static("x-amz-server-side-encryption-customer-algorithm");


@ -4,30 +4,19 @@ use err_derive::Error;
use hyper::header::HeaderValue;
use hyper::{HeaderMap, StatusCode};
use garage_model::helper::error::Error as HelperError;
pub(crate) use garage_api_common::common_error::pass_helper_error;
use garage_api_common::common_error::{
commonErrorDerivative, helper_error_as_internal, CommonError,
};
pub use garage_api_common::common_error::{
CommonErrorDerivative, OkOrBadRequest, OkOrInternalError,
};
use garage_api_common::generic_server::ApiError;
use garage_api_common::helpers::*;
use garage_api_common::signature::error::Error as SignatureError;
use crate::xml as s3_xml;
use crate::common_error::CommonError;
pub use crate::common_error::{CommonErrorDerivative, OkOrBadRequest, OkOrInternalError};
use crate::generic_server::ApiError;
use crate::helpers::*;
use crate::s3::xml as s3_xml;
use crate::signature::error::Error as SignatureError;
/// Errors of this crate
#[derive(Debug, Error)]
pub enum Error {
#[error(display = "{}", _0)]
/// Error from common error
Common(#[error(source)] CommonError),
Common(CommonError),
// Category: cannot process
/// Authorization Header Malformed
@ -89,16 +78,17 @@ pub enum Error {
NotImplemented(String),
}
commonErrorDerivative!(Error);
// Helper errors are always passed as internal errors by default.
// To pass the specific error code back to the client, use `pass_helper_error`.
impl From<HelperError> for Error {
fn from(err: HelperError) -> Error {
Error::Common(helper_error_as_internal(err))
impl<T> From<T> for Error
where
CommonError: From<T>,
{
fn from(err: T) -> Self {
Error::Common(CommonError::from(err))
}
}
impl CommonErrorDerivative for Error {}
impl From<roxmltree::Error> for Error {
fn from(err: roxmltree::Error) -> Self {
Self::InvalidXml(format!("{}", err))
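
One of the two variants of the error plumbing shown above relies on a blanket conversion, impl<T> From<T> for Error where CommonError: From<T>, so that any error already convertible into the common error type also converts into the S3 error with a plain `?`. A self-contained sketch of that pattern with toy types (the names and variants are placeholders, not Garage's):

// Toy source error, standing in for e.g. a database error.
#[derive(Debug)]
struct DbError(String);

// Plays the role of CommonError: the shared error type that
// lower-level errors already convert into.
#[derive(Debug)]
enum CommonError {
    Db(DbError),
}

impl From<DbError> for CommonError {
    fn from(e: DbError) -> Self {
        CommonError::Db(e)
    }
}

// Plays the role of the S3 Error: the blanket impl forwards every T
// that CommonError accepts, so one `?` covers DbError, CommonError,
// and any future source error added behind CommonError.
#[derive(Debug)]
enum Error {
    Common(CommonError),
}

impl<T> From<T> for Error
where
    CommonError: From<T>,
{
    fn from(err: T) -> Self {
        Error::Common(CommonError::from(err))
    }
}

fn load() -> Result<(), DbError> {
    Err(DbError("disk full".into()))
}

fn handler() -> Result<(), Error> {
    load()?; // DbError -> CommonError -> Error via the blanket impl
    Ok(())
}

fn main() {
    println!("{:?}", handler());
}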


@ -25,12 +25,11 @@ use garage_model::garage::Garage;
use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*;
use garage_api_common::helpers::*;
use crate::api_server::ResBody;
use crate::checksum::{add_checksum_response_headers, X_AMZ_CHECKSUM_MODE};
use crate::encryption::EncryptionParams;
use crate::error::*;
use crate::helpers::*;
use crate::s3::api_server::ResBody;
use crate::s3::checksum::{add_checksum_response_headers, X_AMZ_CHECKSUM_MODE};
use crate::s3::encryption::EncryptionParams;
use crate::s3::error::*;
const X_AMZ_MP_PARTS_COUNT: &str = "x-amz-mp-parts-count";
@ -69,11 +68,14 @@ fn object_headers(
// See: https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html
let mut headers_by_name = BTreeMap::new();
for (name, value) in meta_inner.headers.iter() {
let name_lower = name.to_ascii_lowercase();
headers_by_name
.entry(name_lower)
.or_insert(vec![])
.push(value.as_str());
match headers_by_name.get_mut(name) {
None => {
headers_by_name.insert(name, vec![value.as_str()]);
}
Some(headers) => {
headers.push(value.as_str());
}
}
}
for (name, values) in headers_by_name {
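
The two versions of this loop differ in whether duplicate x-amz-meta-* names are grouped under the stored name or under its lowercased form before the values are folded into a single response header. A small standard-library-only sketch of the case-insensitive grouping (the sample data is made up):

use std::collections::BTreeMap;

fn main() {
    // Stored user-metadata headers, possibly saved with mixed case.
    let meta = vec![
        ("X-Amz-Meta-Team", "storage"),
        ("x-amz-meta-team", "backend"),
        ("x-amz-meta-owner", "alice"),
    ];

    // Group values under the lowercased header name.
    let mut headers_by_name: BTreeMap<String, Vec<&str>> = BTreeMap::new();
    for (name, value) in meta {
        headers_by_name
            .entry(name.to_ascii_lowercase())
            .or_insert_with(Vec::new)
            .push(value);
    }

    // Each group becomes one header line with comma-separated values.
    for (name, values) in &headers_by_name {
        println!("{}: {}", name, values.join(", "));
    }
}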


@ -5,12 +5,11 @@ use hyper::{Request, Response, StatusCode};
use serde::{Deserialize, Serialize};
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_signed_content;
use crate::api_server::{ReqBody, ResBody};
use crate::error::*;
use crate::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*;
use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
use crate::signature::verify_signed_content;
use garage_model::bucket_table::{
parse_lifecycle_date, Bucket, LifecycleExpiration as GarageLifecycleExpiration,


@ -13,14 +13,13 @@ use garage_model::s3::object_table::*;
use garage_table::EnumerationOrder;
use garage_api_common::encoding::*;
use garage_api_common::helpers::*;
use crate::api_server::{ReqBody, ResBody};
use crate::encryption::EncryptionParams;
use crate::error::*;
use crate::multipart as s3_multipart;
use crate::xml as s3_xml;
use crate::encoding::*;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::encryption::EncryptionParams;
use crate::s3::error::*;
use crate::s3::multipart as s3_multipart;
use crate::s3::xml as s3_xml;
const DUMMY_NAME: &str = "Dummy Key";
const DUMMY_KEY: &str = "GKDummyKey";


@ -1,6 +1,3 @@
#[macro_use]
extern crate tracing;
pub mod api_server;
pub mod error;


@ -15,15 +15,14 @@ use garage_model::s3::mpu_table::*;
use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*;
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_signed_content;
use crate::api_server::{ReqBody, ResBody};
use crate::checksum::*;
use crate::encryption::EncryptionParams;
use crate::error::*;
use crate::put::*;
use crate::xml as s3_xml;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::checksum::*;
use crate::s3::encryption::EncryptionParams;
use crate::s3::error::*;
use crate::s3::put::*;
use crate::s3::xml as s3_xml;
use crate::signature::verify_signed_content;
// ----


@ -16,16 +16,15 @@ use serde::Deserialize;
use garage_model::garage::Garage;
use garage_model::s3::object_table::*;
use garage_api_common::cors::*;
use garage_api_common::helpers::*;
use garage_api_common::signature::payload::{verify_v4, Authorization};
use crate::api_server::ResBody;
use crate::checksum::*;
use crate::encryption::EncryptionParams;
use crate::error::*;
use crate::put::{get_headers, save_stream, ChecksumMode};
use crate::xml as s3_xml;
use crate::helpers::*;
use crate::s3::api_server::ResBody;
use crate::s3::checksum::*;
use crate::s3::cors::*;
use crate::s3::encryption::EncryptionParams;
use crate::s3::error::*;
use crate::s3::put::{get_headers, save_stream, ChecksumMode};
use crate::s3::xml as s3_xml;
use crate::signature::payload::{verify_v4, Authorization};
pub async fn handle_post_object(
garage: Arc<Garage>,
@ -108,8 +107,7 @@ pub async fn handle_post_object(
let bucket_id = garage
.bucket_helper()
.resolve_bucket(&bucket_name, &api_key)
.await
.map_err(pass_helper_error)?;
.await?;
if !api_key.allow_write(&bucket_id) {
return Err(Error::forbidden("Operation is not allowed for this key."));


@ -30,12 +30,11 @@ use garage_model::s3::block_ref_table::*;
use garage_model::s3::object_table::*;
use garage_model::s3::version_table::*;
use garage_api_common::helpers::*;
use crate::api_server::{ReqBody, ResBody};
use crate::checksum::*;
use crate::encryption::EncryptionParams;
use crate::error::*;
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::checksum::*;
use crate::s3::encryption::EncryptionParams;
use crate::s3::error::*;
const PUT_BLOCKS_MAX_PARALLEL: usize = 3;
@ -623,7 +622,7 @@ pub(crate) fn get_headers(headers: &HeaderMap<HeaderValue>) -> Result<HeaderList
for (name, value) in headers.iter() {
if name.as_str().starts_with("x-amz-meta-") {
ret.push((
name.as_str().to_ascii_lowercase(),
name.to_string(),
std::str::from_utf8(value.as_bytes())?.to_string(),
));
}


@ -3,10 +3,9 @@ use std::borrow::Cow;
use hyper::header::HeaderValue;
use hyper::{HeaderMap, Method, Request};
use garage_api_common::helpers::Authorization;
use garage_api_common::router_macros::{generateQueryParameters, router_match};
use crate::error::*;
use crate::helpers::Authorization;
use crate::router_macros::{generateQueryParameters, router_match};
use crate::s3::error::*;
router_match! {@func


@ -4,16 +4,15 @@ use http_body_util::BodyExt;
use hyper::{Request, Response, StatusCode};
use serde::{Deserialize, Serialize};
use crate::helpers::*;
use crate::s3::api_server::{ReqBody, ResBody};
use crate::s3::error::*;
use crate::s3::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
use crate::signature::verify_signed_content;
use garage_model::bucket_table::*;
use garage_util::data::*;
use garage_api_common::helpers::*;
use garage_api_common::signature::verify_signed_content;
use crate::api_server::{ReqBody, ResBody};
use crate::error::*;
use crate::xml::{to_xml_with_header, xmlns_tag, IntValue, Value};
pub async fn handle_get_website(ctx: ReqCtx) -> Result<Response<ResBody>, Error> {
let ReqCtx { bucket_params, .. } = ctx;
if let Some(website) = bucket_params.website_config.get() {


@ -1,7 +1,7 @@
use quick_xml::se::to_string;
use serde::{Deserialize, Serialize, Serializer};
use crate::error::Error as ApiError;
use crate::s3::error::Error as ApiError;
pub fn to_xml_with_header<T: Serialize>(x: &T) -> Result<String, ApiError> {
let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string();


@ -518,7 +518,7 @@ impl Authorization {
})
}
pub fn parse_form(params: &HeaderMap) -> Result<Self, Error> {
pub(crate) fn parse_form(params: &HeaderMap) -> Result<Self, Error> {
let algorithm = params
.get(X_AMZ_ALGORITHM)
.ok_or_bad_request("Missing X-Amz-Algorithm header")?


@ -34,8 +34,10 @@ async-compression.workspace = true
zstd.workspace = true
serde.workspace = true
serde_bytes.workspace = true
futures.workspace = true
futures-util.workspace = true
tokio.workspace = true
tokio-util.workspace = true


@ -13,6 +13,7 @@ path = "lib.rs"
[dependencies]
err-derive.workspace = true
hexdump.workspace = true
tracing.workspace = true
heed = { workspace = true, optional = true }


@ -144,12 +144,9 @@ impl IDb for SqliteDb {
let percent = (p.pagecount - p.remaining) * 100 / p.pagecount;
info!("Sqlite snapshot progress: {}%", percent);
}
std::fs::create_dir_all(to)?;
let mut path = to.clone();
path.push("db.sqlite");
self.db
.get()?
.backup(rusqlite::DatabaseName::Main, path, Some(progress))?;
.backup(rusqlite::DatabaseName::Main, to, Some(progress))?;
Ok(())
}


@ -23,9 +23,7 @@ path = "tests/lib.rs"
[dependencies]
format_table.workspace = true
garage_db.workspace = true
garage_api_admin.workspace = true
garage_api_s3.workspace = true
garage_api_k2v = { workspace = true, optional = true }
garage_api.workspace = true
garage_block.workspace = true
garage_model.workspace = true
garage_net.workspace = true
@ -42,6 +40,7 @@ parse_duration.workspace = true
hex.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
rand.workspace = true
async-trait.workspace = true
sha1.workspace = true
sodiumoxide.workspace = true
@ -49,18 +48,21 @@ structopt.workspace = true
git-version.workspace = true
serde.workspace = true
serde_bytes.workspace = true
toml.workspace = true
futures.workspace = true
futures-util.workspace = true
tokio.workspace = true
opentelemetry.workspace = true
opentelemetry-prometheus = { workspace = true, optional = true }
opentelemetry-otlp = { workspace = true, optional = true }
prometheus = { workspace = true, optional = true }
syslog-tracing = { workspace = true, optional = true }
[dev-dependencies]
garage_api_common.workspace = true
aws-config.workspace = true
aws-sdk-s3.workspace = true
chrono.workspace = true
http.workspace = true
@ -82,7 +84,7 @@ k2v-client.workspace = true
[features]
default = [ "bundled-libs", "metrics", "lmdb", "sqlite", "k2v" ]
k2v = [ "garage_util/k2v", "garage_api_k2v" ]
k2v = [ "garage_util/k2v", "garage_api/k2v" ]
# Database engines
lmdb = [ "garage_model/lmdb" ]
@ -93,7 +95,7 @@ consul-discovery = [ "garage_rpc/consul-discovery" ]
# Automatic registration and discovery via Kubernetes API
kubernetes-discovery = [ "garage_rpc/kubernetes-discovery" ]
# Prometheus exporter (/metrics endpoint).
metrics = [ "garage_api_admin/metrics", "opentelemetry-prometheus" ]
metrics = [ "garage_api/metrics", "opentelemetry-prometheus", "prometheus" ]
# Exporter for the OpenTelemetry Collector.
telemetry-otlp = [ "opentelemetry-otlp" ]
# Logging to syslog


@ -482,7 +482,7 @@ impl AdminRpcHandler {
AdminRpc::MetaOperation(MetaOperation::Snapshot { all: false }),
PRIO_NORMAL,
)
.await
.await?
}))
.await;
@ -495,7 +495,14 @@ impl AdminRpcHandler {
ret.push(format!("{:?}\t{}", to, res_str));
}
Ok(AdminRpc::Ok(format_table_to_string(ret)))
if resps.iter().any(|resp| match resp {
Err(_) => true,
Ok(_) => false,
}) {
Err(Error::BadRequest(format_table_to_string(ret)).into())
} else {
Ok(AdminRpc::Ok(format_table_to_string(ret)))
}
}
MetaOperation::Snapshot { all: false } => {
garage_model::snapshot::async_snapshot_metadata(&self.garage).await?;
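
In the hunk above, the handler keeps one Result per node, renders a status line for every node, and in one variant returns an error for the whole call as soon as any node failed. A minimal sketch of that aggregation pattern (types and messages are placeholders, not Garage's):

fn main() {
    // One result per remote node, as collected from the RPC calls.
    let resps: Vec<Result<&str, String>> = vec![
        Ok("ok"),
        Err("snapshot failed: disk full".to_string()),
    ];

    // Render a per-node status line whatever the outcome was.
    let mut ret = Vec::new();
    for (i, res) in resps.iter().enumerate() {
        let line = match res {
            Ok(s) => format!("node{}\t{}", i, s),
            Err(e) => format!("node{}\terror: {}", i, e),
        };
        ret.push(line);
    }
    println!("{}", ret.join("\n"));

    // Fail the whole operation if any single node failed;
    // `.any(|r| r.is_err())` is equivalent to the explicit match above.
    if resps.iter().any(|r| r.is_err()) {
        std::process::exit(1);
    }
}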


@ -6,13 +6,13 @@ use garage_util::background::*;
use garage_util::config::*;
use garage_util::error::Error;
use garage_api_admin::api_server::AdminApiServer;
use garage_api_s3::api_server::S3ApiServer;
use garage_api::admin::api_server::AdminApiServer;
use garage_api::s3::api_server::S3ApiServer;
use garage_model::garage::Garage;
use garage_web::WebServer;
#[cfg(feature = "k2v")]
use garage_api_k2v::api_server::K2VApiServer;
use garage_api::k2v::api_server::K2VApiServer;
use crate::admin::*;
use crate::secrets::{fill_secrets, Secrets};


@ -15,7 +15,7 @@ use hyper_util::client::legacy::{connect::HttpConnector, Client};
use hyper_util::rt::TokioExecutor;
use super::garage::{Instance, Key};
use garage_api_common::signature;
use garage_api::signature;
pub type Body = FullBody<hyper::body::Bytes>;


@ -29,11 +29,12 @@ tokio.workspace = true
# cli deps
clap = { workspace = true, optional = true }
format_table = { workspace = true, optional = true }
tracing = { workspace = true, optional = true }
tracing-subscriber = { workspace = true, optional = true }
[features]
cli = ["clap", "tokio/fs", "tokio/io-std", "tracing-subscriber", "format_table"]
cli = ["clap", "tokio/fs", "tokio/io-std", "tracing", "tracing-subscriber", "format_table"]
[lib]
path = "lib.rs"


@ -22,6 +22,7 @@ garage_util.workspace = true
garage_net.workspace = true
async-trait.workspace = true
arc-swap.workspace = true
blake2.workspace = true
chrono.workspace = true
err-derive.workspace = true
@ -37,7 +38,9 @@ serde.workspace = true
serde_bytes.workspace = true
futures.workspace = true
futures-util.workspace = true
tokio.workspace = true
opentelemetry.workspace = true
[features]
default = [ "lmdb", "sqlite" ]


@ -16,6 +16,8 @@ use serde::{Deserialize, Serialize};
use garage_util::data::*;
use crate::helper::error::{Error as HelperError, OkOrBadRequest};
/// Node IDs used in K2V are u64 integers that are the abbreviation
/// of full Garage node IDs which are 256-bit UUIDs.
pub type K2VNodeId = u64;
@ -97,6 +99,10 @@ impl CausalContext {
Some(ret)
}
pub fn parse_helper(s: &str) -> Result<Self, HelperError> {
Self::parse(s).ok_or_bad_request("Invalid causality token")
}
/// Check if this causal context contains newer items than another one
pub fn is_newer_than(&self, other: &Self) -> bool {
vclock_gt(&self.vector_clock, &other.vector_clock)


@ -41,14 +41,8 @@ pub fn snapshot_metadata(garage: &Garage) -> Result<(), Error> {
}
};
let snapshots_dir = match &garage.config.metadata_snapshots_dir {
Some(d) => d.clone(),
None => {
let mut default_snapshots_dir = garage.config.metadata_dir.clone();
default_snapshots_dir.push("snapshots");
default_snapshots_dir
}
};
let mut snapshots_dir = garage.config.metadata_dir.clone();
snapshots_dir.push("snapshots");
fs::create_dir_all(&snapshots_dir)?;
let mut new_path = snapshots_dir.clone();
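
One side of this hunk always snapshots under <metadata_dir>/snapshots, while the other lets an optional configured directory override that default. A short sketch of the fallback using only std::path (the paths are examples):

use std::path::{Path, PathBuf};

// Pick the snapshot directory: an explicitly configured path wins,
// otherwise fall back to "<metadata_dir>/snapshots".
fn snapshots_dir(metadata_dir: &Path, configured: Option<&Path>) -> PathBuf {
    match configured {
        Some(d) => d.to_path_buf(),
        None => metadata_dir.join("snapshots"),
    }
}

fn main() {
    let meta = PathBuf::from("/var/lib/garage/meta");
    assert_eq!(
        snapshots_dir(&meta, None),
        PathBuf::from("/var/lib/garage/meta/snapshots")
    );
    let custom = PathBuf::from("/mnt/backup/garage-snapshots");
    assert_eq!(snapshots_dir(&meta, Some(custom.as_path())), custom);
}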


@ -15,10 +15,12 @@ path = "lib.rs"
[dependencies]
format_table.workspace = true
garage_db.workspace = true
garage_util.workspace = true
garage_net.workspace = true
arc-swap.workspace = true
bytes.workspace = true
bytesize.workspace = true
gethostname.workspace = true
hex.workspace = true
@ -44,7 +46,9 @@ reqwest = { workspace = true, optional = true }
pnet_datalink.workspace = true
futures.workspace = true
futures-util.workspace = true
tokio.workspace = true
tokio-stream.workspace = true
opentelemetry.workspace = true
[features]


@ -22,6 +22,7 @@ opentelemetry.workspace = true
async-trait.workspace = true
arc-swap.workspace = true
bytes.workspace = true
hex.workspace = true
hexdump.workspace = true
tracing.workspace = true


@ -204,10 +204,6 @@ impl<F: TableSchema, R: TableReplication> Table<F, R> {
entries_vec.push((write_sets, e_enc));
}
if entries_vec.is_empty() {
return Ok(());
}
// Compute a deduplicated list of all of the write sets,
// and compute an index from each node to the position of the sets in which
// it takes part, to optimize the detection of a quorum.
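
The comment above describes the bookkeeping done before sending the inserts: distinct write sets are deduplicated, and each node is mapped to the positions of the sets it belongs to, so an acknowledgment can be credited to every relevant set when checking quorums. A simplified, standard-library-only illustration of that indexing (node ids and sets are made up):

use std::collections::HashMap;

type NodeId = u64;

fn main() {
    // Each entry is written to one or more "write sets" of nodes.
    let entries_write_sets: Vec<Vec<Vec<NodeId>>> = vec![
        vec![vec![1, 2, 3]],
        vec![vec![1, 2, 3], vec![4, 5, 6]],
        vec![vec![4, 5, 6]],
    ];

    // Deduplicate the write sets so each distinct set is tracked once.
    let mut write_sets: Vec<Vec<NodeId>> = Vec::new();
    for sets in &entries_write_sets {
        for set in sets {
            if !write_sets.contains(set) {
                write_sets.push(set.clone());
            }
        }
    }

    // Index: for each node, the positions of the sets it belongs to,
    // so an acknowledgment from a node counts toward every set it is in.
    let mut node_to_sets: HashMap<NodeId, Vec<usize>> = HashMap::new();
    for (i, set) in write_sets.iter().enumerate() {
        for node in set {
            node_to_sets.entry(*node).or_default().push(i);
        }
    }

    println!("{} distinct write sets", write_sets.len());
    let node: NodeId = 1;
    println!("node {} participates in sets {:?}", node, node_to_sets[&node]);
}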


@ -20,7 +20,9 @@ garage_net.workspace = true
arc-swap.workspace = true
async-trait.workspace = true
blake2.workspace = true
bytes.workspace = true
bytesize.workspace = true
digest.workspace = true
err-derive.workspace = true
hexdump.workspace = true
xxhash-rust.workspace = true


@ -31,9 +31,6 @@ pub struct Config {
#[serde(default)]
pub use_local_tz: bool,
/// Optional directory where metadata snapshots will be stored
pub metadata_snapshots_dir: Option<PathBuf>,
/// Automatic snapshot interval for metadata
#[serde(default)]
pub metadata_auto_snapshot_interval: Option<String>,


@ -14,8 +14,7 @@ path = "lib.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
garage_api_common.workspace = true
garage_api_s3.workspace = true
garage_api.workspace = true
garage_model.workspace = true
garage_util.workspace = true
garage_table.workspace = true
@ -24,9 +23,12 @@ err-derive.workspace = true
tracing.workspace = true
percent-encoding.workspace = true
futures.workspace = true
http.workspace = true
http-body-util.workspace = true
hyper.workspace = true
hyper-util.workspace = true
tokio.workspace = true


@ -2,14 +2,14 @@ use err_derive::Error;
use hyper::header::HeaderValue;
use hyper::{HeaderMap, StatusCode};
use garage_api_common::generic_server::ApiError;
use garage_api::generic_server::ApiError;
/// Errors of this crate
#[derive(Debug, Error)]
pub enum Error {
/// An error received from the API crate
#[error(display = "API error: {}", _0)]
ApiError(garage_api_s3::error::Error),
ApiError(garage_api::s3::error::Error),
/// The file does not exist
#[error(display = "Not found")]
@ -22,10 +22,10 @@ pub enum Error {
impl<T> From<T> for Error
where
garage_api_s3::error::Error: From<T>,
garage_api::s3::error::Error: From<T>,
{
fn from(err: T) -> Self {
Error::ApiError(garage_api_s3::error::Error::from(err))
Error::ApiError(garage_api::s3::error::Error::from(err))
}
}


@ -20,15 +20,13 @@ use opentelemetry::{
use crate::error::*;
use garage_api_common::cors::{
add_cors_headers, find_matching_cors_rule, handle_options_for_bucket,
};
use garage_api_common::generic_server::{server_loop, UnixListenerOn};
use garage_api_common::helpers::*;
use garage_api_s3::error::{
use garage_api::generic_server::{server_loop, UnixListenerOn};
use garage_api::helpers::*;
use garage_api::s3::cors::{add_cors_headers, find_matching_cors_rule, handle_options_for_bucket};
use garage_api::s3::error::{
CommonErrorDerivative, Error as ApiError, OkOrBadRequest, OkOrInternalError,
};
use garage_api_s3::get::{handle_get_without_ctx, handle_head_without_ctx};
use garage_api::s3::get::{handle_get_without_ctx, handle_head_without_ctx};
use garage_model::garage::Garage;