Compare commits

..

2 commits
main ... ws

Author SHA1 Message Date
Quentin 1cb3880966
Better doc 2022-05-10 11:31:25 +02:00
Quentin eabb52a6c0
Doc + example + fix domain/host bug 2022-05-10 11:29:17 +02:00
25 changed files with 1738 additions and 7736 deletions

2
.gitignore vendored
View file

@ -1,5 +1,3 @@
/target /target
result
result-bin
run_local.sh run_local.sh
dhat-heap.json dhat-heap.json

View file

@ -1,26 +0,0 @@
when:
event:
- push
- pull_request
- tag
- cron
- manual
steps:
- name: check formatting
image: nixpkgs/nix:nixos-22.05
environment:
NIX_PATH: 'nixpkgs=channel:nixos-22.05'
commands:
- nix-shell -p cargo -p rustfmt --run 'cargo fmt -- --check'
- name: build
image: nixpkgs/nix:nixos-22.05
commands:
- nix build --extra-experimental-features nix-command --extra-experimental-features flakes .#debug.x86_64-linux.tricot
- name: test
image: nixpkgs/nix:nixos-22.05
commands:
- nix build --extra-experimental-features nix-command --extra-experimental-features flakes .#test.x86_64-linux.tricot
- ./result-bin/bin/tricot-*

1907
Cargo.lock generated

File diff suppressed because it is too large Load diff

3514
Cargo.nix

File diff suppressed because it is too large Load diff

View file

@ -7,38 +7,34 @@ authors = ["Alex Auvolat <alex@adnab.me>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
anyhow = "1.0.66" anyhow = "1.0.28"
envy = "0.4" envy = "0.4"
futures = "0.3" futures = "0.3"
tracing = { version = "0.1.30" } log = "0.4"
tracing-subscriber = { version = "0.3", features = ["env-filter"] } pretty_env_logger = "0.4"
regex = "1" regex = "1"
reqwest = { version = "0.11", default-features = false, features = ["json", "rustls-tls-manual-roots" ] } reqwest = { version = "0.11", default-features = false, features = ["json", "rustls-tls-manual-roots" ] }
serde = { version = "1.0.149", features = ["derive"] } serde = { version = "1.0.107", features = ["derive"] }
serde_json = "1.0.89" serde_json = "1.0.53"
tokio = { version = "1.22", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } tokio = { version = "1.0", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] }
bytes = "1" bytes = "1"
acme-micro = "0.12" acme-micro = "0.12"
rustls = { version = "0.21", features = [ "dangerous_configuration" ] } rustls = { version = "0.20", features = [ "dangerous_configuration" ] }
rustls-pemfile = "1.0" rustls-pemfile = "0.2"
chrono = { version = "0.4", features = [ "serde" ] } chrono = { version = "0.4", features = [ "serde" ] }
hyper = { version = "0.14", features = [ "full" ] } hyper = { version = "0.14", features = [ "full" ] }
futures-util = "0.3" futures-util = "0.3"
tokio-rustls = "0.24" tokio-rustls = "0.23"
hyper-rustls = "0.24" hyper-rustls = "0.23"
http = "0.2" http = "0.2"
hyper-reverse-proxy = "0.4"
structopt = "0.3" structopt = "0.3"
glob = "0.3" glob = "0.3"
rcgen = "0.11" rcgen = "0.8"
accept-encoding-fork = "0.2.0-alpha.3" accept-encoding-fork = "0.2.0-alpha.3"
async-compression = { version = "0.4", features = ["tokio", "gzip", "zstd", "deflate", "brotli"] } async-compression = { version = "0.3", features = ["tokio", "gzip", "zstd", "deflate", "brotli"] }
tokio-util = { version = "0.7", features = ["io"] } tokio-util = { version = "0.6", features = ["io"] }
uuid = { version = "1.2", features = ["v4"] } uuid = { version = "0.8.2", features = ["v4"] }
opentelemetry = "0.20"
opentelemetry-prometheus = "0.13"
prometheus = "0.13"
df-consul = "0.3.5"
dhat = { version = "0.3", optional = true } dhat = { version = "0.3", optional = true }
[profile.release] [profile.release]

View file

@ -1,4 +1,4 @@
FROM rust:1.68-buster as builder FROM rust:1.58-buster as builder
RUN apt-get update && \ RUN apt-get update && \
apt-get install -y libssl-dev pkg-config apt-get install -y libssl-dev pkg-config

View file

@ -1,7 +1,5 @@
# Tricot # Tricot
[![status-badge](https://woodpecker.deuxfleurs.fr/api/badges/36/status.svg)](https://woodpecker.deuxfleurs.fr/repos/36)
Tricot is a reverse-proxy for exposing your services via TLS that integrates well with Consul and Nomad. Tricot is a reverse-proxy for exposing your services via TLS that integrates well with Consul and Nomad.
Tricot does the following things: Tricot does the following things:
@ -29,8 +27,7 @@ Tricot uses the following environment variables for its configuration (they can
- `TRICOT_ENABLE_COMPRESSION` (default: `false`): whether to enable transparent compression of data coming back from backends - `TRICOT_ENABLE_COMPRESSION` (default: `false`): whether to enable transparent compression of data coming back from backends
- `TRICOT_COMPRESS_MIME_TYPES` (default: `text/html,text/plain,text/css,text/javascript,text/xml,application/javascript,application/json,application/xml,image/svg+xml,font/ttf`): comma-separated list of MIME types for which Tricot will do compression - `TRICOT_COMPRESS_MIME_TYPES` (default: `text/html,text/plain,text/css,text/javascript,text/xml,application/javascript,application/json,application/xml,image/svg+xml,font/ttf`): comma-separated list of MIME types for which Tricot will do compression
[Here is an example of how to run Tricot as a Nomad service](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/commit/1a11ff42029e0a6cb5f5b9c34043af9d6d52e5ab/cluster/prod/app/core/deploy/tricot.hcl) [Here is an example of how to run Tricot as a Nomad service](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/branch/main/app/frontend/deploy/frontend-tricot.hcl) (in this example it also integrates with [Diplonat](https://git.deuxfleurs.fr/Deuxfleurs/diplonat)).
(in this example it also integrates with [Diplonat](https://git.deuxfleurs.fr/Deuxfleurs/diplonat)).
## Adding and priorizing backends ## Adding and priorizing backends
@ -42,16 +39,15 @@ Backends are configured by adding tags of the following form to the services in
- `tricot myapp.example.com/path/to_subresource 10`: combining the previous two examples - `tricot myapp.example.com/path/to_subresource 10`: combining the previous two examples
- `tricot-https myapp.example.com`: same, but indicates that the backend service handling the request expects an HTTPS request and not an HTTP request. In this case, Tricot will do everything in its power to NOT verify the backend's TLS certificate (ignore self-signed certificate, ignore TLS hostname, etc). - `tricot-https myapp.example.com`: same, but indicates that the backend service handling the request expects an HTTPS request and not an HTTP request. In this case, Tricot will do everything in its power to NOT verify the backend's TLS certificate (ignore self-signed certificate, ignore TLS hostname, etc).
- `tricot-add-header Access-Control-Allow-Origin *`: add the `Access-Control-Allow-Origin: *` header to all of the HTTP responses when they are proxied back to the client - `tricot-add-header Access-Control-Allow-Origin *`: add the `Access-Control-Allow-Origin: *` header to all of the HTTP responses when they are proxied back to the client
- `tricot-add-redirect old.example.com/maybe_subpath new.example.com/new/subpath 301`: redirects paths that match the first pattern to the second pattern with the given HTTP status code. More info in [PR#10](https://git.deuxfleurs.fr/Deuxfleurs/tricot/pulls/10).
- `tricot-global-lb`: load-balance incoming requests to all matching backends - `tricot-global-lb`: load-balance incoming requests to all matching backends
- `tricot-site-lb`: load-balance incoming requests to all matching backends that are in the same site (geographical location); when site information about nodes is not available, this is equivalent to `tricot-global-lb` - `tricot-site-lb`: load-balance incoming requests to all matching backends that are in the same site (geographical location); when site information about nodes is not available, this is equivalent to `tricot-global-lb`
Any number of such rules can be combined freely. Any number of such rules can be combined freely.
[Here](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/commit/981294e3d7a180a3c08f8173dc652b73b6e2bd07/cluster/staging/app/dummy/deploy/dummy-nginx.hcl) [Here](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/branch/main/app/dummy/deploy/dummy-nginx.hcl)
[are](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/commit/1a11ff42029e0a6cb5f5b9c34043af9d6d52e5ab/cluster/prod/app/garage/deploy/garage.hcl) [are](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/branch/main/app/garage-staging/deploy/garage.hcl)
[some](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/commit/1a11ff42029e0a6cb5f5b9c34043af9d6d52e5ab/cluster/prod/app/matrix/deploy/im.hcl) [some](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/branch/main/app/im/deploy/im.hcl)
[examples](https://git.deuxfleurs.fr/Deuxfleurs/nixcfg/src/commit/1a11ff42029e0a6cb5f5b9c34043af9d6d52e5ab/cluster/prod/app/jitsi/deploy/jitsi.hcl). [examples](https://git.deuxfleurs.fr/Deuxfleurs/infrastructure/src/branch/main/app/jitsi/deploy/jitsi.hcl).
## How Tricot matches backends ## How Tricot matches backends
@ -72,6 +68,38 @@ Logs are the privileged place to get information about what Tricot is doing. You
- `RUST_LOG=tricot=debug`: Tricot will show for each request the backend to which it is routed. It will also show all of its interactions with Consul - `RUST_LOG=tricot=debug`: Tricot will show for each request the backend to which it is routed. It will also show all of its interactions with Consul
- `RUST_LOG=tricot=trace`: Tricot will show details such as the request's headers for all request at all stages of proxying. - `RUST_LOG=tricot=trace`: Tricot will show details such as the request's headers for all request at all stages of proxying.
## For developers
Build Tricot:
```bash
git clone https://git.deuxfleurs.fr/Deuxfleurs/tricot
cd tricot
cargo build
```
Start Tricot:
```bash
consul agent -dev
cargo run -- --letsencrypt-email you@example.com --http-bind-addr [::1]:8080 --https-bind-addr [::1]:4443
```
Register services:
```bash
cd examples/node
npm install
node server.mjs
consul services register -name=localhost -tag="tricot localhost" -address [::1] -port 3000
```
Test it:
```
node client.mjs
```
## License ## License

View file

@ -1,964 +0,0 @@
{
"__inputs": [
{
"name": "DS_DS_PROMETHEUS",
"label": "DS_PROMETHEUS",
"description": "",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
"id": "gauge",
"name": "Gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.3.1"
},
{
"type": "panel",
"id": "piechart",
"name": "Pie chart",
"version": ""
},
{
"type": "datasource",
"id": "prometheus",
"name": "Prometheus",
"version": "1.0.0"
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": false,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"max": 300,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "yellow",
"value": 100
},
{
"color": "red",
"value": 200
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 6,
"x": 0,
"y": 0
},
"id": 8,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Request rate",
"type": "gauge"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 6,
"x": 6,
"y": 0
},
"id": 14,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "9.2.6",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum(rate(https_request_proxy_duration_sum{job=\"tricot\"}[$__rate_interval])) / sum(rate(https_request_proxy_duration_count{job=\"tricot\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "Average",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "histogram_quantile(0.5, sum(rate(https_request_proxy_duration_bucket[$__rate_interval])) by (le))",
"hide": false,
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "50%",
"range": true,
"refId": "D"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "histogram_quantile(0.90, sum(rate(https_request_proxy_duration_bucket[$__rate_interval])) by (le))",
"hide": false,
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "90%",
"range": true,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "histogram_quantile(0.99, sum(rate(https_request_proxy_duration_bucket[$__rate_interval])) by (le))",
"hide": false,
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "99%",
"range": true,
"refId": "C"
}
],
"title": "Response time",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
}
},
"mappings": [],
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 4,
"x": 12,
"y": 0
},
"id": 11,
"options": {
"displayLabels": [
"name"
],
"legend": {
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"pieType": "pie",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by(service) (rate(https_requests_served{status_code=~\"2.+\", job=\"tricot\"}[$__range]))",
"instant": true,
"interval": "",
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{service}}",
"range": false,
"refId": "A"
}
],
"title": "Requests per service",
"type": "piechart"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 8,
"x": 16,
"y": 0
},
"id": 4,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"expr": "sum by(method, status_code) (rate(https_requests_served{status_code=~\"2.+\", job=\"tricot\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{method}} {{status_code}}",
"range": true,
"refId": "A"
}
],
"title": "Status success",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 8
},
"id": 1,
"options": {
"legend": {
"calcs": [
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true,
"sortBy": "Mean",
"sortDesc": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by(service) (rate(https_requests_served{job=\"tricot\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Requests served per service",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 8
},
"id": 7,
"options": {
"legend": {
"calcs": [
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true,
"sortBy": "Max",
"sortDesc": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by(method, status_code) (rate(https_requests_served{status_code!~\"2.+\", job=\"tricot\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{method}} {{status_code}}",
"range": true,
"refId": "A"
}
],
"title": "Status != 200 OK",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 17,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "none"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 7,
"x": 0,
"y": 17
},
"id": 6,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"expr": "sum by(host) (proxy_config_entries{job=\"tricot\"})",
"instant": false,
"key": "Q-f0ebfca9-6429-43f7-8617-af76a4be3918-0",
"legendFormat": "{{host}}",
"range": true,
"refId": "A"
}
],
"title": "Backends per domain",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
}
},
"mappings": []
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 4,
"x": 7,
"y": 17
},
"id": 16,
"options": {
"legend": {
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"pieType": "donut",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\", same_site=\"true\", same_node=\"true\"}[$__range]))",
"instant": true,
"legendFormat": "Local node",
"range": false,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\", same_site=\"true\", same_node=\"false\"}[$__range]))",
"hide": false,
"instant": true,
"legendFormat": "Node in same site",
"range": false,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\", same_site=\"false\"}[$__range]))",
"hide": false,
"instant": true,
"legendFormat": "Node in another site",
"range": false,
"refId": "C"
}
],
"title": "Requests served by",
"type": "piechart"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 13,
"x": 11,
"y": 17
},
"id": 2,
"options": {
"legend": {
"calcs": [
"min",
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true,
"sortBy": "Mean",
"sortDesc": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "builder",
"expr": "sum by(service, target_addr) (rate(https_requests_served{job=\"tricot\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{service}}@{{target_addr}}",
"range": true,
"refId": "A"
}
],
"title": "Requests served per backend server",
"type": "timeseries"
}
],
"refresh": "10s",
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-30m",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Tricot global",
"uid": "H6ChABK4k",
"version": 37,
"weekStart": ""
}

View file

@ -1,994 +0,0 @@
{
"__inputs": [
{
"name": "DS_DS_PROMETHEUS",
"label": "DS_PROMETHEUS",
"description": "",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
"id": "gauge",
"name": "Gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.3.1"
},
{
"type": "panel",
"id": "piechart",
"name": "Pie chart",
"version": ""
},
{
"type": "datasource",
"id": "prometheus",
"name": "Prometheus",
"version": "1.0.0"
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": false,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"max": 300,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "yellow",
"value": 100
},
{
"color": "red",
"value": 200
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 6,
"x": 0,
"y": 0
},
"id": 8,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\",service=\"$service\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Request rate",
"type": "gauge"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
}
},
"mappings": [],
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 6,
"x": 6,
"y": 0
},
"id": 11,
"options": {
"legend": {
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"pieType": "pie",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by(method, status_code) (rate(https_requests_served{status_code=~\"2.+\", job=\"tricot\", service=\"$service\"}[$__range]))",
"instant": true,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{method}} {{status_code}}",
"range": false,
"refId": "A"
}
],
"title": "Status success",
"type": "piechart"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 0
},
"id": 4,
"options": {
"legend": {
"calcs": [
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"expr": "sum by(method, status_code) (rate(https_requests_served{status_code=~\"2.+\", job=\"tricot\", service=\"$service\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{method}} {{status_code}}",
"range": true,
"refId": "A"
}
],
"title": "Status success",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 10,
"x": 0,
"y": 8
},
"id": 1,
"options": {
"legend": {
"calcs": [
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true,
"sortBy": "Mean",
"sortDesc": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by(target_addr) (rate(https_requests_served{job=\"tricot\",service=\"$service\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Requests served per backend",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 5,
"x": 10,
"y": 8
},
"id": 14,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "9.2.6",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum(rate(https_request_proxy_duration_sum{job=\"tricot\",service=\"$service\"}[$__rate_interval])) / sum(rate(https_request_proxy_duration_count{job=\"tricot\",service=\"$service\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "Average",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "histogram_quantile(0.5, sum(rate(https_request_proxy_duration_bucket{job=\"tricot\",service=\"$service\"}[$__rate_interval])) by (le))",
"hide": false,
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "50%",
"range": true,
"refId": "D"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "histogram_quantile(0.90, sum(rate(https_request_proxy_duration_bucket{job=\"tricot\",service=\"$service\"}[$__rate_interval])) by (le))",
"hide": false,
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "90%",
"range": true,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "histogram_quantile(0.99, sum(rate(https_request_proxy_duration_bucket{job=\"tricot\",service=\"$service\"}[$__rate_interval])) by (le))",
"hide": false,
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "99%",
"range": true,
"refId": "C"
}
],
"title": "Response time",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 9,
"x": 15,
"y": 8
},
"id": 7,
"options": {
"legend": {
"calcs": [
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true,
"sortBy": "Max",
"sortDesc": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by(method, status_code) (rate(https_requests_served{status_code!~\"2.+\", job=\"tricot\", service=\"$service\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{method}} {{status_code}}",
"range": true,
"refId": "A"
}
],
"title": "Non-2xx responses",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 10,
"x": 0,
"y": 17
},
"id": 16,
"options": {
"legend": {
"calcs": [
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true,
"sortBy": "Mean",
"sortDesc": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "9.2.6",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by (target_addr)(rate(https_request_proxy_duration_sum{job=\"tricot\",service=\"$service\"}[$__rate_interval])) / sum by (target_addr)(rate(https_request_proxy_duration_count{job=\"tricot\",service=\"$service\"}[$__rate_interval]))",
"instant": false,
"interval": "",
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{target_addr}}",
"range": true,
"refId": "A"
}
],
"title": "Response time per backend",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
}
},
"mappings": []
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 4,
"x": 10,
"y": 17
},
"id": 18,
"options": {
"legend": {
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"pieType": "donut",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\", same_site=\"true\", same_node=\"true\", service=\"$service\"}[$__range]))",
"instant": true,
"legendFormat": "Local node",
"range": false,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\", same_site=\"true\", same_node=\"false\",service=\"$service\"}[$__range]))",
"hide": false,
"instant": true,
"legendFormat": "Node in same site",
"range": false,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum (rate(https_requests_served{job=\"tricot\", same_site=\"false\",service=\"$service\"}[$__range]))",
"hide": false,
"instant": true,
"legendFormat": "Node in another site",
"range": false,
"refId": "C"
}
],
"title": "Requests served by",
"type": "piechart"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 100,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 10,
"x": 14,
"y": 17
},
"id": 15,
"options": {
"legend": {
"calcs": [
"mean",
"max"
],
"displayMode": "table",
"placement": "right",
"showLegend": true,
"sortBy": "Max",
"sortDesc": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"editorMode": "code",
"exemplar": false,
"expr": "sum by(target_addr, method, status_code) (rate(https_requests_served{status_code!~\"2.+\", job=\"tricot\", service=\"$service\"}[$__rate_interval]))",
"instant": false,
"key": "Q-b2139746-a221-47de-a50b-fadc128d0021-0",
"legendFormat": "{{target_addr}} {{method}} {{status_code}}",
"range": true,
"refId": "A"
}
],
"title": "Non-2xx responses, per backend",
"type": "timeseries"
}
],
"refresh": "10s",
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"current": {},
"datasource": {
"type": "prometheus",
"uid": "${DS_DS_PROMETHEUS}"
},
"definition": "https_requests_served{job=\"tricot\"}",
"hide": 0,
"includeAll": false,
"multi": false,
"name": "service",
"options": [],
"query": {
"query": "https_requests_served{job=\"tricot\"}",
"refId": "StandardVariableQuery"
},
"refresh": 1,
"regex": "/service=\"([a-z0-9-]+)\"/",
"skipUrlSync": false,
"sort": 0,
"type": "query"
}
]
},
"time": {
"from": "now-30m",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Tricot per-service",
"uid": "u930OPFVz",
"version": 9,
"weekStart": ""
}

1
examples/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
node_modules

11
examples/node/client.mjs Normal file
View file

@ -0,0 +1,11 @@
// Minimal WebSocket client used to exercise websocket proxying.
// Connects over TLS with certificate verification disabled (the proxy
// may be serving a self-signed certificate), sends a single message,
// and logs anything it receives back.
import WebSocket from 'ws';

const serverUrl = 'wss://localhost:4443';
// Alternative: connect straight to the backend, bypassing the proxy:
// const serverUrl = 'ws://localhost:3000';

const socket = new WebSocket(serverUrl, {
  rejectUnauthorized: false,
});

socket.on('open', () => socket.send('something'));
socket.on('message', msg => console.log('received: %s', msg));

44
examples/node/package-lock.json generated Normal file
View file

@ -0,0 +1,44 @@
{
"name": "nodeserver",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "nodeserver",
"version": "1.0.0",
"license": "AGPL-3.0-or-later",
"dependencies": {
"ws": "^8.6.0"
}
},
"node_modules/ws": {
"version": "8.6.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.6.0.tgz",
"integrity": "sha512-AzmM3aH3gk0aX7/rZLYvjdvZooofDu3fFOzGqcSnQ1tOcTWwhM/o+q++E8mAyVVIyUdajrkzWUGftaVSDLn1bw==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
}
},
"dependencies": {
"ws": {
"version": "8.6.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.6.0.tgz",
"integrity": "sha512-AzmM3aH3gk0aX7/rZLYvjdvZooofDu3fFOzGqcSnQ1tOcTWwhM/o+q++E8mAyVVIyUdajrkzWUGftaVSDLn1bw==",
"requires": {}
}
}
}

View file

@ -0,0 +1,14 @@
{
"name": "nodeserver",
"version": "1.0.0",
"description": "",
"main": "server.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Quentin Dufour <quentin@deuxfleurs.fr>",
"license": "AGPL-3.0-or-later",
"dependencies": {
"ws": "^8.6.0"
}
}

9
examples/node/server.mjs Normal file
View file

@ -0,0 +1,9 @@
// Minimal WebSocket echo server: logs every message received on a
// connection and sends it back unchanged. Listens on port 3000.
import { WebSocketServer } from 'ws';

const wss = new WebSocketServer({ port: 3000 });

wss.on('connection', socket => {
  socket.on('message', msg => {
    console.log('received: %s', msg);
    socket.send(msg);
  });
});

View file

@ -1,151 +0,0 @@
{
"nodes": {
"cargo2nix": {
"inputs": {
"flake-compat": "flake-compat",
"flake-utils": "flake-utils",
"nixpkgs": [
"nixpkgs"
],
"rust-overlay": "rust-overlay"
},
"locked": {
"lastModified": 1666087781,
"narHash": "sha256-trKVdjMZ8mNkGfLcY5LsJJGtdV3xJDZnMVrkFjErlcs=",
"owner": "Alexis211",
"repo": "cargo2nix",
"rev": "a7a61179b66054904ef6a195d8da736eaaa06c36",
"type": "github"
},
"original": {
"owner": "Alexis211",
"repo": "cargo2nix",
"rev": "a7a61179b66054904ef6a195d8da736eaaa06c36",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1650374568,
"narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "b4a34015c698c7793d592d66adbab377907a2be8",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": {
"locked": {
"lastModified": 1659877975,
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1694529238,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1696234590,
"narHash": "sha256-mgOzQYTvaTT4bFopVOadlndy2RPwLy60rDjIWOGujwo=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "f902cb49892d300ff15cb237e48aa1cad79d68c3",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1665657542,
"narHash": "sha256-mojxNyzbvmp8NtVtxqiHGhRfjCALLfk9i/Uup68Y5q8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a3073c49bc0163fea6a121c276f526837672b555",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a3073c49bc0163fea6a121c276f526837672b555",
"type": "github"
}
},
"root": {
"inputs": {
"cargo2nix": "cargo2nix",
"nixpkgs": "nixpkgs_2"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1682389182,
"narHash": "sha256-8t2nmFnH+8V48+IJsf8AK51ebXNlVbOSVYOpiqJKvJE=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "74f1a64dd28faeeb85ef081f32cad2989850322c",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View file

@ -1,41 +0,0 @@
{
description = "A very basic flake";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/a3073c49bc0163fea6a121c276f526837672b555";
inputs.cargo2nix = {
# As of 2022-10-18: two small patches over unstable branch, one for clippy and one to fix feature detection
url = "github:Alexis211/cargo2nix/a7a61179b66054904ef6a195d8da736eaaa06c36";
# Rust overlay as of 2023-04-25
inputs.rust-overlay.url =
"github:oxalica/rust-overlay/74f1a64dd28faeeb85ef081f32cad2989850322c";
inputs.nixpkgs.follows = "nixpkgs";
};
outputs = { self, nixpkgs, cargo2nix }:
let
pkgs = import nixpkgs {
system = "x86_64-linux";
overlays = [ cargo2nix.overlays.default ];
};
packageFun = import ./Cargo.nix;
rustVersion = "1.68.0";
compile = args: compileMode:
let
packageSet = pkgs.rustBuilder.makePackageSet ({
inherit packageFun rustVersion;
} // args);
in
packageSet.workspace.tricot {
inherit compileMode;
};
in
{
test.x86_64-linux.tricot = compile { release = false; } "test";
debug.x86_64-linux.tricot = compile { release = false; } "build";
packages.x86_64-linux.tricot = compile { release = true; } "build";
packages.x86_64-linux.default = self.packages.x86_64-linux.tricot;
};
}

View file

@ -2,7 +2,7 @@ use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use chrono::{NaiveDate, Utc}; use chrono::{Date, NaiveDate, Utc};
use rustls::sign::CertifiedKey; use rustls::sign::CertifiedKey;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -54,7 +54,8 @@ impl Cert {
} }
pub fn is_old(&self) -> bool { pub fn is_old(&self) -> bool {
let today = Utc::now().date_naive(); let date = Date::<Utc>::from_utc(self.ser.date, Utc);
today - self.ser.date > chrono::Duration::days(self.ser.valid_days / 2) let today = Utc::today();
today - date > chrono::Duration::days(self.ser.valid_days / 2)
} }
} }

View file

@ -5,40 +5,31 @@ use std::time::{Duration, Instant};
use anyhow::Result; use anyhow::Result;
use chrono::Utc; use chrono::Utc;
use futures::{FutureExt, TryFutureExt}; use futures::{FutureExt, TryFutureExt};
use log::*;
use tokio::select; use tokio::select;
use tokio::sync::{mpsc, watch}; use tokio::sync::{mpsc, watch};
use tokio::task::block_in_place; use tokio::task::block_in_place;
use tracing::*;
use acme_micro::create_p384_key; use acme_micro::create_p384_key;
use acme_micro::{Directory, DirectoryUrl}; use acme_micro::{Directory, DirectoryUrl};
use rustls::sign::CertifiedKey; use rustls::sign::CertifiedKey;
use crate::cert::{Cert, CertSer}; use crate::cert::{Cert, CertSer};
use crate::consul::{self, Consul}; use crate::consul::*;
use crate::proxy_config::*; use crate::proxy_config::*;
pub struct CertStore { pub struct CertStore {
consul: Consul, consul: Consul,
node_name: String,
letsencrypt_email: String, letsencrypt_email: String,
certs: RwLock<HashMap<String, Arc<Cert>>>, certs: RwLock<HashMap<String, Arc<Cert>>>,
self_signed_certs: RwLock<HashMap<String, Arc<Cert>>>, self_signed_certs: RwLock<HashMap<String, Arc<Cert>>>,
rx_proxy_config: watch::Receiver<Arc<ProxyConfig>>, rx_proxy_config: watch::Receiver<Arc<ProxyConfig>>,
tx_need_cert: mpsc::UnboundedSender<String>, tx_need_cert: mpsc::UnboundedSender<String>,
} }
struct ProcessedDomains {
static_domains: HashSet<String>,
on_demand_domains: Vec<(glob::Pattern, Option<String>)>,
}
impl CertStore { impl CertStore {
pub fn new( pub fn new(
consul: Consul, consul: Consul,
node_name: String,
rx_proxy_config: watch::Receiver<Arc<ProxyConfig>>, rx_proxy_config: watch::Receiver<Arc<ProxyConfig>>,
letsencrypt_email: String, letsencrypt_email: String,
exit_on_err: impl Fn(anyhow::Error) + Send + 'static, exit_on_err: impl Fn(anyhow::Error) + Send + 'static,
@ -47,11 +38,10 @@ impl CertStore {
let cert_store = Arc::new(Self { let cert_store = Arc::new(Self {
consul, consul,
node_name,
letsencrypt_email,
certs: RwLock::new(HashMap::new()), certs: RwLock::new(HashMap::new()),
self_signed_certs: RwLock::new(HashMap::new()), self_signed_certs: RwLock::new(HashMap::new()),
rx_proxy_config, rx_proxy_config,
letsencrypt_email,
tx_need_cert: tx, tx_need_cert: tx,
}); });
@ -73,72 +63,46 @@ impl CertStore {
let mut rx_proxy_config = self.rx_proxy_config.clone(); let mut rx_proxy_config = self.rx_proxy_config.clone();
let mut t_last_check: HashMap<String, Instant> = HashMap::new(); let mut t_last_check: HashMap<String, Instant> = HashMap::new();
let mut proc_domains: Option<ProcessedDomains> = None;
loop { loop {
let domains = select! { let mut domains: HashSet<String> = HashSet::new();
// Refresh some internal states, schedule static_domains for renew
select! {
res = rx_proxy_config.changed() => { res = rx_proxy_config.changed() => {
if res.is_err() { if res.is_err() {
bail!("rx_proxy_config closed"); bail!("rx_proxy_config closed");
} }
let mut static_domains: HashSet<String> = HashSet::new();
let mut on_demand_domains: Vec<(glob::Pattern, Option<String>)> = vec![];
let proxy_config: Arc<ProxyConfig> = rx_proxy_config.borrow().clone(); let proxy_config: Arc<ProxyConfig> = rx_proxy_config.borrow().clone();
for ent in proxy_config.entries.iter() { for ent in proxy_config.entries.iter() {
// Eagerly generate certificates for domains that if let HostDescription::Hostname(domain) = &ent.host {
// are not patterns if let Some((host, _port)) = domain.split_once(':') {
match &ent.url_prefix.host { domains.insert(host.to_string());
HostDescription::Hostname(domain) => { } else {
if let Some((host, _port)) = domain.split_once(':') { domains.insert(domain.clone());
static_domains.insert(host.to_string()); }
} else {
static_domains.insert(domain.clone());
}
},
HostDescription::Pattern(pattern) => {
on_demand_domains.push((pattern.clone(), ent.on_demand_tls_ask.clone()));
},
} }
} }
// only static_domains are refreshed
proc_domains = Some(ProcessedDomains { static_domains: static_domains.clone(), on_demand_domains });
self.domain_validation(static_domains, proc_domains.as_ref()).await
} }
// renew static and on-demand domains
need_cert = rx_need_cert.recv() => { need_cert = rx_need_cert.recv() => {
match need_cert { match need_cert {
Some(dom) => { Some(dom) => {
let mut candidates: HashSet<String> = HashSet::new(); domains.insert(dom);
// collect certificates as much as possible
candidates.insert(dom);
while let Ok(dom2) = rx_need_cert.try_recv() { while let Ok(dom2) = rx_need_cert.try_recv() {
candidates.insert(dom2); domains.insert(dom2);
} }
self.domain_validation(candidates, proc_domains.as_ref()).await
} }
None => bail!("rx_need_cert closed"), None => bail!("rx_need_cert closed"),
} };
} }
}; }
// Now that we have our list of domains to check,
// actually do something
for dom in domains.iter() { for dom in domains.iter() {
// Exclude from the list domains that were checked less than 60
// seconds ago
match t_last_check.get(dom) { match t_last_check.get(dom) {
Some(t) if Instant::now() - *t < Duration::from_secs(60) => continue, Some(t) if Instant::now() - *t < Duration::from_secs(60) => continue,
_ => t_last_check.insert(dom.to_string(), Instant::now()), _ => t_last_check.insert(dom.to_string(), Instant::now()),
}; };
// Actual Let's Encrypt calls are done here (in sister function)
debug!("Checking cert for domain: {}", dom); debug!("Checking cert for domain: {}", dom);
if let Err(e) = self.check_cert(dom).await { if let Err(e) = self.check_cert(dom).await {
warn!("({}) Could not get certificate: {}", dom, e); warn!("({}) Could not get certificate: {}", dom, e);
@ -147,82 +111,18 @@ impl CertStore {
} }
} }
async fn domain_validation( fn get_cert_for_https(self: &Arc<Self>, domain: &str) -> Result<Arc<Cert>> {
&self, // Check if domain is authorized
candidates: HashSet<String>, if !self
maybe_proc_domains: Option<&ProcessedDomains>, .rx_proxy_config
) -> HashSet<String> { .borrow()
let mut domains: HashSet<String> = HashSet::new(); .entries
.iter()
// Handle initialization .any(|ent| ent.host.matches(domain))
let proc_domains = match maybe_proc_domains { {
None => { bail!("Domain {} should not have a TLS certificate.", domain);
warn!("Proxy config is not yet loaded, refusing all certificate generation");
return domains;
}
Some(proc) => proc,
};
// Filter certificates...
'outer: for candidate in candidates.into_iter() {
// Disallow obvious wrong domains...
if !candidate.contains('.') || candidate.ends_with(".local") {
warn!("{} is probably not a publicly accessible domain, skipping (a self-signed certificate will be used)", candidate);
continue;
}
// Try to register domain as a static domain
if proc_domains.static_domains.contains(&candidate) {
trace!("domain {} validated as static domain", candidate);
domains.insert(candidate);
continue;
}
// It's not a static domain, maybe an on-demand domain?
for (pattern, maybe_check_url) in proc_domains.on_demand_domains.iter() {
// check glob pattern
if pattern.matches(&candidate) {
// if no check url is set, accept domain as long as it matches the pattern
let check_url = match maybe_check_url {
None => {
trace!(
"domain {} validated on glob pattern {} only",
candidate,
pattern
);
domains.insert(candidate);
continue 'outer;
}
Some(url) => url,
};
// if a check url is set, call it
// -- avoid DDoSing a backend
tokio::time::sleep(Duration::from_secs(2)).await;
match self.on_demand_tls_ask(check_url, &candidate).await {
Ok(()) => {
trace!(
"domain {} validated on glob pattern {} and on check url {}",
candidate,
pattern,
check_url
);
domains.insert(candidate);
continue 'outer;
}
Err(e) => {
warn!("domain {} validation refused on glob pattern {} and on check url {} with error: {}", candidate, pattern, check_url, e);
}
}
}
}
} }
return domains;
}
/// This function is also in charge of the refresh of the domain names
fn get_cert_for_https(self: &Arc<Self>, domain: &str) -> Result<Arc<Cert>> {
// Check in local memory if it exists // Check in local memory if it exists
if let Some(cert) = self.certs.read().unwrap().get(domain) { if let Some(cert) = self.certs.read().unwrap().get(domain) {
if cert.is_old() { if cert.is_old() {
@ -242,57 +142,6 @@ impl CertStore {
self.gen_self_signed_certificate(domain) self.gen_self_signed_certificate(domain)
} }
pub async fn warmup_memory_store(self: &Arc<Self>) -> Result<()> {
let consul_certs = self
.consul
.kv_get_prefix("certs/", None)
.await?
.into_inner();
trace!(
"Fetched {} certificate entries from Consul",
consul_certs.len()
);
let mut loaded_certs: usize = 0;
for (key, cert) in consul_certs {
let certser: CertSer = match serde_json::from_slice(&cert) {
Ok(cs) => cs,
Err(e) => {
warn!("Could not deserialize CertSer for {key}: {e}");
continue;
}
};
let domain = certser.hostname.clone();
let cert = match Cert::new(certser) {
Ok(c) => c,
Err(e) => {
warn!("Could not create Cert from CertSer for domain {domain}: {e}");
continue;
}
};
self.certs
.write()
.unwrap()
.insert(domain.to_string(), Arc::new(cert));
debug!("({domain}) Certificate loaded from Consul to the Memory Store");
loaded_certs += 1;
}
info!("Memory store warmed up with {loaded_certs} certificates");
Ok(())
}
/// Check certificate ensure that the certificate is in the memory store
/// and that it does not need to be renewed.
///
/// If it's not in the memory store, it tries to load it from Consul,
/// if it's not in Consul, it calls Let's Encrypt.
///
/// If the certificate is outdated in the memory store, it tries to load
/// a more recent version in Consul, if the Consul version is also outdated,
/// it tries to renew it
pub async fn check_cert(self: &Arc<Self>, domain: &str) -> Result<()> { pub async fn check_cert(self: &Arc<Self>, domain: &str) -> Result<()> {
// First, try locally. // First, try locally.
{ {
@ -310,43 +159,41 @@ impl CertStore {
.kv_get_json::<CertSer>(&format!("certs/{}", domain)) .kv_get_json::<CertSer>(&format!("certs/{}", domain))
.await? .await?
{ {
match Cert::new(consul_cert) { if let Ok(cert) = Cert::new(consul_cert) {
Ok(cert) => { let cert = Arc::new(cert);
let cert = Arc::new(cert); self.certs
self.certs .write()
.write() .unwrap()
.unwrap() .insert(domain.to_string(), cert.clone());
.insert(domain.to_string(), cert.clone()); if !cert.is_old() {
debug!("({domain}) Certificate loaded from Consul to the Memory Store"); return Ok(());
if !cert.is_old() {
return Ok(());
}
} }
Err(e) => { }
warn!("Could not create Cert from CertSer for domain {domain}: {e}");
}
};
} }
// Third, ask from Let's Encrypt // Third, ask from Let's Encrypt
self.renew_cert(domain).await self.renew_cert(domain).await
} }
/// This is the place where certificates are generated or renewed
pub async fn renew_cert(self: &Arc<Self>, domain: &str) -> Result<()> { pub async fn renew_cert(self: &Arc<Self>, domain: &str) -> Result<()> {
info!("({}) Renewing certificate", domain); info!("({}) Renewing certificate", domain);
// Basic sanity check (we could add more kinds of checks here)
// This is just to help avoid getting rate-limited against ACME server
if !domain.contains('.') || domain.ends_with(".local") {
bail!("Probably not a publicly accessible domain, skipping (a self-signed certificate will be used)");
}
// ---- Acquire lock ---- // ---- Acquire lock ----
// the lock is acquired for half an hour, // the lock is acquired for half an hour,
// so that in case of an error we won't retry before // so that in case of an error we won't retry before
// that delay expires // that delay expires
let lock_path = format!("renew_lock/{}", domain); let lock_path = format!("renew_lock/{}", domain);
let lock_name = format!("tricot/renew:{}@{}", domain, self.node_name); let lock_name = format!("tricot/renew:{}@{}", domain, self.consul.local_node.clone());
let session = self let session = self
.consul .consul
.create_session(&consul::locking::SessionRequest { .create_session(&ConsulSessionRequest {
name: lock_name.clone(), name: lock_name.clone(),
node: None, node: None,
lock_delay: Some("30m".into()), lock_delay: Some("30m".into()),
@ -431,7 +278,7 @@ impl CertStore {
let certser = CertSer { let certser = CertSer {
hostname: domain.to_string(), hostname: domain.to_string(),
date: Utc::now().date_naive(), date: Utc::today().naive_utc(),
valid_days: cert.valid_days_left()?, valid_days: cert.valid_days_left()?,
key_pem, key_pem,
cert_pem, cert_pem,
@ -452,19 +299,6 @@ impl CertStore {
Ok(()) Ok(())
} }
async fn on_demand_tls_ask(&self, check_url: &str, domain: &str) -> Result<()> {
let httpcli = reqwest::Client::new();
let chall_url = format!("{}?domain={}", check_url, domain);
info!("({}) On-demand TLS check", domain);
let httpresp = httpcli.get(&chall_url).send().await?;
if httpresp.status() != reqwest::StatusCode::OK {
bail!("{} is not authorized for on-demand TLS", domain);
}
Ok(())
}
async fn check_domain_accessibility(&self, domain: &str, session: &str) -> Result<()> { async fn check_domain_accessibility(&self, domain: &str, session: &str) -> Result<()> {
// Returns Ok(()) only if domain is a correct domain name that // Returns Ok(()) only if domain is a correct domain name that
// redirects to this server // redirects to this server
@ -511,7 +345,7 @@ impl CertStore {
let certser = CertSer { let certser = CertSer {
hostname: domain.to_string(), hostname: domain.to_string(),
date: Utc::now().date_naive(), date: Utc::today().naive_utc(),
valid_days: 1024, valid_days: 1024,
key_pem: cert.serialize_private_key_pem(), key_pem: cert.serialize_private_key_pem(),
cert_pem: cert.serialize_pem()?, cert_pem: cert.serialize_pem()?,

245
src/consul.rs Normal file
View file

@ -0,0 +1,245 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use anyhow::{bail, Result};
use bytes::Bytes;
use log::*;
use reqwest::StatusCode;
use serde::{Deserialize, Serialize};
/// Connection parameters for the Consul agent.
///
/// `ca_cert`, `client_cert` and `client_key` are paths to PEM files;
/// either all three are set (mutual-TLS connection) or none is
/// (plain HTTP) — any other combination is rejected by `Consul::new`.
#[derive(Debug, Clone)]
pub struct ConsulConfig {
    /// Base address of the Consul agent, e.g. `http://127.0.0.1:8500`.
    pub addr: String,
    /// Path to the CA certificate used to verify the Consul server.
    pub ca_cert: Option<String>,
    /// Path to the client certificate presented to Consul.
    pub client_cert: Option<String>,
    /// Path to the private key matching `client_cert`.
    pub client_key: Option<String>,
}
// ---- Watch and retrieve Consul catalog ----
//
/// One node entry as returned by Consul's catalog API.
///
/// Consul's JSON uses PascalCase field names, mapped here with a
/// single container-level `rename_all` instead of per-field renames.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
pub struct ConsulNode {
    pub node: String,
    pub address: String,
    pub meta: HashMap<String, String>,
}
/// One service registration on a node, as returned by the Consul
/// catalog API (PascalCase JSON field names).
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
pub struct ConsulServiceEntry {
    pub service: String,
    pub address: String,
    pub port: u16,
    pub tags: Vec<String>,
}
/// Response of Consul's `/v1/catalog/node/<node>` endpoint: the node
/// itself plus the services registered on it, keyed by service id.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
pub struct ConsulNodeCatalog {
    pub node: ConsulNode,
    pub services: HashMap<String, ConsulServiceEntry>,
}
// ---- Consul session management ----
/// Body of a Consul session-creation request.
///
/// Field renames map to the PascalCase names of Consul's session API;
/// `TTL` and the other fields are kept as per-field renames because
/// `TTL` does not follow PascalCase.
#[derive(Serialize, Deserialize, Debug)]
pub struct ConsulSessionRequest {
    /// Human-readable session name.
    #[serde(rename = "Name")]
    pub name: String,
    /// Node to attach the session to, if any.
    #[serde(rename = "Node")]
    pub node: Option<String>,
    /// Duration string (e.g. "30m") — presumably Consul's lock-delay
    /// for the session; confirm against the Consul session API.
    #[serde(rename = "LockDelay")]
    pub lock_delay: Option<String>,
    /// Session time-to-live, as a duration string.
    #[serde(rename = "TTL")]
    pub ttl: Option<String>,
    /// Behavior on session invalidation (Consul accepts e.g. "release"
    /// or "delete" — confirm against the Consul session API).
    #[serde(rename = "Behavior")]
    pub behavior: Option<String>,
}
/// Response of a Consul session-creation request: the id of the
/// newly created session.
#[derive(Serialize, Deserialize, Debug)]
pub struct ConsulSessionResponse {
    #[serde(rename = "ID")]
    pub id: String,
}
/// HTTP client for the Consul agent API.
///
/// Cheap to clone: `reqwest::Client` is internally reference-counted.
#[derive(Clone)]
pub struct Consul {
    client: reqwest::Client,
    // Base URL of the Consul agent, stored without a trailing slash
    // (normalized in `new`).
    url: String,
    // Prefix prepended to every key in the KV endpoints.
    kv_prefix: String,
    // Name of the local Consul node, as passed to `new`.
    pub local_node: String,
}
impl Consul {
    /// Creates a Consul client from the given configuration.
    ///
    /// TLS with client authentication is enabled iff `ca_cert`,
    /// `client_cert` and `client_key` are all set in `config`;
    /// setting only some of them is rejected as an error.
    /// Trailing slashes are stripped from `config.addr`.
    pub fn new(config: ConsulConfig, kv_prefix: &str, local_node: &str) -> Result<Self> {
        let client = match (&config.ca_cert, &config.client_cert, &config.client_key) {
            (Some(ca_cert), Some(client_cert), Some(client_key)) => {
                // Load the PEM files for the CA and the client identity
                let mut ca_cert_buf = vec![];
                File::open(ca_cert)?.read_to_end(&mut ca_cert_buf)?;

                let mut client_cert_buf = vec![];
                File::open(client_cert)?.read_to_end(&mut client_cert_buf)?;

                let mut client_key_buf = vec![];
                File::open(client_key)?.read_to_end(&mut client_key_buf)?;

                reqwest::Client::builder()
                    .use_rustls_tls()
                    .add_root_certificate(reqwest::Certificate::from_pem(&ca_cert_buf[..])?)
                    // reqwest takes the client identity as a single PEM bundle
                    // containing both the certificate and the private key
                    .identity(reqwest::Identity::from_pem(
                        &[&client_cert_buf[..], &client_key_buf[..]].concat()[..],
                    )?)
                    .build()?
            }
            (None, None, None) => reqwest::Client::new(),
            _ => bail!("Incomplete Consul TLS configuration parameters"),
        };

        Ok(Self {
            client,
            url: config.addr.trim_end_matches('/').to_string(),
            kv_prefix: kv_prefix.to_string(),
            local_node: local_node.into(),
        })
    }

    /// Lists all nodes registered in the Consul catalog.
    pub async fn list_nodes(&self) -> Result<Vec<ConsulNode>> {
        debug!("list_nodes");

        let url = format!("{}/v1/catalog/nodes", self.url);
        // Check the HTTP status before decoding so that an error response
        // surfaces as such instead of as a JSON deserialization failure.
        let http = self.client.get(&url).send().await?.error_for_status()?;
        let resp: Vec<ConsulNode> = http.json().await?;
        Ok(resp)
    }

    /// Retrieves the catalog of services registered on node `host`.
    ///
    /// If `idx` is `Some`, this is a Consul blocking query: the server
    /// answers only once its index for this endpoint goes past `idx`.
    /// Returns the catalog together with the new index (taken from the
    /// `X-Consul-Index` response header) to pass to the next call.
    pub async fn watch_node(
        &self,
        host: &str,
        idx: Option<usize>,
    ) -> Result<(ConsulNodeCatalog, usize)> {
        debug!("watch_node {} {:?}", host, idx);

        let url = match idx {
            Some(i) => format!("{}/v1/catalog/node/{}?index={}", self.url, host, i),
            None => format!("{}/v1/catalog/node/{}", self.url, host),
        };

        // Fail early on HTTP-level errors; on success the response (and its
        // headers) is passed through unchanged by error_for_status().
        let http = self.client.get(&url).send().await?.error_for_status()?;
        let new_idx = match http.headers().get("X-Consul-Index") {
            Some(v) => v.to_str()?.parse::<usize>()?,
            None => bail!("X-Consul-Index header not found"),
        };

        let resp: ConsulNodeCatalog = http.json().await?;
        Ok((resp, new_idx))
    }

    // ---- KV get and put ----

    /// Fetches the raw value stored under `kv_prefix + key`,
    /// or `None` if the key does not exist.
    pub async fn kv_get(&self, key: &str) -> Result<Option<Bytes>> {
        debug!("kv_get {}", key);

        let url = format!("{}/v1/kv/{}{}?raw", self.url, self.kv_prefix, key);
        let http = self.client.get(&url).send().await?;
        match http.status() {
            StatusCode::OK => Ok(Some(http.bytes().await?)),
            StatusCode::NOT_FOUND => Ok(None),
            _ => Err(anyhow!(
                "Consul request failed: {:?}",
                http.error_for_status()
            )),
        }
    }

    /// Fetches and JSON-decodes the value stored under `kv_prefix + key`,
    /// or `None` if the key does not exist.
    pub async fn kv_get_json<T: for<'de> Deserialize<'de>>(&self, key: &str) -> Result<Option<T>> {
        debug!("kv_get_json {}", key);

        let url = format!("{}/v1/kv/{}{}?raw", self.url, self.kv_prefix, key);
        let http = self.client.get(&url).send().await?;
        match http.status() {
            StatusCode::OK => Ok(Some(http.json().await?)),
            StatusCode::NOT_FOUND => Ok(None),
            _ => Err(anyhow!(
                "Consul request failed: {:?}",
                http.error_for_status()
            )),
        }
    }

    /// Stores raw bytes under `kv_prefix + key`.
    pub async fn kv_put(&self, key: &str, bytes: Bytes) -> Result<()> {
        debug!("kv_put {}", key);

        let url = format!("{}/v1/kv/{}{}", self.url, self.kv_prefix, key);
        let http = self.client.put(&url).body(bytes).send().await?;
        http.error_for_status()?;
        Ok(())
    }

    /// JSON-encodes `value` and stores it under `kv_prefix + key`.
    pub async fn kv_put_json<T: Serialize>(&self, key: &str, value: &T) -> Result<()> {
        debug!("kv_put_json {}", key);

        let url = format!("{}/v1/kv/{}{}", self.url, self.kv_prefix, key);
        let http = self.client.put(&url).json(value).send().await?;
        http.error_for_status()?;
        Ok(())
    }

    /// Deletes the value stored under `kv_prefix + key`.
    pub async fn kv_delete(&self, key: &str) -> Result<()> {
        debug!("kv_delete {}", key);

        let url = format!("{}/v1/kv/{}{}", self.url, self.kv_prefix, key);
        let http = self.client.delete(&url).send().await?;
        http.error_for_status()?;
        Ok(())
    }

    // ---- Locking ----

    /// Creates a Consul session and returns its ID, for use with
    /// [`Consul::acquire`] and [`Consul::release`].
    pub async fn create_session(&self, req: &ConsulSessionRequest) -> Result<String> {
        debug!("create_session {:?}", req);

        let url = format!("{}/v1/session/create", self.url);
        let http = self
            .client
            .put(&url)
            .json(req)
            .send()
            .await?
            .error_for_status()?;
        let resp: ConsulSessionResponse = http.json().await?;
        Ok(resp.id)
    }

    /// Tries to acquire the lock on `kv_prefix + key` for `session`,
    /// writing `bytes` as the key's value.
    /// Returns `true` iff the lock was acquired.
    pub async fn acquire(&self, key: &str, bytes: Bytes, session: &str) -> Result<bool> {
        debug!("acquire {}", key);

        let url = format!(
            "{}/v1/kv/{}{}?acquire={}",
            self.url, self.kv_prefix, key, session
        );
        let http = self
            .client
            .put(&url)
            .body(bytes)
            .send()
            .await?
            .error_for_status()?;
        let resp: bool = http.json().await?;
        Ok(resp)
    }

    /// Releases the lock held by `session` on `kv_prefix + key`,
    /// writing `bytes` as the key's value.
    pub async fn release(&self, key: &str, bytes: Bytes, session: &str) -> Result<()> {
        debug!("release {}", key);

        let url = format!(
            "{}/v1/kv/{}{}?release={}",
            self.url, self.kv_prefix, key, session
        );
        let http = self.client.put(&url).body(bytes).send().await?;
        http.error_for_status()?;
        Ok(())
    }
}

View file

@ -2,7 +2,7 @@ use std::net::SocketAddr;
use std::sync::Arc; use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use tracing::*; use log::*;
use futures::future::Future; use futures::future::Future;
use http::uri::Authority; use http::uri::Authority;

View file

@ -1,10 +1,10 @@
use std::convert::Infallible; use std::convert::Infallible;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::sync::{atomic::Ordering, Arc}; use std::sync::{atomic::Ordering, Arc};
use std::time::{Duration, Instant}; use std::time::Duration;
use anyhow::Result; use anyhow::Result;
use tracing::*; use log::*;
use accept_encoding_fork::Encoding; use accept_encoding_fork::Encoding;
use async_compression::tokio::bufread::*; use async_compression::tokio::bufread::*;
@ -21,10 +21,8 @@ use tokio::sync::watch;
use tokio_rustls::TlsAcceptor; use tokio_rustls::TlsAcceptor;
use tokio_util::io::{ReaderStream, StreamReader}; use tokio_util::io::{ReaderStream, StreamReader};
use opentelemetry::{metrics, KeyValue};
use crate::cert_store::{CertStore, StoreResolver}; use crate::cert_store::{CertStore, StoreResolver};
use crate::proxy_config::{HostDescription, ProxyConfig, ProxyEntry}; use crate::proxy_config::ProxyConfig;
use crate::reverse_proxy; use crate::reverse_proxy;
const MAX_CONNECTION_LIFETIME: Duration = Duration::from_secs(24 * 3600); const MAX_CONNECTION_LIFETIME: Duration = Duration::from_secs(24 * 3600);
@ -33,15 +31,6 @@ pub struct HttpsConfig {
pub bind_addr: SocketAddr, pub bind_addr: SocketAddr,
pub enable_compression: bool, pub enable_compression: bool,
pub compress_mime_types: Vec<String>, pub compress_mime_types: Vec<String>,
// used internally to convert Instants to u64
pub time_origin: Instant,
}
struct HttpsMetrics {
requests_received: metrics::Counter<u64>,
requests_served: metrics::Counter<u64>,
request_proxy_duration: metrics::Histogram<f64>,
} }
pub async fn serve_https( pub async fn serve_https(
@ -52,22 +41,6 @@ pub async fn serve_https(
) -> Result<()> { ) -> Result<()> {
let config = Arc::new(config); let config = Arc::new(config);
let meter = opentelemetry::global::meter("tricot");
let metrics = Arc::new(HttpsMetrics {
requests_received: meter
.u64_counter("https_requests_received")
.with_description("Total number of requests received over HTTPS")
.init(),
requests_served: meter
.u64_counter("https_requests_served")
.with_description("Total number of requests served over HTTPS")
.init(),
request_proxy_duration: meter
.f64_histogram("https_request_proxy_duration")
.with_description("Duration between time when request was received, and time when backend returned status code and headers")
.init(),
});
let mut tls_cfg = rustls::ServerConfig::builder() let mut tls_cfg = rustls::ServerConfig::builder()
.with_safe_defaults() .with_safe_defaults()
.with_no_client_auth() .with_no_client_auth()
@ -98,31 +71,20 @@ pub async fn serve_https(
let rx_proxy_config = rx_proxy_config.clone(); let rx_proxy_config = rx_proxy_config.clone();
let tls_acceptor = tls_acceptor.clone(); let tls_acceptor = tls_acceptor.clone();
let config = config.clone(); let config = config.clone();
let metrics = metrics.clone();
let mut must_exit_2 = must_exit.clone(); let mut must_exit_2 = must_exit.clone();
let conn = tokio::spawn(async move { let conn = tokio::spawn(async move {
match tls_acceptor.accept(socket).await { match tls_acceptor.accept(socket).await {
Ok(stream) => { Ok(stream) => {
debug!("TLS handshake was successfull"); debug!("TLS handshake was successfull");
let http_conn = Http::new() let http_conn = Http::new().serve_connection(
.serve_connection( stream,
stream, service_fn(move |req: Request<Body>| {
service_fn(move |req: Request<Body>| { let https_config = config.clone();
let https_config = config.clone(); let proxy_config: Arc<ProxyConfig> = rx_proxy_config.borrow().clone();
let proxy_config: Arc<ProxyConfig> = handle_outer(remote_addr, req, https_config, proxy_config)
rx_proxy_config.borrow().clone(); }),
let metrics = metrics.clone(); );
handle_request(
remote_addr,
req,
https_config,
proxy_config,
metrics,
)
}),
)
.with_upgrades();
let timeout = tokio::time::sleep(MAX_CONNECTION_LIFETIME); let timeout = tokio::time::sleep(MAX_CONNECTION_LIFETIME);
tokio::pin!(http_conn, timeout); tokio::pin!(http_conn, timeout);
let http_result = loop { let http_result = loop {
@ -154,89 +116,58 @@ pub async fn serve_https(
Ok(()) Ok(())
} }
async fn handle_request( async fn handle_outer(
remote_addr: SocketAddr, remote_addr: SocketAddr,
req: Request<Body>, req: Request<Body>,
https_config: Arc<HttpsConfig>, https_config: Arc<HttpsConfig>,
proxy_config: Arc<ProxyConfig>, proxy_config: Arc<ProxyConfig>,
metrics: Arc<HttpsMetrics>,
) -> Result<Response<Body>, Infallible> { ) -> Result<Response<Body>, Infallible> {
let method_tag = KeyValue::new("method", req.method().to_string()); match handle(remote_addr, req, https_config, proxy_config).await {
Err(e) => {
// The host tag is only included in the requests_received metric, warn!("Handler error: {}", e);
// as for other metrics it can easily lead to cardinality explosions. Ok(Response::builder()
let host_tag = KeyValue::new( .status(StatusCode::INTERNAL_SERVER_ERROR)
"host", .body(Body::from(format!("{}", e)))
req.uri() .unwrap())
.authority() }
.map(|auth| auth.to_string()) Ok(r) => Ok(r),
.or_else(|| { }
req.headers()
.get("host")
.map(|host| host.to_str().unwrap_or_default().to_string())
})
.unwrap_or_default(),
);
metrics
.requests_received
.add(1, &[host_tag, method_tag.clone()]);
let mut tags = vec![method_tag];
let resp = select_target_and_proxy(
&https_config,
&proxy_config,
&metrics,
remote_addr,
req,
&mut tags,
)
.await;
tags.push(KeyValue::new("status_code", resp.status().as_u16() as i64));
metrics.requests_served.add(1, &tags);
Ok(resp)
} }
// Custom echo service, handling two different routes and a // Custom echo service, handling two different routes and a
// catch-all 404 responder. // catch-all 404 responder.
async fn select_target_and_proxy( async fn handle(
https_config: &HttpsConfig,
proxy_config: &ProxyConfig,
metrics: &HttpsMetrics,
remote_addr: SocketAddr, remote_addr: SocketAddr,
req: Request<Body>, req: Request<Body>,
tags: &mut Vec<KeyValue>, https_config: Arc<HttpsConfig>,
) -> Response<Body> { proxy_config: Arc<ProxyConfig>,
let received_time = Instant::now(); ) -> Result<Response<Body>, anyhow::Error> {
let method = req.method().clone(); let method = req.method().clone();
let uri = req.uri().to_string(); let uri = req.uri().to_string();
let host = if let Some(auth) = req.uri().authority() { let host = if let Some(auth) = req.uri().authority() {
auth.as_str() auth.as_str()
} else { } else {
match req.headers().get("host").and_then(|x| x.to_str().ok()) { req.headers()
Some(host) => host, .get("host")
None => { .ok_or_else(|| anyhow!("Missing host header"))?
return Response::builder() .to_str()?
.status(StatusCode::BAD_REQUEST)
.body(Body::from("Missing Host header"))
.unwrap();
}
}
}; };
let domain = match host.split_once(':') {
Some((domain, _port)) => domain,
_ => host,
};
debug!("Matching on domain {}", domain);
let path = req.uri().path(); let path = req.uri().path();
let accept_encoding = accept_encoding_fork::encodings(req.headers()).unwrap_or_else(|_| vec![]);
let best_match = proxy_config let best_match = proxy_config
.entries .entries
.iter() .iter()
.filter(|ent| { .filter(|ent| {
ent.flags.healthy ent.host.matches(domain)
&& ent.url_prefix.host.matches(host)
&& ent && ent
.url_prefix
.path_prefix .path_prefix
.as_ref() .as_ref()
.map(|prefix| path.starts_with(prefix)) .map(|prefix| path.starts_with(prefix))
@ -245,147 +176,66 @@ async fn select_target_and_proxy(
.max_by_key(|ent| { .max_by_key(|ent| {
( (
ent.priority, ent.priority,
ent.url_prefix ent.path_prefix
.path_prefix
.as_ref() .as_ref()
.map(|x| x.len() as i32) .map(|x| x.len() as i32)
.unwrap_or(0), .unwrap_or(0),
(ent.flags.same_node || ent.flags.site_lb || ent.flags.global_lb), ent.same_node,
(ent.flags.same_site || ent.flags.global_lb), ent.same_site,
-ent.calls_in_progress.load(Ordering::SeqCst), -ent.calls.load(Ordering::SeqCst),
-ent.last_call.load(Ordering::SeqCst),
) )
}); });
if let Some(proxy_to) = best_match { if let Some(proxy_to) = best_match {
tags.push(KeyValue::new("service", proxy_to.service_name.clone())); proxy_to.calls.fetch_add(1, Ordering::SeqCst);
tags.push(KeyValue::new(
"target_addr",
proxy_to.target_addr.to_string(),
));
tags.push(KeyValue::new("same_node", proxy_to.flags.same_node));
tags.push(KeyValue::new("same_site", proxy_to.flags.same_site));
proxy_to.last_call.fetch_max(
(received_time - https_config.time_origin).as_millis() as i64,
Ordering::Relaxed,
);
proxy_to.calls_in_progress.fetch_add(1, Ordering::SeqCst);
// Forward to backend
debug!("{}{} -> {}", host, path, proxy_to); debug!("{}{} -> {}", host, path, proxy_to);
trace!("Request: {:?}", req); trace!("Request: {:?}", req);
let response = if let Some(http_res) = try_redirect(host, path, proxy_to) { let mut response = if proxy_to.https_target {
// redirection middleware let to_addr = format!("https://{}", proxy_to.target_addr);
http_res handle_error(reverse_proxy::call_https(remote_addr.ip(), &to_addr, req).await)
} else { } else {
// proxying to backend let to_addr = format!("http://{}", proxy_to.target_addr);
match do_proxy(https_config, remote_addr, req, proxy_to).await { handle_error(reverse_proxy::call(remote_addr.ip(), &to_addr, req).await)
Ok(resp) => resp,
Err(e) => Response::builder()
.status(StatusCode::BAD_GATEWAY)
.body(Body::from(format!("Proxy error: {}", e)))
.unwrap(),
}
}; };
proxy_to.calls_in_progress.fetch_sub(1, Ordering::SeqCst); if response.status().is_success() {
metrics // (TODO: maybe we want to add these headers even if it's not a success?)
.request_proxy_duration for (header, value) in proxy_to.add_headers.iter() {
.record(received_time.elapsed().as_secs_f64(), tags); response.headers_mut().insert(
HeaderName::from_bytes(header.as_bytes())?,
HeaderValue::from_str(value)?,
);
}
}
if https_config.enable_compression {
response =
try_compress(response, method.clone(), accept_encoding, &https_config).await?
};
trace!("Final response: {:?}", response); trace!("Final response: {:?}", response);
info!("{} {} {}", method, response.status().as_u16(), uri); info!("{} {} {}", method, response.status().as_u16(), uri);
response Ok(response)
} else { } else {
debug!("{}{} -> NOT FOUND", host, path); debug!("{}{} -> NOT FOUND", host, path);
info!("{} 404 {}", method, uri); info!("{} 404 {}", method, uri);
Response::builder() Ok(Response::builder()
.status(StatusCode::NOT_FOUND) .status(StatusCode::NOT_FOUND)
.body(Body::from("No matching proxy entry")) .body(Body::from("No matching proxy entry"))?)
.unwrap()
} }
} }
fn try_redirect(req_host: &str, req_path: &str, proxy_to: &ProxyEntry) -> Option<Response<Body>> { fn handle_error(resp: Result<Response<Body>>) -> Response<Body> {
let maybe_redirect = proxy_to.redirects.iter().find(|(src, _, _)| { match resp {
let mut matched: bool = src.host.matches(req_host); Ok(resp) => resp,
Err(e) => Response::builder()
if let Some(path) = &src.path_prefix { .status(StatusCode::BAD_GATEWAY)
matched &= req_path.starts_with(path); .body(Body::from(format!("Proxy error: {}", e)))
} .unwrap(),
matched
});
let (src_prefix, dst_prefix, code) = match maybe_redirect {
None => return None,
Some(redirect) => redirect,
};
let new_host = match &dst_prefix.host {
HostDescription::Hostname(h) => h,
_ => unreachable!(), // checked when ProxyEntry is created
};
let new_prefix = dst_prefix.path_prefix.as_deref().unwrap_or("");
let original_prefix = src_prefix.path_prefix.as_deref().unwrap_or("");
let suffix = &req_path[original_prefix.len()..];
let uri = format!("https://{}{}{}", new_host, new_prefix, suffix);
let status = match StatusCode::from_u16(*code) {
Err(e) => {
warn!(
"Couldn't redirect {}{} to {} as code {} in invalid: {}",
req_host, req_path, uri, code, e
);
return None;
}
Ok(sc) => sc,
};
Response::builder()
.header("Location", uri.clone())
.status(status)
.body(Body::from(uri))
.ok()
}
async fn do_proxy(
https_config: &HttpsConfig,
remote_addr: SocketAddr,
req: Request<Body>,
proxy_to: &ProxyEntry,
) -> Result<Response<Body>> {
let method = req.method().clone();
let accept_encoding = accept_encoding_fork::encodings(req.headers()).unwrap_or_else(|_| vec![]);
let mut response = if proxy_to.https_target {
let to_addr = format!("https://{}", proxy_to.target_addr);
reverse_proxy::call_https(remote_addr.ip(), &to_addr, req).await?
} else {
let to_addr = format!("http://{}", proxy_to.target_addr);
reverse_proxy::call(remote_addr.ip(), &to_addr, req).await?
};
if response.status().is_success() {
// (TODO: maybe we want to add these headers even if it's not a success?)
for (header, value) in proxy_to.add_headers.iter() {
response.headers_mut().insert(
HeaderName::from_bytes(header.as_bytes())?,
HeaderValue::from_str(value)?,
);
}
} }
if https_config.enable_compression {
response = try_compress(response, method, accept_encoding, https_config).await?
};
Ok(response)
} }
async fn try_compress( async fn try_compress(

View file

@ -1,10 +1,8 @@
#[macro_use] #[macro_use]
extern crate anyhow; extern crate anyhow;
use std::collections::BTreeMap; use log::*;
use std::sync::Arc; use std::sync::Arc;
use std::time::Instant;
use tracing::*;
use futures::{FutureExt, TryFutureExt}; use futures::{FutureExt, TryFutureExt};
use std::net::SocketAddr; use std::net::SocketAddr;
@ -14,14 +12,13 @@ use tokio::sync::watch;
mod cert; mod cert;
mod cert_store; mod cert_store;
mod consul;
mod http; mod http;
mod https; mod https;
mod metrics;
mod proxy_config; mod proxy_config;
mod reverse_proxy; mod reverse_proxy;
mod tls_util; mod tls_util;
pub use df_consul as consul;
use proxy_config::ProxyConfig; use proxy_config::ProxyConfig;
#[cfg(feature = "dhat-heap")] #[cfg(feature = "dhat-heap")]
@ -43,10 +40,6 @@ struct Opt {
#[structopt(long = "consul-ca-cert", env = "TRICOT_CONSUL_CA_CERT")] #[structopt(long = "consul-ca-cert", env = "TRICOT_CONSUL_CA_CERT")]
pub consul_ca_cert: Option<String>, pub consul_ca_cert: Option<String>,
/// Skip TLS verification for Consul
#[structopt(long = "consul-tls-skip-verify", env = "TRICOT_CONSUL_TLS_SKIP_VERIFY")]
pub consul_tls_skip_verify: bool,
/// Client certificate for Consul server with TLS /// Client certificate for Consul server with TLS
#[structopt(long = "consul-client-cert", env = "TRICOT_CONSUL_CLIENT_CERT")] #[structopt(long = "consul-client-cert", env = "TRICOT_CONSUL_CLIENT_CERT")]
pub consul_client_cert: Option<String>, pub consul_client_cert: Option<String>,
@ -83,10 +76,6 @@ struct Opt {
)] )]
pub https_bind_addr: SocketAddr, pub https_bind_addr: SocketAddr,
/// Bind address for metrics server (Prometheus format over HTTP)
#[structopt(long = "metrics-bind-addr", env = "TRICOT_METRICS_BIND_ADDR")]
pub metrics_bind_addr: Option<SocketAddr>,
/// E-mail address for Let's Encrypt certificate requests /// E-mail address for Let's Encrypt certificate requests
#[structopt(long = "letsencrypt-email", env = "TRICOT_LETSENCRYPT_EMAIL")] #[structopt(long = "letsencrypt-email", env = "TRICOT_LETSENCRYPT_EMAIL")]
pub letsencrypt_email: String, pub letsencrypt_email: String,
@ -102,12 +91,6 @@ struct Opt {
default_value = "text/html,text/plain,text/css,text/javascript,text/xml,application/javascript,application/json,application/xml,image/svg+xml,font/ttf" default_value = "text/html,text/plain,text/css,text/javascript,text/xml,application/javascript,application/json,application/xml,image/svg+xml,font/ttf"
)] )]
pub compress_mime_types: String, pub compress_mime_types: String,
#[structopt(
long = "warmup-cert-memory-store",
env = "TRICOT_WARMUP_CERT_MEMORY_STORE"
)]
pub warmup_cert_memory_store: bool,
} }
#[tokio::main(flavor = "multi_thread", worker_threads = 10)] #[tokio::main(flavor = "multi_thread", worker_threads = 10)]
@ -118,10 +101,7 @@ async fn main() {
if std::env::var("RUST_LOG").is_err() { if std::env::var("RUST_LOG").is_err() {
std::env::set_var("RUST_LOG", "tricot=info") std::env::set_var("RUST_LOG", "tricot=info")
} }
tracing_subscriber::fmt() pretty_env_logger::init();
.with_writer(std::io::stderr)
.with_env_filter(tracing_subscriber::filter::EnvFilter::from_default_env())
.init();
// Abort on panic (same behavior as in Go) // Abort on panic (same behavior as in Go)
std::panic::set_hook(Box::new(|panic_info| { std::panic::set_hook(Box::new(|panic_info| {
@ -139,44 +119,24 @@ async fn main() {
let _ = provoke_exit.send(true); let _ = provoke_exit.send(true);
}; };
let metrics_server = metrics::MetricsServer::init(opt.metrics_bind_addr); let consul_config = consul::ConsulConfig {
let consul_config = consul::Config {
addr: opt.consul_addr.clone(), addr: opt.consul_addr.clone(),
ca_cert: opt.consul_ca_cert.clone(), ca_cert: opt.consul_ca_cert.clone(),
tls_skip_verify: opt.consul_tls_skip_verify,
client_cert: opt.consul_client_cert.clone(), client_cert: opt.consul_client_cert.clone(),
client_key: opt.consul_client_key.clone(), client_key: opt.consul_client_key.clone(),
}; };
let consul = consul::Consul::new(consul_config, &opt.consul_kv_prefix) let consul = consul::Consul::new(consul_config, &opt.consul_kv_prefix, &opt.node_name)
.expect("Error creating Consul client"); .expect("Error creating Consul client");
let rx_proxy_config = proxy_config::spawn_proxy_config_task( let rx_proxy_config =
consul.clone(), proxy_config::spawn_proxy_config_task(consul.clone(), exit_signal.clone());
opt.node_name.clone(),
exit_signal.clone(),
);
let cert_store = cert_store::CertStore::new( let cert_store = cert_store::CertStore::new(
consul.clone(), consul.clone(),
opt.node_name.clone(),
rx_proxy_config.clone(), rx_proxy_config.clone(),
opt.letsencrypt_email.clone(), opt.letsencrypt_email.clone(),
exit_on_err.clone(), exit_on_err.clone(),
); );
if opt.warmup_cert_memory_store {
match cert_store.warmup_memory_store().await {
Err(e) => error!("An error occured while warming up the certificate memory store with Consul data, continue without caching: {e}"),
_ => (),
};
}
let metrics_task = tokio::spawn(
metrics_server
.run(wait_from(exit_signal.clone()))
.map_err(exit_on_err.clone())
.then(|_| async { info!("Metrics server exited") }),
);
let http_task = tokio::spawn( let http_task = tokio::spawn(
http::serve_http( http::serve_http(
@ -196,7 +156,6 @@ async fn main() {
.split(',') .split(',')
.map(|x| x.to_string()) .map(|x| x.to_string())
.collect(), .collect(),
time_origin: Instant::now(),
}; };
let https_task = tokio::spawn( let https_task = tokio::spawn(
@ -212,58 +171,29 @@ async fn main() {
let dump_task = tokio::spawn(dump_config_on_change(rx_proxy_config, exit_signal.clone())); let dump_task = tokio::spawn(dump_config_on_change(rx_proxy_config, exit_signal.clone()));
metrics_task.await.expect("Tokio task await failure"); let _ = http_task.await.expect("Tokio task await failure");
http_task.await.expect("Tokio task await failure"); let _ = https_task.await.expect("Tokio task await failure");
https_task.await.expect("Tokio task await failure"); let _ = dump_task.await.expect("Tokio task await failure");
dump_task.await.expect("Tokio task await failure");
} }
async fn dump_config_on_change( async fn dump_config_on_change(
mut rx_proxy_config: watch::Receiver<Arc<ProxyConfig>>, mut rx_proxy_config: watch::Receiver<Arc<ProxyConfig>>,
mut must_exit: watch::Receiver<bool>, mut must_exit: watch::Receiver<bool>,
) { ) {
let mut old_cfg: Arc<ProxyConfig> = rx_proxy_config.borrow().clone();
while !*must_exit.borrow() { while !*must_exit.borrow() {
select!( select!(
c = rx_proxy_config.changed() => { c = rx_proxy_config.changed() => {
if c.is_err() { if !c.is_ok() {
break; break;
} }
} }
_ = must_exit.changed() => continue, _ = must_exit.changed() => continue,
); );
println!("---- PROXY CONFIGURATION ----");
let cfg: Arc<ProxyConfig> = rx_proxy_config.borrow().clone(); for ent in rx_proxy_config.borrow().entries.iter() {
if cfg != old_cfg { println!(" {}", ent);
let mut cfg_map = BTreeMap::<_, Vec<_>>::new();
for ent in cfg.entries.iter() {
cfg_map
.entry((&ent.url_prefix.host, &ent.url_prefix.path_prefix))
.or_default()
.push(ent);
}
println!(
"---- PROXY CONFIGURATION at {} ----",
chrono::offset::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true)
);
for ((host, prefix), ents) in cfg_map.iter_mut() {
println!("{}{}:", host, prefix.as_deref().unwrap_or_default());
for ent in ents.iter() {
print!(" ");
if !ent.flags.healthy {
print!("/!\\ ");
} else {
print!(" ");
}
println!("{}", ent);
}
}
println!();
old_cfg = cfg;
} }
println!();
} }
} }

View file

@ -1,111 +0,0 @@
use std::convert::Infallible;
use std::net::SocketAddr;
use std::sync::Arc;
use anyhow::Result;
use futures::future::*;
use tracing::*;
use hyper::{
header::CONTENT_TYPE,
service::{make_service_fn, service_fn},
Body, Method, Request, Response, Server,
};
use opentelemetry::sdk::metrics;
use prometheus::{Encoder, TextEncoder};
pub struct MetricsServer {
bind_addr: Option<SocketAddr>,
registry: prometheus::Registry,
}
impl MetricsServer {
pub fn init(bind_addr: Option<SocketAddr>) -> MetricsServer {
let registry = prometheus::Registry::new();
let exporter = opentelemetry_prometheus::exporter()
.with_registry(registry.clone())
.with_aggregation_selector(AggregationSelector)
.without_counter_suffixes()
.build()
.expect("build prometheus registry");
let mp = metrics::MeterProvider::builder()
.with_reader(exporter)
.build();
opentelemetry::global::set_meter_provider(mp);
Self {
bind_addr,
registry,
}
}
pub async fn run(self, shutdown_signal: impl Future<Output = ()>) -> Result<()> {
if let Some(addr) = self.bind_addr {
let metrics_server = Arc::new(self);
let make_svc = make_service_fn(move |_conn| {
let metrics_server = metrics_server.clone();
async move {
Ok::<_, Infallible>(service_fn(move |req| {
metrics_server.clone().serve_req(req)
}))
}
});
let server = Server::bind(&addr).serve(make_svc);
let graceful = server.with_graceful_shutdown(shutdown_signal);
info!("Metrics server listening on http://{}", addr);
graceful.await?;
} else {
info!("Metrics server is disabled");
}
Ok(())
}
async fn serve_req(
self: Arc<MetricsServer>,
req: Request<Body>,
) -> Result<Response<Body>, hyper::Error> {
debug!("{} {}", req.method(), req.uri());
let response = match (req.method(), req.uri().path()) {
(&Method::GET, "/metrics") => {
let mut buffer = vec![];
let encoder = TextEncoder::new();
let metric_families = self.registry.gather();
encoder.encode(&metric_families, &mut buffer).unwrap();
Response::builder()
.status(200)
.header(CONTENT_TYPE, encoder.format_type())
.body(Body::from(buffer))
.unwrap()
}
_ => Response::builder()
.status(404)
.body(Body::from("Not implemented"))
.unwrap(),
};
Ok(response)
}
}
struct AggregationSelector;
impl metrics::reader::AggregationSelector for AggregationSelector {
fn aggregation(&self, kind: metrics::InstrumentKind) -> metrics::Aggregation {
match kind {
metrics::InstrumentKind::Histogram => metrics::Aggregation::ExplicitBucketHistogram {
boundaries: vec![
0.001, 0.0015, 0.002, 0.003, 0.005, 0.007, 0.01, 0.015, 0.02, 0.03, 0.05, 0.07,
0.1, 0.15, 0.2, 0.3, 0.5, 0.7, 1., 1.5, 2., 3., 5., 7., 10., 15., 20., 30.,
40., 50., 60., 70., 100.,
],
record_min_max: true,
},
_ => metrics::reader::DefaultAggregationSelector::new().aggregation(kind),
}
}
}

View file

@ -1,19 +1,21 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::sync::{atomic, Arc}; use std::sync::{atomic, Arc};
use std::time::Duration; use std::{cmp, time::Duration};
use anyhow::Result; use anyhow::Result;
use opentelemetry::{metrics, KeyValue};
use tokio::{select, sync::watch}; use futures::future::BoxFuture;
use tracing::*; use futures::stream::{FuturesUnordered, StreamExt};
use crate::consul; use log::*;
use tokio::{select, sync::watch, time::sleep};
use crate::consul::*;
// ---- Extract proxy config from Consul catalog ---- // ---- Extract proxy config from Consul catalog ----
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug)]
pub enum HostDescription { pub enum HostDescription {
Hostname(String), Hostname(String),
Pattern(glob::Pattern), Pattern(glob::Pattern),
@ -36,153 +38,20 @@ impl HostDescription {
} }
} }
impl std::fmt::Display for HostDescription {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
HostDescription::Hostname(h) => write!(f, "{}", h),
HostDescription::Pattern(p) => write!(f, "[{}]", p.as_str()),
}
}
}
#[derive(Debug, Clone)]
pub struct UrlPrefix {
/// Publicly exposed TLS hostnames for matching this rule
pub host: HostDescription,
/// Path prefix for matching this rule
pub path_prefix: Option<String>,
}
impl PartialEq for UrlPrefix {
fn eq(&self, other: &Self) -> bool {
self.host == other.host && self.path_prefix == other.path_prefix
}
}
impl Eq for UrlPrefix {}
impl UrlPrefix {
fn new(raw_prefix: &str) -> Option<Self> {
let (raw_host, path_prefix) = match raw_prefix.find('/') {
Some(i) => {
let (host, pp) = raw_prefix.split_at(i);
(host, Some(pp.to_string()))
}
None => (raw_prefix, None),
};
let host = match HostDescription::new(raw_host) {
Ok(h) => h,
Err(e) => {
warn!("Invalid hostname pattern {}: {}", raw_host, e);
return None;
}
};
Some(Self { host, path_prefix })
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct ProxyEntry { pub struct ProxyEntry {
/// An Url prefix is made of a host and maybe a path prefix /// Publicly exposed TLS hostnames for matching this rule
pub url_prefix: UrlPrefix, pub host: HostDescription,
/// Path prefix for matching this rule
pub path_prefix: Option<String>,
/// Priority with which this rule is considered (highest first) /// Priority with which this rule is considered (highest first)
pub priority: u32, pub priority: u32,
/// Consul service name
pub service_name: String,
/// Node address (ip+port) to handle requests that match this entry /// Node address (ip+port) to handle requests that match this entry
pub target_addr: SocketAddr, pub target_addr: SocketAddr,
/// Is the target serving HTTPS instead of HTTP? /// Is the target serving HTTPS instead of HTTP?
pub https_target: bool, pub https_target: bool,
/// Flags for target selection
pub flags: ProxyEntryFlags,
/// Add the following headers to all responses returned
/// when matching this rule
pub add_headers: Vec<(String, String)>,
/// Try to match all these redirection before forwarding to the backend
/// when matching this rule
pub redirects: Vec<(UrlPrefix, UrlPrefix, u16)>,
/// Wether or not the domain must be validated before asking a certificate
/// to let's encrypt (only for Glob patterns)
pub on_demand_tls_ask: Option<String>,
/// Number of calls in progress, used to deprioritize slow back-ends
pub calls_in_progress: atomic::AtomicI64,
/// Time of last call, used for round-robin selection
pub last_call: atomic::AtomicI64,
}
impl PartialEq for ProxyEntry {
fn eq(&self, other: &Self) -> bool {
self.url_prefix == other.url_prefix
&& self.priority == other.priority
&& self.service_name == other.service_name
&& self.target_addr == other.target_addr
&& self.https_target == other.https_target
&& self.flags == other.flags
&& self.add_headers == other.add_headers
}
}
impl Eq for ProxyEntry {}
impl ProxyEntry {
fn new(
service_name: String,
frontend: MatchTag,
target_addr: SocketAddr,
middleware: &[ConfigTag],
flags: ProxyEntryFlags,
) -> Self {
let (url_prefix, priority, https_target) = match frontend {
MatchTag::Http(u, p) => (u, p, false),
MatchTag::HttpWithTls(u, p) => (u, p, true),
};
let mut add_headers = vec![];
let mut redirects = vec![];
let mut on_demand_tls_ask: Option<String> = None;
for mid in middleware.into_iter() {
// LocalLb and GlobalLb are handled in the parent function
match mid {
ConfigTag::AddHeader(k, v) => add_headers.push((k.to_string(), v.clone())),
ConfigTag::AddRedirect(m, r, c) => redirects.push(((*m).clone(), (*r).clone(), *c)),
ConfigTag::OnDemandTlsAsk(url) => on_demand_tls_ask = Some(url.to_string()),
ConfigTag::LocalLb | ConfigTag::GlobalLb => (),
};
}
ProxyEntry {
// id
service_name,
// frontend
url_prefix,
priority,
// backend
target_addr,
https_target,
// middleware
flags,
add_headers,
redirects,
on_demand_tls_ask,
// internal
last_call: atomic::AtomicI64::from(0),
calls_in_progress: atomic::AtomicI64::from(0),
}
}
}
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct ProxyEntryFlags {
/// Is the target healthy?
pub healthy: bool,
/// Is the target the same node as we are running on? /// Is the target the same node as we are running on?
/// (if yes priorize it over other matching targets) /// (if yes priorize it over other matching targets)
pub same_node: bool, pub same_node: bool,
@ -190,10 +59,14 @@ pub struct ProxyEntryFlags {
/// (if yes priorize it over other matching targets) /// (if yes priorize it over other matching targets)
pub same_site: bool, pub same_site: bool,
/// Is site-wide load balancing enabled for this service? /// Add the following headers to all responses returned
pub site_lb: bool, /// when matching this rule
/// Is global load balancing enabled for this service? pub add_headers: Vec<(String, String)>,
pub global_lb: bool,
// Counts the number of times this proxy server has been called to
// This implements a round-robin load balancer if there are multiple
// entries for the same host and same path prefix.
pub calls: atomic::AtomicI64,
} }
impl std::fmt::Display for ProxyEntry { impl std::fmt::Display for ProxyEntry {
@ -202,203 +75,148 @@ impl std::fmt::Display for ProxyEntry {
write!(f, "https://")?; write!(f, "https://")?;
} }
write!(f, "{} ", self.target_addr)?; write!(f, "{} ", self.target_addr)?;
match &self.host {
HostDescription::Hostname(h) => write!(f, "{}", h)?,
HostDescription::Pattern(p) => write!(f, "Pattern('{}')", p.as_str())?,
}
write!( write!(
f, f,
"{}{} {}", "{} {}",
self.url_prefix.host, self.path_prefix.as_ref().unwrap_or(&String::new()),
self.url_prefix.path_prefix.as_deref().unwrap_or_default(),
self.priority self.priority
)?; )?;
if !self.flags.healthy { if self.same_node {
write!(f, " UNHEALTHY")?;
}
if self.flags.same_node {
write!(f, " OURSELF")?; write!(f, " OURSELF")?;
} else if self.flags.same_site { } else if self.same_site {
write!(f, " SAME_SITE")?; write!(f, " SAME_SITE")?;
} }
if self.flags.global_lb {
write!(f, " GLOBAL-LB")?;
} else if self.flags.site_lb {
write!(f, " SITE-LB")?;
}
if !self.add_headers.is_empty() { if !self.add_headers.is_empty() {
write!(f, " +Headers: {:?}", self.add_headers)?; write!(f, " +Headers: {:?}", self.add_headers)?;
} }
Ok(()) write!(f, " ({})", self.calls.load(atomic::Ordering::Relaxed))
} }
} }
#[derive(Debug, PartialEq, Eq)] #[derive(Debug)]
pub struct ProxyConfig { pub struct ProxyConfig {
pub entries: Vec<ProxyEntry>, pub entries: Vec<ProxyEntry>,
} }
#[derive(Debug)] fn retry_to_time(retries: u32, max_time: Duration) -> Duration {
enum ParsedTag<'a> { // 1.2^x seems to be a good value to exponentially increase time at a good pace
Frontend(MatchTag), // eg. 1.2^32 = 341 seconds ~= 5 minutes - ie. after 32 retries we wait 5
Middleware(ConfigTag<'a>), // minutes
Duration::from_secs(cmp::min(
max_time.as_secs(),
1.2f64.powf(retries as f64) as u64,
))
} }
#[derive(Debug)] fn parse_tricot_tag(
enum MatchTag { tag: &str,
/// HTTP backend (plain text) target_addr: SocketAddr,
Http(UrlPrefix, u32), add_headers: &[(String, String)],
/// HTTPS backend (TLS encrypted) same_node: bool,
HttpWithTls(UrlPrefix, u32), same_site: bool,
} ) -> Option<ProxyEntry> {
let splits = tag.split(' ').collect::<Vec<_>>();
if (splits.len() != 2 && splits.len() != 3)
|| (splits[0] != "tricot" && splits[0] != "tricot-https")
{
return None;
}
#[derive(Debug)] let (host, path_prefix) = match splits[1].find('/') {
enum ConfigTag<'a> { Some(i) => {
AddHeader(&'a str, String), let (host, pp) = splits[1].split_at(i);
AddRedirect(UrlPrefix, UrlPrefix, u16), (host, Some(pp.to_string()))
OnDemandTlsAsk(&'a str),
GlobalLb,
LocalLb,
}
fn parse_tricot_tags(tag: &str) -> Option<ParsedTag> {
let splits = tag.splitn(4, ' ').collect::<Vec<_>>();
let parsed_tag = match splits.as_slice() {
["tricot", raw_prefix, maybe_priority @ ..] => {
// priority is set to 100 when value is invalid or missing
let priority: u32 = maybe_priority
.iter()
.next()
.map_or(Ok(100), |x| x.parse::<u32>())
.unwrap_or(100);
UrlPrefix::new(raw_prefix)
.map(|prefix| ParsedTag::Frontend(MatchTag::Http(prefix, priority)))
} }
["tricot-https", raw_prefix, maybe_priority @ ..] => { None => (splits[1], None),
// priority is set to 100 when value is invalid or missing
let priority: u32 = maybe_priority
.iter()
.next()
.map_or(Ok(100), |x| x.parse::<u32>())
.unwrap_or(100);
UrlPrefix::new(raw_prefix)
.map(|prefix| ParsedTag::Frontend(MatchTag::HttpWithTls(prefix, priority)))
}
["tricot-add-header", header_key, header_values @ ..] => Some(ParsedTag::Middleware(
ConfigTag::AddHeader(header_key, header_values.join(" ")),
)),
["tricot-add-redirect", raw_match, raw_replace, maybe_raw_code @ ..] => {
let (p_match, p_replace) =
match (UrlPrefix::new(raw_match), UrlPrefix::new(raw_replace)) {
(Some(m), Some(r)) => (m, r),
_ => {
debug!(
"tag {} is ignored, one of the url prefix can't be parsed",
tag
);
return None;
}
};
if matches!(p_replace.host, HostDescription::Pattern(_)) {
debug!(
"tag {} ignored as redirect to a glob pattern is not supported",
tag
);
return None;
}
let maybe_parsed_code = maybe_raw_code
.iter()
.next()
.map(|c| c.parse::<u16>().ok())
.flatten();
let http_code = match maybe_parsed_code {
Some(301) => 301,
Some(302) => 302,
Some(303) => 303,
Some(307) => 307,
_ => {
debug!(
"tag {} has a missing or invalid http code, setting it to 302",
tag
);
302
}
};
Some(ParsedTag::Middleware(ConfigTag::AddRedirect(
p_match, p_replace, http_code,
)))
}
["tricot-on-demand-tls-ask", url, ..] => {
Some(ParsedTag::Middleware(ConfigTag::OnDemandTlsAsk(url)))
}
["tricot-global-lb", ..] => Some(ParsedTag::Middleware(ConfigTag::GlobalLb)),
["tricot-local-lb", ..] => Some(ParsedTag::Middleware(ConfigTag::LocalLb)),
_ => None,
}; };
trace!("tag {} parsed as {:?}", tag, parsed_tag); let priority = match splits.len() {
parsed_tag 3 => splits[2].parse().ok()?,
_ => 100,
};
let host = match HostDescription::new(host) {
Ok(h) => h,
Err(e) => {
warn!("Invalid hostname pattern {}: {}", host, e);
return None;
}
};
Some(ProxyEntry {
target_addr,
https_target: (splits[0] == "tricot-https"),
host,
same_node,
same_site,
path_prefix,
priority,
add_headers: add_headers.to_vec(),
calls: atomic::AtomicI64::from(0),
})
} }
fn parse_consul_service( fn parse_tricot_add_header_tag(tag: &str) -> Option<(String, String)> {
s: &consul::catalog::HealthServiceNode, let splits = tag.splitn(3, ' ').collect::<Vec<_>>();
mut flags: ProxyEntryFlags, if splits.len() == 3 && splits[0] == "tricot-add-header" {
Some((splits[1].to_string(), splits[2].to_string()))
} else {
None
}
}
fn parse_consul_catalog(
catalog: &ConsulNodeCatalog,
same_node: bool,
same_site: bool,
) -> Vec<ProxyEntry> { ) -> Vec<ProxyEntry> {
trace!("Parsing service: {:#?}", s); trace!("Parsing node catalog: {:#?}", catalog);
let ip_addr = match s.service.address.parse() { let mut entries = vec![];
Ok(ip) => ip,
_ => match s.node.address.parse() { for (_, svc) in catalog.services.iter() {
let ip_addr = match svc.address.parse() {
Ok(ip) => ip, Ok(ip) => ip,
_ => { _ => match catalog.node.address.parse() {
warn!( Ok(ip) => ip,
"Could not get address for service {} at node {}", _ => {
s.service.service, s.node.node warn!(
); "Could not get address for service {} at node {}",
return vec![]; svc.service, catalog.node.node
} );
}, continue;
}; }
let addr = SocketAddr::new(ip_addr, s.service.port); },
};
let addr = SocketAddr::new(ip_addr, svc.port);
// tag parsing let (same_node, same_site) = if svc.tags.contains(&"tricot-global-lb".into()) {
let mut collected_middleware = vec![]; (false, false)
let mut collected_frontends = vec![]; } else if svc.tags.contains(&"tricot-site-lb".into()) {
for tag in s.service.tags.iter() { (false, same_site)
match parse_tricot_tags(tag) { } else {
Some(ParsedTag::Frontend(x)) => collected_frontends.push(x), (same_node, same_site)
Some(ParsedTag::Middleware(y)) => collected_middleware.push(y), };
_ => trace!(
"service {}: tag '{}' could not be parsed", let mut add_headers = vec![];
s.service.service, for tag in svc.tags.iter() {
tag if let Some(pair) = parse_tricot_add_header_tag(tag) {
), add_headers.push(pair);
}
}
for tag in svc.tags.iter() {
if let Some(ent) = parse_tricot_tag(tag, addr, &add_headers[..], same_node, same_site) {
entries.push(ent);
}
} }
} }
// some legacy processing that would need a refactor later trace!("Result of parsing catalog:");
for mid in collected_middleware.iter() {
match mid {
ConfigTag::GlobalLb => flags.global_lb = true,
ConfigTag::LocalLb => flags.site_lb = true,
_ => (),
};
}
// build proxy entries
let entries = collected_frontends
.into_iter()
.map(|frt| {
ProxyEntry::new(
s.service.service.clone(),
frt,
addr,
collected_middleware.as_ref(),
flags,
)
})
.collect::<Vec<_>>();
trace!("Result of parsing service:");
for ent in entries.iter() { for ent in entries.iter() {
trace!(" {}", ent); trace!(" {}", ent);
} }
@ -406,128 +224,143 @@ fn parse_consul_service(
entries entries
} }
#[derive(Default)]
struct NodeWatchState {
last_idx: Option<usize>,
last_catalog: Option<ConsulNodeCatalog>,
retries: u32,
}
pub fn spawn_proxy_config_task( pub fn spawn_proxy_config_task(
consul: consul::Consul, consul: Consul,
local_node: String,
mut must_exit: watch::Receiver<bool>, mut must_exit: watch::Receiver<bool>,
) -> watch::Receiver<Arc<ProxyConfig>> { ) -> watch::Receiver<Arc<ProxyConfig>> {
let (tx, rx) = watch::channel(Arc::new(ProxyConfig { let (tx, rx) = watch::channel(Arc::new(ProxyConfig {
entries: Vec::new(), entries: Vec::new(),
})); }));
let metrics = ProxyConfigMetrics::new(rx.clone());
let consul = Arc::new(consul); let consul = Arc::new(consul);
tokio::spawn(async move { tokio::spawn(async move {
let mut catalog_rx = consul.watch_all_service_health(Duration::from_secs(300)); let mut nodes = HashMap::new();
let mut local_node_site = None; let mut watches = FuturesUnordered::<BoxFuture<'static, (String, Result<_>)>>::new();
let mut node_site = HashMap::new();
while !*must_exit.borrow() { while !*must_exit.borrow() {
select! { let list_nodes = select! {
_ = catalog_rx.changed() => (), ln = consul.list_nodes() => ln,
_ = must_exit.changed() => continue, _ = must_exit.changed() => continue,
}; };
let services = catalog_rx.borrow_and_update().clone(); match list_nodes {
if local_node_site.is_none() { Ok(consul_nodes) => {
for (_, svcnodes) in services.iter() { info!("Watched consul nodes: {:?}", consul_nodes);
for svcnode in svcnodes.iter() { for consul_node in consul_nodes {
if svcnode.node.node == local_node { let node = &consul_node.node;
if let Some(site) = svcnode.node.meta.get("site") { if !nodes.contains_key(node) {
local_node_site = Some(site.to_string()); nodes.insert(node.clone(), NodeWatchState::default());
}
let node = node.to_string();
let consul = consul.clone();
watches.push(Box::pin(async move {
let res = consul.watch_node(&node, None).await;
(node, res)
}));
}
if let Some(site) = consul_node.meta.get("site") {
node_site.insert(node.clone(), site.clone());
} }
} }
} }
Err(e) => {
warn!("Could not get Consul node list: {}", e);
}
}
let next_watch = select! {
nw = watches.next() => nw,
_ = must_exit.changed() => continue,
};
let (node, res): (String, Result<_>) = match next_watch {
Some(v) => v,
None => {
warn!("No nodes currently watched in proxy_config.rs");
sleep(Duration::from_secs(10)).await;
continue;
}
};
match res {
Ok((catalog, new_idx)) => {
let mut watch_state = nodes.get_mut(&node).unwrap();
watch_state.last_idx = Some(new_idx);
watch_state.last_catalog = Some(catalog);
watch_state.retries = 0;
let idx = watch_state.last_idx;
let consul = consul.clone();
watches.push(Box::pin(async move {
let res = consul.watch_node(&node, idx).await;
(node, res)
}));
}
Err(e) => {
let mut watch_state = nodes.get_mut(&node).unwrap();
watch_state.retries += 1;
watch_state.last_idx = None;
let will_retry_in =
retry_to_time(watch_state.retries, Duration::from_secs(600));
error!(
"Failed to query consul for node {}. Will retry in {}s. {}",
node,
will_retry_in.as_secs(),
e
);
let consul = consul.clone();
watches.push(Box::pin(async move {
sleep(will_retry_in).await;
let res = consul.watch_node(&node, None).await;
(node, res)
}));
continue;
}
} }
let mut entries = vec![]; let mut entries = vec![];
for (node_name, watch_state) in nodes.iter() {
if let Some(catalog) = &watch_state.last_catalog {
let same_node = *node_name == consul.local_node;
let same_site =
match (node_site.get(node_name), node_site.get(&consul.local_node)) {
(Some(s1), Some(s2)) => s1 == s2,
_ => false,
};
for (_service, svcnodes) in services.iter() { entries.extend(parse_consul_catalog(catalog, same_node, same_site));
for svcnode in svcnodes.iter() {
let healthy = !svcnode.checks.iter().any(|x| x.status == "critical");
let same_node = svcnode.node.node == local_node;
let same_site = match (svcnode.node.meta.get("site"), local_node_site.as_ref())
{
(Some(s1), Some(s2)) => s1 == s2,
_ => false,
};
let flags = ProxyEntryFlags {
healthy,
same_node,
same_site,
site_lb: false,
global_lb: false,
};
entries.extend(parse_consul_service(&svcnode, flags));
} }
} }
entries.sort_by_cached_key(|ent| ent.to_string());
let config = ProxyConfig { entries }; let config = ProxyConfig { entries };
tx.send(Arc::new(config)).expect("Internal error"); tx.send(Arc::new(config)).expect("Internal error");
} }
drop(metrics); // ensure Metrics lives up to here
}); });
rx rx
} }
// ----
struct ProxyConfigMetrics {
_proxy_config_entries: metrics::ObservableGauge<u64>,
}
impl ProxyConfigMetrics {
fn new(rx: watch::Receiver<Arc<ProxyConfig>>) -> Self {
let meter = opentelemetry::global::meter("tricot");
Self {
_proxy_config_entries: meter
.u64_observable_gauge("proxy_config_entries")
.with_callback(move |observer| {
let mut patterns = HashMap::new();
for ent in rx.borrow().entries.iter() {
let attrs = (
ent.url_prefix.host.to_string(),
ent.url_prefix.path_prefix.clone().unwrap_or_default(),
ent.service_name.clone(),
);
*patterns.entry(attrs).or_default() += 1;
}
for ((host, prefix, svc), num) in patterns {
observer.observe(
num,
&[
KeyValue::new("host", host),
KeyValue::new("path_prefix", prefix),
KeyValue::new("service", svc),
],
);
}
})
.with_description("Number of proxy entries (back-ends) configured in Tricot")
.init(),
}
}
}
// ----
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
#[test] #[test]
fn test_parse_tricot_add_header_tag() { fn test_parse_tricot_add_header_tag() {
match parse_tricot_tags("tricot-add-header Content-Security-Policy default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'") { match parse_tricot_add_header_tag("tricot-add-header Content-Security-Policy default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'") {
Some(ParsedTag::Middleware(ConfigTag::AddHeader(name, value))) => { Some((name, value)) => {
assert_eq!(name, "Content-Security-Policy"); assert_eq!(name, "Content-Security-Policy");
assert_eq!(value, "default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'"); assert_eq!(value, "default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'");
} }

View file

@ -8,9 +8,9 @@ use std::sync::Arc;
use std::time::{Duration, SystemTime}; use std::time::{Duration, SystemTime};
use anyhow::Result; use anyhow::Result;
use tracing::*; use log::*;
use http::{header::HeaderName, StatusCode}; use http::header::HeaderName;
use hyper::header::{HeaderMap, HeaderValue}; use hyper::header::{HeaderMap, HeaderValue};
use hyper::{client::HttpConnector, header, Body, Client, Request, Response, Uri}; use hyper::{client::HttpConnector, header, Body, Client, Request, Response, Uri};
use rustls::client::{ServerCertVerified, ServerCertVerifier}; use rustls::client::{ServerCertVerified, ServerCertVerifier};
@ -51,22 +51,20 @@ fn remove_hop_headers(headers: &HeaderMap<HeaderValue>) -> HeaderMap<HeaderValue
fn copy_upgrade_headers( fn copy_upgrade_headers(
old_headers: &HeaderMap<HeaderValue>, old_headers: &HeaderMap<HeaderValue>,
new_headers: &mut HeaderMap<HeaderValue>, new_headers: &mut HeaderMap<HeaderValue>,
) -> Result<bool> { ) -> Result<()> {
// The Connection header is stripped as it is a hop header that we are not supposed to proxy. // The Connection header is stripped as it is a hop header that we are not supposed to proxy.
// However, it might also contain an Upgrade directive, e.g. for Websockets: // However, it might also contain an Upgrade directive, e.g. for Websockets:
// when that happen, we do want to preserve that directive. // when that happen, we do want to preserve that directive.
let mut is_upgrade = false;
if let Some(conn) = old_headers.get(header::CONNECTION) { if let Some(conn) = old_headers.get(header::CONNECTION) {
let conn_str = conn.to_str()?.to_lowercase(); let conn_str = conn.to_str()?.to_lowercase();
if conn_str.split(',').map(str::trim).any(|x| x == "upgrade") { if conn_str.split(',').map(str::trim).any(|x| x == "upgrade") {
if let Some(upgrade) = old_headers.get(header::UPGRADE) { if let Some(upgrade) = old_headers.get(header::UPGRADE) {
new_headers.insert(header::CONNECTION, "Upgrade".try_into()?); new_headers.insert(header::CONNECTION, "Upgrade".try_into()?);
new_headers.insert(header::UPGRADE, upgrade.clone()); new_headers.insert(header::UPGRADE, upgrade.clone());
is_upgrade = true;
} }
} }
} }
Ok(is_upgrade) Ok(())
} }
fn forward_uri<B>(forward_url: &str, req: &Request<B>) -> Result<Uri> { fn forward_uri<B>(forward_url: &str, req: &Request<B>) -> Result<Uri> {
@ -78,11 +76,11 @@ fn forward_uri<B>(forward_url: &str, req: &Request<B>) -> Result<Uri> {
Ok(Uri::from_str(forward_uri.as_str())?) Ok(Uri::from_str(forward_uri.as_str())?)
} }
fn create_proxied_request<B: std::default::Default>( fn create_proxied_request<B>(
client_ip: IpAddr, client_ip: IpAddr,
forward_url: &str, forward_url: &str,
request: Request<B>, request: Request<B>,
) -> Result<(Request<B>, Option<Request<B>>)> { ) -> Result<Request<B>> {
let mut builder = Request::builder() let mut builder = Request::builder()
.method(request.method()) .method(request.method())
.uri(forward_uri(forward_url, &request)?) .uri(forward_uri(forward_url, &request)?)
@ -133,57 +131,19 @@ fn create_proxied_request<B: std::default::Default>(
); );
// Proxy upgrade requests properly // Proxy upgrade requests properly
let is_upgrade = copy_upgrade_headers(old_headers, new_headers)?; copy_upgrade_headers(old_headers, new_headers)?;
if is_upgrade { Ok(builder.body(request.into_body())?)
Ok((builder.body(B::default())?, Some(request)))
} else {
Ok((builder.body(request.into_body())?, None))
}
} }
async fn create_proxied_response<B: std::default::Default + Send + Sync + 'static>( fn create_proxied_response<B>(mut response: Response<B>) -> Result<Response<B>> {
mut response: Response<B>,
upgrade_request: Option<Request<B>>,
) -> Result<Response<B>> {
let old_headers = response.headers(); let old_headers = response.headers();
let mut new_headers = remove_hop_headers(old_headers); let mut new_headers = remove_hop_headers(old_headers);
copy_upgrade_headers(old_headers, &mut new_headers)?; copy_upgrade_headers(old_headers, &mut new_headers)?;
if response.status() == StatusCode::SWITCHING_PROTOCOLS { *response.headers_mut() = new_headers;
if let Some(mut req) = upgrade_request { Ok(response)
let mut res_upgraded = hyper::upgrade::on(response).await?;
tokio::spawn(async move {
match hyper::upgrade::on(&mut req).await {
Ok(mut req_upgraded) => {
if let Err(e) =
tokio::io::copy_bidirectional(&mut req_upgraded, &mut res_upgraded)
.await
{
warn!("Error copying data in upgraded request: {}", e);
}
}
Err(e) => {
warn!(
"Could not upgrade client request when switching protocols: {}",
e
);
}
}
});
let mut new_res = Response::builder().status(StatusCode::SWITCHING_PROTOCOLS);
*new_res.headers_mut().unwrap() = new_headers;
Ok(new_res.body(B::default())?)
} else {
Err(anyhow!("Switching protocols but not an upgrade request"))
}
} else {
*response.headers_mut() = new_headers;
Ok(response)
}
} }
pub async fn call( pub async fn call(
@ -191,8 +151,7 @@ pub async fn call(
forward_uri: &str, forward_uri: &str,
request: Request<Body>, request: Request<Body>,
) -> Result<Response<Body>> { ) -> Result<Response<Body>> {
let (proxied_request, upgrade_request) = let proxied_request = create_proxied_request(client_ip, forward_uri, request)?;
create_proxied_request(client_ip, forward_uri, request)?;
trace!("Proxied request: {:?}", proxied_request); trace!("Proxied request: {:?}", proxied_request);
@ -205,7 +164,7 @@ pub async fn call(
trace!("Inner response: {:?}", response); trace!("Inner response: {:?}", response);
let proxied_response = create_proxied_response(response, upgrade_request).await?; let proxied_response = create_proxied_response(response)?;
Ok(proxied_response) Ok(proxied_response)
} }
@ -214,8 +173,7 @@ pub async fn call_https(
forward_uri: &str, forward_uri: &str,
request: Request<Body>, request: Request<Body>,
) -> Result<Response<Body>> { ) -> Result<Response<Body>> {
let (proxied_request, upgrade_request) = let proxied_request = create_proxied_request(client_ip, forward_uri, request)?;
create_proxied_request(client_ip, forward_uri, request)?;
trace!("Proxied request (HTTPS): {:?}", proxied_request); trace!("Proxied request (HTTPS): {:?}", proxied_request);
@ -233,7 +191,7 @@ pub async fn call_https(
trace!("Inner response (HTTPS): {:?}", response); trace!("Inner response (HTTPS): {:?}", response);
let proxied_response = create_proxied_response(response, upgrade_request).await?; let proxied_response = create_proxied_response(response)?;
Ok(proxied_response) Ok(proxied_response)
} }