Compare commits

...

19 commits
v0.1.1 ... main

16 changed files with 1388 additions and 609 deletions

1
.gitignore vendored
View file

@ -1,2 +1 @@
/target
/Cargo.lock

460
Cargo.lock generated Normal file
View file

@ -0,0 +1,460 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bitflags"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chacha20"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818"
dependencies = [
"cfg-if",
"cipher",
"cpufeatures",
]
[[package]]
name = "cipher"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
dependencies = [
"crypto-common",
"inout",
"zeroize",
]
[[package]]
name = "cpufeatures"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0"
dependencies = [
"libc",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "curve25519-dalek"
version = "4.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c"
dependencies = [
"cfg-if",
"cpufeatures",
"curve25519-dalek-derive",
"fiat-crypto",
"platforms",
"rustc_version",
"subtle",
"zeroize",
]
[[package]]
name = "curve25519-dalek-derive"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
]
[[package]]
name = "dryoc"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65f5013c48133363c5a8db6bc74511b8b254680929c7757d9d833dea18c12f13"
dependencies = [
"bitflags",
"chacha20",
"curve25519-dalek",
"generic-array",
"lazy_static",
"libc",
"rand_core",
"salsa20",
"sha2",
"subtle",
"winapi",
"zeroize",
]
[[package]]
name = "err-derive"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c34a887c8df3ed90498c1c437ce21f211c8e27672921a8ffa293cb8d6d4caa9e"
dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"rustversion",
"syn 1.0.109",
"synstructure",
]
[[package]]
name = "fiat-crypto"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53a56f0780318174bad1c127063fd0c5fdfb35398e3cd79ffaab931a6c79df80"
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getrandom"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "inout"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5"
dependencies = [
"generic-array",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "memchr"
version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "nettext"
version = "0.4.1"
dependencies = [
"base64",
"dryoc",
"err-derive",
"hex",
"nom",
"serde",
]
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "platforms"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0"
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn 1.0.109",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "rustversion"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
[[package]]
name = "salsa20"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213"
dependencies = [
"cipher",
]
[[package]]
name = "semver"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
[[package]]
name = "serde"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "subtle"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "zeroize"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
dependencies = [
"zeroize_derive",
]
[[package]]
name = "zeroize_derive"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]

View file

@ -2,7 +2,7 @@
name = "nettext"
description = "A text-based data format for cryptographic network protocols"
authors = ["Alex Auvolat <alex@adnab.me>"]
version = "0.1.1"
version = "0.4.1"
edition = "2021"
license = "AGPL-3.0"
readme = "README.md"
@ -12,11 +12,11 @@ readme = "README.md"
[dependencies]
nom = "7.1"
base64 = "0.13"
hex = "0.4"
err-derive = "0.3"
blake2 = { version = "0.10", optional = true }
rand = "0.7"
ed25519-dalek = { version = "1.0", optional = true }
dryoc = { version = "0.5", optional = true }
serde = { version = "1.0", optional = true, features = ["derive"] }
[features]
default = [ "blake2", "ed25519-dalek", "serde" ]
default = [ "dryoc", "serde" ]

View file

@ -13,24 +13,28 @@ A text-based data format for cryptographic network protocols.
A term can be of any of the following kinds:
- a string, which may contain only ASCII alphanumeric terms and `.-_*?`
- a string, which may contain only ASCII alphanumeric characters and a limited subset of other ASCII characters that may not include characters used to represent other kinds of terms
- a dict, which maps strings (as defined above) to any term type
- a list, which is a consecutive sequence of at least 2 strings or dicts (can be mixed), simply separated by whitespace
Nested lists can be represented using a special dictionnary with a single key, `.`,
for instance `TEST a { . = 0 4 2 1 9 7 0 } c`.
- a list, which may contain any number of any kind of terms (can be mixed)
- a sequence, constituted of at least two of the above (can be mixed), simply separated by whitespace; sequences cannot be nested
Dicts are represented as follows:
```
{
key1 = value1,
key1 = value1;
key2 = value2
}
```
Lists are represented as follows:
```
[ term1; term2 ]
```
Sequences are represented as follows:
```
term1 term2 term3
```
@ -38,21 +42,26 @@ term1 term2 term3
As a consequence, complex data structures can be defined as follows:
```
SENDTO alex {
topic = blah,
SEND MESSAGE {
topic = blah;
to = [
TOPIC hello;
USER john
],
body = blah blah
}
```
The raw representation of a parsed dict or list is retained for hashing purposes.
The raw representation of a parsed dict or sequence is retained for hashing purposes.
It is the sequence of bytes, in the encoded string, trimmed of whitespace at the extremities,
that represents the encoded dict or list in that string.
that represents the encoded dict or sequence in that string.
In the complex stance example above, here are the lists and dicts and their raw representation:
In the complex stance example above, here are the sequence and dicts and their raw representation:
- the toplevel term is a list, whose raw representation is the entire encoded string (assuming no whitespace at beginning or end)
- the third term of the list is a dict, whose raw representation starts at `{` and ends at `}`
- the second mapping of the dict is a list, whose raw representation is exactly `blah blah`.
- the toplevel term is a sequence, whose raw representation is the entire encoded string (assuming no whitespace at beginning or end)
- the third term of the sequence is a dict, whose raw representation starts at `{` and ends at `}`
- the second mapping of the dict is a list, whose raw representation starts at `[` and ends at `]`
- the third mapping of the dict is a sequence, whose raw representation is exactly `blah blah`.
Since strings cannot contain whitespace, they are always equivalent to their raw representation.
@ -61,18 +70,42 @@ Since strings cannot contain whitespace, they are always equivalent to their raw
Terms can be interpreted in a number of different ways, depending on the context:
- RAW: the term is interpreted as its raw encoding (see above)
- STRING: if the term is a string or a list composed exclusively of strings, the term is interpreted as its raw encoding
- VARIANT: if the term is a list whose first item is a string, it is interpreted as a variant with the following properties:
- STRING: if the term is a string or a sequence composed exclusively of strings, the term is interpreted as its raw encoding
- VARIANT: if the term is a sequence whose first item is a string, it is interpreted as a variant with the following properties:
- a discriminator (the first item)
- a value, which is either the second item in case there are only two items, or the list composed of all items starting from the second if there are more than two
- a value, which is either the second item in case there are only two items, or the sequence composed of all items starting from the second if there are more than two
- DICT: if the term is a dict, interpret it as such
- LIST: if the term is a string or a dict, interpret it as a list composed of that single term. Otherwise, the term is a list, interpret it as a list of terms.
- NESTED: if the term is a dict with a single key `.`, interpret it as the term associated to that key
- LIST: if the term is a list, interpret it as such
- SEQ: if the term is a string, a list, or a dict, interpret it as a sequence composed of that single term. Otherwise, the term is a sequence, interpret it as a sequence of terms.
## Data mappings
Terms further have mappings as different data types:
- BYTES: if the term maps as a STRING, decode it using base64
- INT: if the term maps as a STRING, decode it as an integer written in decimal notation
- BYTES: if the term maps as a STRING, decode it using base64. Since a STRING cannot be empty, the string `-` is used to represent an empty byte string.
- Cryptographic data types (see below)
## Cryptographic data types
Cryptographic values such as keys, hashes, signatures, etc. are encoded
as STRING with a prefix indicating the algorithm used, followed by ":",
followed by the base64-encoded value.
Prefixes are as follows:
- `pk.box:` public key for NaCl's box API
- `sk.box:` secret key for NaCl's box API
- `sk.sbox:` secret key for NaCl's secretbox API
- `h.sha256:` sha256 hash
- `h.sha512:` sha512 hash
- `h.sha3:` sha3 hash
- `h.b2:` blake2b hash
- `h.b3:` blake3 hash
- `sig.ed25519:` ed25519 signature
- `pk.ed25519:` ed25519 public signing key
- `sk.ed25519:` ed25519 secret signing key
More can be added.
- HASH, PUBKEY, SECKEY, SIGNATURE, ENCKEY, DECKEY, SYMKEY: a bunch of things that interpret BYTES as specific cryptographic items

View file

@ -1,42 +0,0 @@
use blake2::{Blake2b512, Digest};
use crate::crypto::CryptoError;
use crate::enc;
/// A Blake2b512 digest
#[derive(Clone, Copy, Eq, PartialEq)]
pub struct Blake2Sum([u8; 64]);
impl Blake2Sum {
/// Create a Blake2Sum object by passing the digest as bytes directly
pub fn from_bytes(bytes: [u8; 64]) -> Self {
Self(bytes)
}
/// Compute the Blake2b512 digest of a byte slice
pub fn compute(buf: &[u8]) -> Self {
let mut hasher = Blake2b512::new();
hasher.update(buf);
Self(hasher.finalize()[..].try_into().unwrap())
}
/// Check that this digest corresponds to a given slice
pub fn verify(&self, buf: &[u8]) -> Result<(), CryptoError> {
if Self::compute(buf) == *self {
Ok(())
} else {
Err(CryptoError::InvalidHash)
}
}
/// Return a reference to the inner byte slice
pub fn as_bytes(&self) -> &[u8] {
&self.0[..]
}
}
impl enc::Encode for Blake2Sum {
fn term(&self) -> enc::Result<'_> {
Ok(enc::bytes(self.as_bytes()))
}
}

View file

@ -1,35 +0,0 @@
use rand::prelude::*;
use crate::enc;
pub use ed25519_dalek::{Keypair, PublicKey, SecretKey, Signature, Signer, Verifier};
/// Generate a public/secret Ed25519 keypair
pub fn generate_keypair() -> Keypair {
let mut csprng = thread_rng();
Keypair::generate(&mut csprng)
}
impl enc::Encode for Keypair {
fn term(&self) -> enc::Result<'_> {
Ok(enc::bytes(&self.to_bytes()))
}
}
impl enc::Encode for PublicKey {
fn term(&self) -> enc::Result<'_> {
Ok(enc::bytes(self.as_bytes()))
}
}
impl enc::Encode for SecretKey {
fn term(&self) -> enc::Result<'_> {
Ok(enc::bytes(self.as_bytes()))
}
}
impl enc::Encode for Signature {
fn term(&self) -> enc::Result<'_> {
Ok(enc::bytes(&self.to_bytes()))
}
}

View file

@ -1,21 +1,158 @@
//! Helpers to use cryptographic data types in nettext
#[cfg(feature = "blake2")]
mod b2;
pub use dryoc;
#[cfg(feature = "blake2")]
pub use b2::*;
use dryoc::types::Bytes;
use dryoc::*;
#[cfg(feature = "ed25519-dalek")]
mod ed25519;
use crate::dec;
use crate::enc;
#[cfg(feature = "ed25519-dalek")]
pub use ed25519::*;
const BM_HASH: &str = "h.b2";
/// An error corresponding to a cryptographic check that failed
pub enum CryptoError {
/// A hash verification failed
InvalidHash,
/// A signature verification failed
InvalidSignature,
const BM_SIGNATURE: &str = "sig.ed25519";
const BM_SIGN_KEYPAIR: &str = "sk.ed25519";
const BM_SIGN_PUBKEY: &str = "pk.ed25519";
// ---- types ----
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Hash(pub generichash::Hash);
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Signature(pub sign::Signature);
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct SigningPublicKey(pub sign::PublicKey);
#[derive(PartialEq, Clone, Debug)]
pub struct SigningKeyPair(pub sign::SigningKeyPair<sign::PublicKey, sign::SecretKey>);
impl SigningKeyPair {
/// Return the public key of this keypair
pub fn public_key(&self) -> SigningPublicKey {
SigningPublicKey(self.0.public_key.clone())
}
}
// ---- encoding ----
impl enc::Encode for Hash {
fn term(&self) -> enc::Result<'_> {
enc::marked_bytes(BM_HASH, self.0.as_slice())
}
}
impl enc::Encode for Signature {
fn term(&self) -> enc::Result<'_> {
enc::marked_bytes(BM_SIGNATURE, self.0.as_slice())
}
}
impl enc::Encode for SigningPublicKey {
fn term(&self) -> enc::Result<'_> {
enc::marked_bytes(BM_SIGN_PUBKEY, self.0.as_slice())
}
}
impl enc::Encode for SigningKeyPair {
fn term(&self) -> enc::Result<'_> {
enc::marked_bytes(BM_SIGN_KEYPAIR, self.0.secret_key.as_slice())
}
}
// ---- calculating hashes, signatures, etc ----
/// Compute the hash of a payload with default dryoc parameters and optional key
pub fn compute_hash(bytes: &[u8], key: Option<&[u8; 32]>) -> Hash {
Hash(generichash::GenericHash::hash_with_defaults(bytes, key).unwrap())
}
/// Generate a new signing keypair
pub fn gen_signing_keypair() -> SigningKeyPair {
SigningKeyPair(sign::SigningKeyPair::gen_with_defaults())
}
/// Compute the ed25519 signature of a message using a secret key
pub fn compute_signature(message: &[u8], keypair: &SigningKeyPair) -> Signature {
Signature(
keypair
.0
.sign_with_defaults(message)
.unwrap()
.into_parts()
.0,
)
}
/// Verify the ed25519 signature of a message using a public key
pub fn verify_signature(
signature: &Signature,
message: &[u8],
public_key: &SigningPublicKey,
) -> bool {
sign::SignedMessage::from_parts(signature.0.clone(), message.to_vec())
.verify(&public_key.0)
.is_ok()
}
// ---- decode helpers ----
pub trait CryptoDec {
/// Try to interpret this string as a Blake2b digest
/// (32 bytes base64 encoded, prefixed by `h.b2:`)
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
/// use nettext::crypto::{compute_hash, CryptoDec};
///
/// let term = decode(b"{
/// message = hello;
/// hash = h.b2:Mk3PAn3UowqTLEQfNlol6GsXPe-kuOWJSCU0cbgbcs8;
/// }").unwrap();
/// let [msg, hash] = term.dict_of(["message", "hash"], false).unwrap();
/// let expected_hash = compute_hash(msg.raw(), None);
/// assert_eq!(hash.hash().unwrap(), expected_hash);
/// ```
fn hash(&self) -> Result<Hash, dec::TypeError>;
/// Try to interpret this string as an ed25519 signature
/// (64 bytes base64 encoded, prefixed by `sig.ed25519:`)
fn signature(&self) -> Result<Signature, dec::TypeError>;
/// Try to interpret this string as an ed25519 keypair
/// (64 bytes base64 encoded, prefixed by `sk.ed25519:`)
fn keypair(&self) -> Result<SigningKeyPair, dec::TypeError>;
/// Try to interpret this string as an ed25519 public key
/// (32 bytes base64 encoded, prefixed by `pk.ed25519:`)
fn public_key(&self) -> Result<SigningPublicKey, dec::TypeError>;
}
impl<'a, 'b> CryptoDec for dec::Term<'a, 'b> {
fn hash(&self) -> Result<Hash, dec::TypeError> {
Ok(Hash(generichash::Hash::from(
self.marked_bytes_exact(BM_HASH)?,
)))
}
/// Try to interpret this string as an ed25519 signature (64 bytes base64 encoded)
fn signature(&self) -> Result<Signature, dec::TypeError> {
Ok(Signature(sign::Signature::from(
self.marked_bytes_exact(BM_SIGNATURE)?,
)))
}
fn keypair(&self) -> Result<SigningKeyPair, dec::TypeError> {
let secret_key = sign::SecretKey::from(self.marked_bytes_exact(BM_SIGN_KEYPAIR)?);
Ok(SigningKeyPair(sign::SigningKeyPair::from_secret_key(
secret_key,
)))
}
fn public_key(&self) -> Result<SigningPublicKey, dec::TypeError> {
Ok(SigningPublicKey(sign::PublicKey::from(
self.marked_bytes_exact(BM_SIGN_PUBKEY)?,
)))
}
}

View file

@ -8,8 +8,11 @@ use nom::{
IResult, InputLength,
};
use crate::dec::{AnyTerm, DecodeError, NonListTerm, Term};
use crate::{is_string_char, is_whitespace, DICT_ASSIGN, DICT_CLOSE, DICT_DELIM, DICT_OPEN};
use crate::dec::{AnyTerm, DecodeError, NonSeqTerm, Term};
use crate::{
is_string_char, is_whitespace, DICT_ASSIGN, DICT_CLOSE, DICT_DELIM, DICT_OPEN, LIST_CLOSE,
LIST_DELIM, LIST_OPEN,
};
// ----
@ -25,21 +28,22 @@ pub fn decode(input: &[u8]) -> std::result::Result<Term<'_, '_>, DecodeError<'_>
fn decode_term(input: &[u8]) -> IResult<&'_ [u8], AnyTerm<'_, '_>> {
let (start, _) = take_while(is_whitespace)(input)?;
let (rest, list) = separated_list1(take_while1(is_whitespace), decode_nonlist_term)(start)?;
let (rest, seq) = separated_list1(take_while1(is_whitespace), decode_nonseq_term)(start)?;
if list.len() == 1 {
Ok((rest, list.into_iter().next().unwrap().into()))
if seq.len() == 1 {
Ok((rest, seq.into_iter().next().unwrap().into()))
} else {
let raw_len = start.input_len() - rest.input_len();
let list_raw = &start[..raw_len];
Ok((rest, AnyTerm::List(list_raw, list)))
let seq_raw = &start[..raw_len];
Ok((rest, AnyTerm::Seq(seq_raw, seq)))
}
}
fn decode_nonlist_term(input: &[u8]) -> IResult<&'_ [u8], NonListTerm<'_, '_>> {
fn decode_nonseq_term(input: &[u8]) -> IResult<&'_ [u8], NonSeqTerm<'_, '_>> {
let (rest, term) = alt((
map(decode_str, NonListTerm::Str),
map(decode_dict, |(raw, d)| NonListTerm::Dict(raw, d)),
map(decode_str, NonSeqTerm::Str),
map(decode_dict, |(raw, d)| NonSeqTerm::Dict(raw, d)),
map(decode_list, |(raw, l)| NonSeqTerm::List(raw, l)),
))(input)?;
Ok((rest, term))
}
@ -81,6 +85,27 @@ fn decode_dict_item(d: &[u8]) -> IResult<&'_ [u8], (&'_ [u8], AnyTerm<'_, '_>)>
Ok((d, (key, value)))
}
type ListType<'a> = (&'a [u8], Vec<AnyTerm<'a, 'a>>);
fn decode_list(list_begin: &[u8]) -> IResult<&'_ [u8], ListType<'_>> {
let (d, _) = tag(&[LIST_OPEN][..])(list_begin)?;
let (d, list) = separated_list0(list_separator, decode_term)(d)?;
let (d, _) = opt(list_separator)(d)?;
let (d, _) = take_while(is_whitespace)(d)?;
let (list_end, _) = tag(&[LIST_CLOSE][..])(d)?;
let raw_len = list_begin.input_len() - list_end.input_len();
let list_raw = &list_begin[..raw_len];
Ok((list_end, (list_raw, list)))
}
fn list_separator(d: &[u8]) -> IResult<&'_ [u8], ()> {
let (d, _) = take_while(is_whitespace)(d)?;
let (d, _) = tag(&[LIST_DELIM][..])(d)?;
Ok((d, ()))
}
// ----
#[cfg(test)]
@ -94,17 +119,17 @@ mod tests {
}
#[test]
fn list_of_str_str() {
fn seq_of_str_str() {
let bytes = b" plop plap plip ploup ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
Ok(AnyTerm::Seq(
b"plop plap plip ploup",
vec![
NonListTerm::Str(b"plop"),
NonListTerm::Str(b"plap"),
NonListTerm::Str(b"plip"),
NonListTerm::Str(b"ploup"),
NonSeqTerm::Str(b"plop"),
NonSeqTerm::Str(b"plap"),
NonSeqTerm::Str(b"plip"),
NonSeqTerm::Str(b"ploup"),
]
)
.into())
@ -113,18 +138,18 @@ mod tests {
#[test]
fn simple_dict() {
let bytes = b" { aze = hello, by = bojzkz pipo, ccde = ke } ";
let bytes = b" { aze = hello; by = bojzkz pipo; ccde = ke } ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Dict(
b"{ aze = hello, by = bojzkz pipo, ccde = ke }",
b"{ aze = hello; by = bojzkz pipo; ccde = ke }",
[
(&b"aze"[..], AnyTerm::Str(b"hello")),
(
&b"by"[..],
AnyTerm::List(
AnyTerm::Seq(
b"bojzkz pipo",
vec![NonListTerm::Str(b"bojzkz"), NonListTerm::Str(b"pipo")]
vec![NonSeqTerm::Str(b"bojzkz"), NonSeqTerm::Str(b"pipo")]
)
),
(&b"ccde"[..], AnyTerm::Str(b"ke")),
@ -138,18 +163,18 @@ mod tests {
#[test]
fn simple_dict_2() {
let bytes = b" { aze = hello, by = bojzkz pipo , ccde = ke , } ";
let bytes = b" { aze = hello; by = bojzkz pipo ; ccde = ke ; } ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Dict(
b"{ aze = hello, by = bojzkz pipo , ccde = ke , }",
b"{ aze = hello; by = bojzkz pipo ; ccde = ke ; }",
[
(&b"aze"[..], AnyTerm::Str(b"hello")),
(
&b"by"[..],
AnyTerm::List(
AnyTerm::Seq(
b"bojzkz pipo",
vec![NonListTerm::Str(b"bojzkz"), NonListTerm::Str(b"pipo")]
vec![NonSeqTerm::Str(b"bojzkz"), NonSeqTerm::Str(b"pipo")]
)
),
(&b"ccde"[..], AnyTerm::Str(b"ke")),
@ -161,14 +186,56 @@ mod tests {
);
}
#[test]
fn simple_list() {
let bytes = b" [ hello; bojzkz pipo; ke ] ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
b"[ hello; bojzkz pipo; ke ]",
[
AnyTerm::Str(b"hello"),
AnyTerm::Seq(
b"bojzkz pipo",
vec![NonSeqTerm::Str(b"bojzkz"), NonSeqTerm::Str(b"pipo")]
),
AnyTerm::Str(b"ke"),
]
.to_vec()
)
.into())
);
}
#[test]
fn simple_list_2() {
let bytes = b" [ hello; bojzkz pipo ; ke ; ] ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
b"[ hello; bojzkz pipo ; ke ; ]",
[
AnyTerm::Str(b"hello"),
AnyTerm::Seq(
b"bojzkz pipo",
vec![NonSeqTerm::Str(b"bojzkz"), NonSeqTerm::Str(b"pipo")]
),
AnyTerm::Str(b"ke"),
]
.to_vec()
)
.into())
);
}
#[test]
fn real_world_1() {
let bytes = b"HEAD alexpubkey";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
Ok(AnyTerm::Seq(
b"HEAD alexpubkey",
vec![NonListTerm::Str(b"HEAD"), NonListTerm::Str(b"alexpubkey")]
vec![NonSeqTerm::Str(b"HEAD"), NonSeqTerm::Str(b"alexpubkey")]
)
.into()),
);
@ -176,25 +243,25 @@ mod tests {
#[test]
fn real_world_2() {
let bytes = b"STANCE sthash stsign { author = alexpubkey, height = 12, parent = parenthash, data = MESSAGE { text = hello } }";
let bytes = b"STANCE sthash stsign { author = alexpubkey; height = 12; parent = parenthash; data = MESSAGE { text = hello } }";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
Ok(AnyTerm::Seq(
&bytes[..],
vec![
NonListTerm::Str(b"STANCE"),
NonListTerm::Str(b"sthash"),
NonListTerm::Str(b"stsign"),
NonListTerm::Dict(b"{ author = alexpubkey, height = 12, parent = parenthash, data = MESSAGE { text = hello } }",
NonSeqTerm::Str(b"STANCE"),
NonSeqTerm::Str(b"sthash"),
NonSeqTerm::Str(b"stsign"),
NonSeqTerm::Dict(b"{ author = alexpubkey; height = 12; parent = parenthash; data = MESSAGE { text = hello } }",
[
(&b"author"[..], AnyTerm::Str(b"alexpubkey")),
(&b"height"[..], AnyTerm::Str(b"12")),
(&b"parent"[..], AnyTerm::Str(b"parenthash")),
(&b"data"[..], AnyTerm::List(
(&b"data"[..], AnyTerm::Seq(
b"MESSAGE { text = hello }",
vec![
NonListTerm::Str(b"MESSAGE"),
NonListTerm::Dict(
NonSeqTerm::Str(b"MESSAGE"),
NonSeqTerm::Dict(
b"{ text = hello }",
[
(&b"text"[..], AnyTerm::Str(b"hello")),
@ -209,4 +276,51 @@ mod tests {
]).into(),
));
}
#[test]
fn real_world_3() {
let bytes = b"[ USER john; USER luke; GROUP strategy { owner = USER john; members = [ USER john; USER luke ] } ]";
let user_john = AnyTerm::Seq(
b"USER john",
vec![NonSeqTerm::Str(b"USER"), NonSeqTerm::Str(b"john")],
);
let user_luke = AnyTerm::Seq(
b"USER luke",
vec![NonSeqTerm::Str(b"USER"), NonSeqTerm::Str(b"luke")],
);
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
&bytes[..],
vec![
user_john.clone(),
user_luke.clone(),
AnyTerm::Seq(
b"GROUP strategy { owner = USER john; members = [ USER john; USER luke ] }",
vec![
NonSeqTerm::Str(b"GROUP"),
NonSeqTerm::Str(b"strategy"),
NonSeqTerm::Dict(
b"{ owner = USER john; members = [ USER john; USER luke ] }",
[
(&b"owner"[..], user_john.clone()),
(
&b"members"[..],
AnyTerm::List(
b"[ USER john; USER luke ]",
vec![user_john, user_luke,]
)
)
]
.into_iter()
.collect()
)
]
),
]
)
.into())
);
}
}

View file

@ -1,19 +1,29 @@
use std::fmt;
use err_derive::Error;
use crate::dec::debug;
/// The type of errors returned by helper functions on `Term`
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Error)]
pub enum TypeError {
/// The term could not be decoded in the given type
#[error(display = "Not a {}", _0)]
WrongType(&'static str),
/// The term did not have the correct marker
#[error(display = "Byte marker was not {}", _0)]
WrongMarker(&'static str),
/// The term is not an array of the requested length
#[error(display = "Expected {} items, got {}", _0, _1)]
WrongLength(usize, usize),
/// The dictionnary is missing a key
#[error(display = "Missing key `{}` in dict", _0)]
MissingKey(String),
/// The dictionnary contains an invalid key
#[error(display = "Spurrious/unexpected key `{}` in dict", _0)]
UnexpectedKey(String),
/// The underlying raw string contains garbage (should not happen in theory)
#[error(display = "Garbage in underlying data")]
Garbage,
}
@ -23,18 +33,6 @@ impl From<std::str::Utf8Error> for TypeError {
}
}
impl std::fmt::Display for TypeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TypeError::WrongType(t) => write!(f, "Not a {}", t),
TypeError::WrongLength(n, m) => write!(f, "Expected {} items, got {}", m, n),
TypeError::MissingKey(k) => write!(f, "Missing key `{}` in dict", k),
TypeError::UnexpectedKey(k) => write!(f, "Spurrious/unexpected key `{}` in dict", k),
TypeError::Garbage => write!(f, "Garbage in underlying data"),
}
}
}
// ----
/// The error kind returned by the `decode` function.
@ -64,6 +62,8 @@ impl<'a> std::fmt::Debug for DecodeError<'a> {
}
}
impl<'a> std::error::Error for DecodeError<'a> {}
impl<'a> From<nom::Err<nom::error::Error<&'a [u8]>>> for DecodeError<'a> {
fn from(e: nom::Err<nom::error::Error<&'a [u8]>>) -> DecodeError<'a> {
match e {

View file

@ -5,8 +5,7 @@ mod error;
use std::collections::HashMap;
#[cfg(any(feature = "blake2", feature = "ed25519-dalek"))]
use crate::crypto;
use crate::debug;
pub use decode::*;
pub use error::*;
@ -26,34 +25,42 @@ pub(crate) enum AnyTerm<'a, 'b> {
Str(&'a [u8]),
Dict(&'a [u8], HashMap<&'a [u8], AnyTerm<'a, 'b>>),
DictRef(&'a [u8], &'b HashMap<&'a [u8], AnyTerm<'a, 'b>>),
List(&'a [u8], Vec<NonListTerm<'a, 'b>>),
ListRef(&'a [u8], &'b [NonListTerm<'a, 'b>]),
List(&'a [u8], Vec<AnyTerm<'a, 'b>>),
ListRef(&'a [u8], &'b [AnyTerm<'a, 'b>]),
Seq(&'a [u8], Vec<NonSeqTerm<'a, 'b>>),
SeqRef(&'a [u8], &'b [NonSeqTerm<'a, 'b>]),
}
#[derive(Eq, PartialEq, Clone)]
pub(crate) enum NonListTerm<'a, 'b> {
pub(crate) enum NonSeqTerm<'a, 'b> {
Str(&'a [u8]),
Dict(&'a [u8], HashMap<&'a [u8], AnyTerm<'a, 'b>>),
DictRef(&'a [u8], &'b HashMap<&'a [u8], AnyTerm<'a, 'b>>),
List(&'a [u8], Vec<AnyTerm<'a, 'b>>),
ListRef(&'a [u8], &'b [AnyTerm<'a, 'b>]),
}
impl<'a, 'b> From<NonListTerm<'a, 'b>> for AnyTerm<'a, 'b> {
fn from(x: NonListTerm<'a, 'b>) -> AnyTerm<'a, 'b> {
impl<'a, 'b> From<NonSeqTerm<'a, 'b>> for AnyTerm<'a, 'b> {
fn from(x: NonSeqTerm<'a, 'b>) -> AnyTerm<'a, 'b> {
match x {
NonListTerm::Str(s) => AnyTerm::Str(s),
NonListTerm::Dict(raw, d) => AnyTerm::Dict(raw, d),
NonListTerm::DictRef(raw, d) => AnyTerm::DictRef(raw, d),
NonSeqTerm::Str(s) => AnyTerm::Str(s),
NonSeqTerm::Dict(raw, d) => AnyTerm::Dict(raw, d),
NonSeqTerm::DictRef(raw, d) => AnyTerm::DictRef(raw, d),
NonSeqTerm::List(raw, l) => AnyTerm::List(raw, l),
NonSeqTerm::ListRef(raw, l) => AnyTerm::ListRef(raw, l),
}
}
}
impl<'a, 'b> TryFrom<AnyTerm<'a, 'b>> for NonListTerm<'a, 'b> {
impl<'a, 'b> TryFrom<AnyTerm<'a, 'b>> for NonSeqTerm<'a, 'b> {
type Error = ();
fn try_from(x: AnyTerm<'a, 'b>) -> Result<NonListTerm<'a, 'b>, ()> {
fn try_from(x: AnyTerm<'a, 'b>) -> Result<NonSeqTerm<'a, 'b>, ()> {
match x {
AnyTerm::Str(s) => Ok(NonListTerm::Str(s)),
AnyTerm::Dict(raw, d) => Ok(NonListTerm::Dict(raw, d)),
AnyTerm::DictRef(raw, d) => Ok(NonListTerm::DictRef(raw, d)),
AnyTerm::Str(s) => Ok(NonSeqTerm::Str(s)),
AnyTerm::Dict(raw, d) => Ok(NonSeqTerm::Dict(raw, d)),
AnyTerm::DictRef(raw, d) => Ok(NonSeqTerm::DictRef(raw, d)),
AnyTerm::List(raw, l) => Ok(NonSeqTerm::List(raw, l)),
AnyTerm::ListRef(raw, l) => Ok(NonSeqTerm::ListRef(raw, l)),
_ => Err(()),
}
}
@ -84,6 +91,20 @@ impl<'a, 'b> Term<'a, 'b> {
self.0.raw()
}
/// Get the term's raw representation as an str
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"hello { a = x; b = y }").unwrap();
/// assert_eq!(term.raw_str().unwrap(), "hello { a = x; b = y }");
/// ```
pub fn raw_str(&self) -> Result<&'a str, TypeError> {
Ok(std::str::from_utf8(self.0.raw())?)
}
/// If the term is a single string, get that string
///
/// Example:
@ -104,7 +125,7 @@ impl<'a, 'b> Term<'a, 'b> {
}
}
/// If the term is a single string, or a list containing only strings,
/// If the term is a single string, or a sequence containing only strings,
/// get its raw representation
///
/// Example:
@ -121,16 +142,16 @@ impl<'a, 'b> Term<'a, 'b> {
pub fn string(&self) -> Result<&'a str, TypeError> {
match &self.0 {
AnyTerm::Str(s) => Ok(std::str::from_utf8(s)?),
AnyTerm::List(r, l) if l.iter().all(|x| matches!(x, NonListTerm::Str(_))) => {
AnyTerm::Seq(r, l) if l.iter().all(|x| matches!(x, NonSeqTerm::Str(_))) => {
Ok(std::str::from_utf8(r)?)
}
_ => Err(TypeError::WrongType("STRING")),
}
}
/// Return a list of terms made from this term.
/// If it is a str or a dict, returns a list of a single term.
/// If it is a list, that's the list of terms we return.
/// Return a sequence of terms made from this term.
/// If it is a str or a dict, returns a seq of a single term.
/// If it is a sequence, that's the seq of terms we return.
///
/// Example:
///
@ -138,26 +159,26 @@ impl<'a, 'b> Term<'a, 'b> {
/// use nettext::dec::decode;
///
/// let term1 = decode(b"hello").unwrap();
/// let list1 = term1.list();
/// assert_eq!(list1.len(), 1);
/// assert_eq!(list1[0].str().unwrap(), "hello");
/// let seq1 = term1.seq();
/// assert_eq!(seq1.len(), 1);
/// assert_eq!(seq1[0].str().unwrap(), "hello");
///
/// let term2 = decode(b"hello world").unwrap();
/// let list2 = term2.list();
/// assert_eq!(list2.len(), 2);
/// assert_eq!(list2[0].str().unwrap(), "hello");
/// assert_eq!(list2[1].str().unwrap(), "world");
/// let seq2 = term2.seq();
/// assert_eq!(seq2.len(), 2);
/// assert_eq!(seq2[0].str().unwrap(), "hello");
/// assert_eq!(seq2[1].str().unwrap(), "world");
/// ```
pub fn list(&self) -> Vec<Term<'a, '_>> {
pub fn seq(&self) -> Vec<Term<'a, '_>> {
match self.0.mkref() {
AnyTerm::ListRef(_r, l) => l.iter().map(|x| Term(x.mkref().into())).collect::<Vec<_>>(),
AnyTerm::SeqRef(_r, l) => l.iter().map(|x| Term(x.mkref().into())).collect::<Vec<_>>(),
x => vec![Term(x)],
}
}
/// Same as `.list()`, but deconstructs it in a const length array,
/// Same as `.seq()`, but deconstructs it in a const length array,
/// dynamically checking if there are the correct number of items.
/// This allows to directly bind the resulting list into discrete variables.
/// This allows to directly bind the resulting seq into discrete variables.
///
/// Example:
///
@ -165,24 +186,24 @@ impl<'a, 'b> Term<'a, 'b> {
/// use nettext::dec::decode;
///
/// let term1 = decode(b"hello").unwrap();
/// let [s1] = term1.list_of().unwrap();
/// let [s1] = term1.seq_of().unwrap();
/// assert_eq!(s1.str().unwrap(), "hello");
///
/// let term2 = decode(b"hello world").unwrap();
/// let [s2a, s2b] = term2.list_of().unwrap();
/// let [s2a, s2b] = term2.seq_of().unwrap();
/// assert_eq!(s2a.str().unwrap(), "hello");
/// assert_eq!(s2b.str().unwrap(), "world");
/// ```
pub fn list_of<const N: usize>(&self) -> Result<[Term<'a, '_>; N], TypeError> {
let list = self.list();
let list_len = list.len();
list.try_into()
.map_err(|_| TypeError::WrongLength(list_len, N))
pub fn seq_of<const N: usize>(&self) -> Result<[Term<'a, '_>; N], TypeError> {
let seq = self.seq();
let seq_len = seq.len();
seq.try_into()
.map_err(|_| TypeError::WrongLength(seq_len, N))
}
/// Same as `.list_of()`, but only binds the first N-1 terms.
/// Same as `.seq_of()`, but only binds the first N-1 terms.
/// If there are exactly N terms, the last one is bound to the Nth return variable.
/// If there are more then N terms, the remaining terms are bound to a new list term
/// If there are more then N terms, the remaining terms are bound to a new seq term
/// that is returned as the Nth return variable.
///
/// Example:
@ -191,21 +212,21 @@ impl<'a, 'b> Term<'a, 'b> {
/// use nettext::dec::decode;
///
/// let term1 = decode(b"hello world").unwrap();
/// let [s1a, s1b] = term1.list_of_first().unwrap();
/// let [s1a, s1b] = term1.seq_of_first().unwrap();
/// assert_eq!(s1a.str().unwrap(), "hello");
/// assert_eq!(s1b.str().unwrap(), "world");
///
/// let term2 = decode(b"hello mighty world").unwrap();
/// let [s2a, s2b] = term2.list_of_first().unwrap();
/// let [s2a, s2b] = term2.seq_of_first().unwrap();
/// assert_eq!(s2a.str().unwrap(), "hello");
/// assert_eq!(s2b.list().len(), 2);
/// assert_eq!(s2b.seq().len(), 2);
/// assert_eq!(s2b.raw(), b"mighty world");
/// ```
pub fn list_of_first<const N: usize>(&self) -> Result<[Term<'a, '_>; N], TypeError> {
pub fn seq_of_first<const N: usize>(&self) -> Result<[Term<'a, '_>; N], TypeError> {
match self.0.mkref() {
AnyTerm::ListRef(raw, list) => match list.len().cmp(&N) {
std::cmp::Ordering::Less => Err(TypeError::WrongLength(list.len(), N)),
std::cmp::Ordering::Equal => Ok(list
AnyTerm::SeqRef(raw, seq) => match seq.len().cmp(&N) {
std::cmp::Ordering::Less => Err(TypeError::WrongLength(seq.len(), N)),
std::cmp::Ordering::Equal => Ok(seq
.iter()
.map(|x| Term(x.mkref().into()))
.collect::<Vec<_>>()
@ -213,15 +234,15 @@ impl<'a, 'b> Term<'a, 'b> {
.unwrap()),
std::cmp::Ordering::Greater => {
let mut ret = Vec::with_capacity(N);
for item in list[0..N - 1].iter() {
for item in seq[0..N - 1].iter() {
ret.push(Term(item.mkref().into()));
}
let remaining_begin = list[N - 1].raw().as_ptr() as usize;
let remaining_begin = seq[N - 1].raw().as_ptr() as usize;
let remaining_offset = remaining_begin - raw.as_ptr() as usize;
let remaining_raw = &raw[remaining_offset..];
ret.push(Term(AnyTerm::ListRef(remaining_raw, &list[N - 1..])));
ret.push(Term(AnyTerm::SeqRef(remaining_raw, &seq[N - 1..])));
Ok(ret.try_into().unwrap())
}
@ -242,7 +263,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2 }").unwrap();
/// let dict = term.dict().unwrap();
/// assert_eq!(dict.get("k1").unwrap().str().unwrap(), "v1");
/// assert_eq!(dict.get("k2").unwrap().str().unwrap(), "v2");
@ -261,14 +282,14 @@ impl<'a, 'b> Term<'a, 'b> {
}
/// Checks term is a dictionnary whose keys are exactly those supplied,
/// and returns the associated values as a list.
/// and returns the associated values as a seq.
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2, k3 = v3 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2; k3 = v3 }").unwrap();
/// let [s1, s2] = term.dict_of(["k1", "k2"], true).unwrap();
/// assert_eq!(s1.str().unwrap(), "v1");
/// assert_eq!(s2.str().unwrap(), "v2");
@ -303,14 +324,14 @@ impl<'a, 'b> Term<'a, 'b> {
}
/// Checks term is a dictionnary whose keys are included in those supplied,
/// and returns the associated values as a list of options.
/// and returns the associated values as a seq of options.
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2, k4 = v4 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2; k4 = v4 }").unwrap();
/// let [s1, s2, s3] = term.dict_of_opt(["k1", "k2", "k3"], true).unwrap();
/// assert_eq!(s1.unwrap().str().unwrap(), "v1");
/// assert_eq!(s2.unwrap().str().unwrap(), "v2");
@ -339,28 +360,25 @@ impl<'a, 'b> Term<'a, 'b> {
}
}
/// Checks term is a dictionary with a single key `.`,
/// and returns the associated value.
/// Checks if the term is a list, and if so, return its elements in a vec.
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ . = a b c d e }").unwrap();
/// assert_eq!(term.nested().unwrap().raw(), b"a b c d e");
/// let term2 = decode(b"[ hello; world ]").unwrap();
/// let seq2 = term2.list().unwrap();
/// assert_eq!(seq2.len(), 2);
/// assert_eq!(seq2[0].str().unwrap(), "hello");
/// assert_eq!(seq2[1].str().unwrap(), "world");
/// ```
pub fn nested(&self) -> Result<Term<'a, '_>, TypeError> {
pub fn list(&self) -> Result<Vec<Term<'a, '_>>, TypeError> {
match self.0.mkref() {
AnyTerm::DictRef(_, d) if d.len() == 1 => {
let (k, v) = d.iter().next().unwrap();
if k != b"." {
Err(TypeError::WrongType("NESTED"))
} else {
Ok(Term(v.mkref()))
}
AnyTerm::ListRef(_r, l) => {
Ok(l.iter().map(|x| Term(x.mkref().into())).collect::<Vec<_>>())
}
_ => Err(TypeError::WrongType("NESTED")),
_ => Err(TypeError::WrongType("LIST")),
}
}
@ -399,16 +417,16 @@ impl<'a, 'b> Term<'a, 'b> {
};
match self.0.mkref() {
AnyTerm::Str(encoded) => {
if encoded == b"." {
if encoded == b"-" {
Ok(vec![])
} else {
decode(encoded)
}
}
AnyTerm::ListRef(_, list) => {
AnyTerm::SeqRef(_, seq) => {
let mut ret = Vec::with_capacity(128);
for term in list.iter() {
if let NonListTerm::Str(encoded) = term {
for term in seq.iter() {
if let NonSeqTerm::Str(encoded) = term {
ret.extend(decode(encoded)?)
} else {
return Err(TypeError::WrongType("BYTES"));
@ -421,72 +439,40 @@ impl<'a, 'b> Term<'a, 'b> {
}
/// Try to interpret this string as base64-encoded bytes,
/// with an exact length.
/// with a marker prefix and an exact byte length.
/// This is typically used for cryptographic data types such as hashes,
/// keys, signatures, ...
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"aGVsbG8sIHdvcmxkIQ").unwrap();
/// assert_eq!(&term.bytes_exact::<13>().unwrap(), b"hello, world!");
/// let term = decode(b"test:aGVsbG8sIHdvcmxkIQ").unwrap();
/// assert_eq!(&term.marked_bytes_exact::<13>("test").unwrap(), b"hello, world!");
/// ```
pub fn bytes_exact<const N: usize>(&self) -> Result<[u8; N], TypeError> {
let bytes = self.bytes()?;
let bytes_len = bytes.len();
bytes
.try_into()
.map_err(|_| TypeError::WrongLength(bytes_len, N))
}
}
// ---- CRYPTO HELPERS ----
#[cfg(feature = "blake2")]
impl<'a, 'b> Term<'a, 'b> {
/// Try to interpret this string as a Blake2b512 digest (32-bytes base64 encoded)
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{
/// message = hello,
/// hash = 5M-jmj03vjHFlgnoB5cHmcqmihm_qhUTXxZQheAdQaZboeGxRq62vQCStJ6sIUwQPM-jo2WVS7vlL3Sis2IMlA
/// }").unwrap();
/// let [msg, hash] = term.dict_of(["message", "hash"], false).unwrap();
/// assert!(hash.b2sum().unwrap().verify(msg.raw()).is_ok());
/// ```
pub fn b2sum(&self) -> Result<crypto::Blake2Sum, TypeError> {
Ok(crypto::Blake2Sum::from_bytes(self.bytes_exact()?))
}
}
#[cfg(feature = "ed25519-dalek")]
impl<'a, 'b> Term<'a, 'b> {
/// Try to interpret this string as an ed25519 keypair (64 bytes base64 encoded)
pub fn keypair(&self) -> Result<crypto::Keypair, TypeError> {
let bytes = self.bytes_exact::<64>()?;
crypto::Keypair::from_bytes(&bytes).map_err(|_| TypeError::WrongType("KEYPAIR"))
}
/// Try to interpret this string as an ed25519 public key (32 bytes base64 encoded)
pub fn public_key(&self) -> Result<crypto::PublicKey, TypeError> {
let bytes = self.bytes_exact::<32>()?;
crypto::PublicKey::from_bytes(&bytes).map_err(|_| TypeError::WrongType("PUBLICKEY"))
}
/// Try to interpret this string as an ed25519 secret key (32 bytes base64 encoded)
pub fn secret_key(&self) -> Result<crypto::SecretKey, TypeError> {
let bytes = self.bytes_exact::<32>()?;
crypto::SecretKey::from_bytes(&bytes).map_err(|_| TypeError::WrongType("SECRETKEY"))
}
/// Try to interpret this string as an ed25519 signature (64 bytes base64 encoded)
pub fn signature(&self) -> Result<crypto::Signature, TypeError> {
let bytes = self.bytes_exact::<64>()?;
crypto::Signature::from_bytes(&bytes).map_err(|_| TypeError::WrongType("SIGNATURE"))
/// Decode a `marker:base64data` string term into exactly `N` bytes,
/// checking both the marker prefix and the decoded length.
pub fn marked_bytes_exact<const N: usize>(
    &self,
    marker: &'static str,
) -> Result<[u8; N], TypeError> {
    let mkr = marker.as_bytes();
    match &self.0 {
        // Accept only a plain string of the form `<marker>:<payload>`
        // where the payload is at least one byte long (hence `+ 2`:
        // one byte for `:`, one for the payload).
        AnyTerm::Str(s)
            if s.len() >= mkr.len() + 2 && &s[..mkr.len()] == mkr && s[mkr.len()] == b':' =>
        {
            // `-` is the conventional encoding of an empty byte string,
            // since nettext has no empty string literal.
            let bytes = match &s[mkr.len() + 1..] {
                b"-" => vec![],
                bytes => base64::decode_config(bytes, base64::URL_SAFE_NO_PAD)
                    .map_err(|_| TypeError::WrongType("BYTES"))?,
            };
            let bytes_len = bytes.len();
            // Enforce the exact decoded length `N` requested by the caller.
            bytes
                .try_into()
                .map_err(|_| TypeError::WrongLength(bytes_len, N))
        }
        // A string that does not carry the expected `marker:` prefix.
        AnyTerm::Str(_) => Err(TypeError::WrongMarker(marker)),
        // Dicts, lists and sequences cannot carry marked bytes.
        _ => Err(TypeError::WrongType("BYTES")),
    }
}
}
@ -499,7 +485,9 @@ impl<'a, 'b> AnyTerm<'a, 'b> {
AnyTerm::Dict(r, _)
| AnyTerm::DictRef(r, _)
| AnyTerm::List(r, _)
| AnyTerm::ListRef(r, _) => r,
| AnyTerm::ListRef(r, _)
| AnyTerm::Seq(r, _)
| AnyTerm::SeqRef(r, _) => r,
}
}
@ -508,25 +496,30 @@ impl<'a, 'b> AnyTerm<'a, 'b> {
AnyTerm::Str(s) => AnyTerm::Str(s),
AnyTerm::Dict(r, d) => AnyTerm::DictRef(r, d),
AnyTerm::DictRef(r, d) => AnyTerm::DictRef(r, d),
AnyTerm::List(r, l) => AnyTerm::ListRef(r, &l[..]),
AnyTerm::List(r, l) => AnyTerm::ListRef(r, l),
AnyTerm::ListRef(r, l) => AnyTerm::ListRef(r, l),
AnyTerm::Seq(r, l) => AnyTerm::SeqRef(r, &l[..]),
AnyTerm::SeqRef(r, l) => AnyTerm::SeqRef(r, l),
}
}
}
impl<'a, 'b> NonListTerm<'a, 'b> {
impl<'a, 'b> NonSeqTerm<'a, 'b> {
fn raw(&self) -> &'a [u8] {
match &self {
NonListTerm::Str(s) => s,
NonListTerm::Dict(r, _) | NonListTerm::DictRef(r, _) => r,
NonSeqTerm::Str(s) => s,
NonSeqTerm::Dict(r, _) | NonSeqTerm::DictRef(r, _) => r,
NonSeqTerm::List(r, _) | NonSeqTerm::ListRef(r, _) => r,
}
}
fn mkref(&self) -> NonListTerm<'a, '_> {
fn mkref(&self) -> NonSeqTerm<'a, '_> {
match &self {
NonListTerm::Str(s) => NonListTerm::Str(s),
NonListTerm::Dict(r, d) => NonListTerm::DictRef(r, d),
NonListTerm::DictRef(r, d) => NonListTerm::DictRef(r, d),
NonSeqTerm::Str(s) => NonSeqTerm::Str(s),
NonSeqTerm::Dict(r, d) => NonSeqTerm::DictRef(r, d),
NonSeqTerm::DictRef(r, d) => NonSeqTerm::DictRef(r, d),
NonSeqTerm::List(r, l) => NonSeqTerm::ListRef(r, l),
NonSeqTerm::ListRef(r, l) => NonSeqTerm::ListRef(r, l),
}
}
}
@ -551,10 +544,6 @@ impl<'a, 'b> std::fmt::Display for Term<'a, 'b> {
// ---- DEBUG REPR ----
/// Best-effort view of a byte slice as UTF-8 text for debug output.
/// Falls back to a fixed placeholder when the bytes are not valid UTF-8.
pub(crate) fn debug(x: &[u8]) -> &str {
    match std::str::from_utf8(x) {
        Ok(text) => text,
        Err(_) => "<invalid ascii>",
    }
}
impl<'a, 'b> std::fmt::Debug for AnyTerm<'a, 'b> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
match self.mkref() {
@ -573,12 +562,19 @@ impl<'a, 'b> std::fmt::Debug for AnyTerm<'a, 'b> {
}
write!(f, "]")
}
AnyTerm::SeqRef(raw, l) => {
write!(f, "Seq[`{}`", debug(raw))?;
for i in l.iter() {
write!(f, "\n {:?}", i)?;
}
write!(f, "]")
}
_ => unreachable!(),
}
}
}
impl<'a, 'b> std::fmt::Debug for NonListTerm<'a, 'b> {
impl<'a, 'b> std::fmt::Debug for NonSeqTerm<'a, 'b> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
AnyTerm::from(self.mkref()).fmt(f)
}

View file

@ -1,23 +1,18 @@
use std::fmt;
use err_derive::Error;
/// An error that happenned when creating a nettext encoder term
#[derive(Debug)]
#[derive(Debug, Error)]
pub enum Error {
#[error(display = "Invalid character '{}'", _0)]
InvalidCharacter(u8),
#[error(display = "Invalid RAW nettext litteral")]
InvalidRaw,
#[error(display = "Tried to insert into a term that isn't a dictionnary")]
NotADictionnary,
#[error(display = "Duplicate key: {}", _0)]
DuplicateKey(String),
ListInList,
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::InvalidCharacter(c) => write!(f, "Invalid character '{}'", *c as char),
Error::InvalidRaw => write!(f, "Invalid RAW nettext litteral"),
Error::NotADictionnary => write!(f, "Tried to insert into a term that isn't a dictionnary"),
Error::DuplicateKey(s) => write!(f, "Duplicate dict key: {}", s),
Error::ListInList => write!(f, "Refusing to build nested lists with list(), use either list_flatten() or list_nested()"),
}
}
#[error(
display = "Refusing to build nested sequencess with seq(), use either seq_flatten() or seq_nested()"
)]
SeqInSeq,
}

View file

@ -5,13 +5,13 @@
//! ```
//! use nettext::enc::*;
//!
//! let nettext_encoding = list([
//! let nettext_encoding = seq([
//! string("CALL").unwrap(),
//! string("myfunction").unwrap(),
//! dict([
//! ("a", string("hello").unwrap()),
//! ("b", string("world").unwrap()),
//! ("c", raw(b"{ a = 12, b = 42 }").unwrap()),
//! ("c", raw(b"{ a = 12; b = 42 }").unwrap()),
//! ("d", bytes_split(&((0..128u8).collect::<Vec<_>>()))),
//! ]).unwrap(),
//! ]).unwrap().encode();
@ -23,6 +23,7 @@ use std::borrow::{Borrow, Cow};
use std::collections::HashMap;
use crate::dec::{self, decode};
use crate::*;
use crate::{is_string_char, is_whitespace};
pub use error::Error;
@ -35,6 +36,7 @@ enum T<'a> {
OwnedStr(Vec<u8>),
Dict(HashMap<Cow<'a, [u8]>, T<'a>>),
List(Vec<T<'a>>),
Seq(Vec<T<'a>>),
}
/// The result type for trying to encode something as nettext
@ -86,7 +88,7 @@ pub fn string_owned(s: String) -> Result<'static> {
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(raw(b"Hello { a = b, c = d} .").unwrap().encode(), b"Hello { a = b, c = d} .");
/// assert_eq!(raw(b"Hello { a = b; c = d} .").unwrap().encode(), b"Hello { a = b; c = d} .");
/// ```
pub fn raw(bytes: &[u8]) -> Result<'_> {
if decode(bytes).is_err() {
@ -95,7 +97,118 @@ pub fn raw(bytes: &[u8]) -> Result<'_> {
Ok(Term(T::Str(bytes)))
}
/// Term corresponding to a list of terms
/// Term corresponding to a byte slice,
/// encoded as url-safe base64 without padding.
/// Since empty strings are not representable in nettext,
/// an empty byte string is encoded as the special string `-`.
///
/// Example:
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(bytes(b"").encode(), b"-");
/// assert_eq!(bytes(b"hello, world!").encode(), b"aGVsbG8sIHdvcmxkIQ");
/// ```
pub fn bytes(bytes: &[u8]) -> Term<'static> {
    // Empty input has no base64 form usable here: emit the `-` sentinel.
    if bytes.is_empty() {
        return Term(T::Str(b"-"));
    }
    let encoded = base64::encode_config(bytes, base64::URL_SAFE_NO_PAD);
    Term(T::OwnedStr(encoded.into_bytes()))
}
/// Same as `bytes()`, but splits the byte slice in 48-byte chunks
/// and encodes each chunk separately, putting them in a sequence of terms.
/// Usefull for long byte slices to have cleaner representations.
pub fn bytes_split(bytes: &[u8]) -> Term<'static> {
if bytes.is_empty() {
Term(T::Str(b"-"))
} else {
let chunks = bytes
.chunks(48)
.map(|b| T::OwnedStr(base64::encode_config(b, base64::URL_SAFE_NO_PAD).into_bytes()))
.collect::<Vec<_>>();
if chunks.len() > 1 {
Term(T::Seq(chunks))
} else {
Term(chunks.into_iter().next().unwrap())
}
}
}
/// Term corresponding to a byte slice,
/// encoding using base64 url-safe encoding without padding,
/// with a prefix used to identify its content type.
/// The marker prefix is typically used in crypto settings to identify
/// a cryptographic protocol or algorithm; it may not contain the `:` character.
///
/// Example:
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(marked_bytes("mytype", b"").unwrap().encode(), b"mytype:-");
/// assert_eq!(marked_bytes("mytype", b"hello, world!").unwrap().encode(), b"mytype:aGVsbG8sIHdvcmxkIQ");
/// ```
pub fn marked_bytes(marker: &str, bytes: &[u8]) -> Result<'static> {
for c in marker.as_bytes().iter() {
if !is_string_char(*c) || *c == b':' {
return Err(Error::InvalidCharacter(*c));
}
}
if bytes.is_empty() {
Ok(Term(T::OwnedStr(format!("{}:-", marker).into_bytes())))
} else {
Ok(Term(T::OwnedStr(
format!(
"{}:{}",
marker,
base64::encode_config(bytes, base64::URL_SAFE_NO_PAD)
)
.into_bytes(),
)))
}
}
// ---- composed terms -----
/// Term corresponding to a sequence of terms. Subsequences are banned and will raise an error.
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(seq([
/// string("Hello").unwrap(),
/// string("world").unwrap()
/// ]).unwrap().encode(), b"Hello world");
/// ```
pub fn seq<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Result<'a> {
let mut tmp = Vec::with_capacity(8);
for t in terms {
match t.0 {
T::Seq(_) => return Err(Error::SeqInSeq),
x => tmp.push(x),
}
}
Ok(Term(T::Seq(tmp)))
}
/// Term corresponding to a sequence of terms. Sub-sequences are flattenned.
pub fn seq_flatten<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
let mut tmp = Vec::with_capacity(8);
for t in terms {
match t.0 {
T::Seq(t) => tmp.extend(t),
x => tmp.push(x),
}
}
Term(T::Seq(tmp))
}
/// Term corresponding to a list of terms.
///
/// ```
/// use nettext::enc::*;
@ -103,41 +216,11 @@ pub fn raw(bytes: &[u8]) -> Result<'_> {
/// assert_eq!(list([
/// string("Hello").unwrap(),
/// string("world").unwrap()
/// ]).unwrap().encode(), b"Hello world");
/// ]).encode(), b"[\n Hello;\n world;\n]");
/// ```
pub fn list<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Result<'a> {
let mut tmp = Vec::with_capacity(8);
for t in terms {
match t.0 {
T::List(_) => return Err(Error::ListInList),
x => tmp.push(x),
}
}
Ok(Term(T::List(tmp)))
}
/// Term corresponding to a list of terms. Sub-lists are flattenned.
pub fn list_flatten<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Result<'a> {
let mut tmp = Vec::with_capacity(8);
for t in terms {
match t.0 {
T::List(t) => tmp.extend(t),
x => tmp.push(x),
}
}
Ok(Term(T::List(tmp)))
}
/// Term corresponding to a list of terms. Sub-lists are represented as NESTED: `{.= sub list items }`.
pub fn list_nested<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Result<'a> {
let mut tmp = Vec::with_capacity(8);
for t in terms {
match t.0 {
T::List(t) => tmp.push(Term(T::List(t)).nested().0),
x => tmp.push(x),
}
}
Ok(Term(T::List(tmp)))
pub fn list<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
let terms = terms.into_iter().map(|x| x.0).collect::<Vec<_>>();
Term(T::List(terms))
}
/// Term corresponding to a dictionnary of items
@ -148,7 +231,7 @@ pub fn list_nested<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Result<'a>
/// assert_eq!(dict([
/// ("a", string("Hello").unwrap()),
/// ("b", string("world").unwrap())
/// ]).unwrap().encode(), b"{\n a = Hello,\n b = world,\n}");
/// ]).unwrap().encode(), b"{\n a = Hello;\n b = world;\n}");
/// ```
pub fn dict<'a, I: IntoIterator<Item = (&'a str, Term<'a>)>>(pairs: I) -> Result<'a> {
let mut tmp = HashMap::new();
@ -160,49 +243,17 @@ pub fn dict<'a, I: IntoIterator<Item = (&'a str, Term<'a>)>>(pairs: I) -> Result
Ok(Term(T::Dict(tmp)))
}
/// Term corresponding to a byte slice,
/// encoding using base64 url-safe encoding without padding
///
/// Example:
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(bytes(b"hello, world!").encode(), b"aGVsbG8sIHdvcmxkIQ");
/// ```
pub fn bytes(bytes: &[u8]) -> Term<'static> {
Term(T::OwnedStr(
base64::encode_config(bytes, base64::URL_SAFE_NO_PAD).into_bytes(),
))
}
/// Same as `bytes()`, but splits the byte slice in 48-byte chunks
/// and encodes each chunk separately, putting them in a list of terms.
/// Usefull for long byte slices to have cleaner representations,
/// mainly usefull for dictionnary keys.
pub fn bytes_split(bytes: &[u8]) -> Term<'static> {
let chunks = bytes
.chunks(48)
.map(|b| T::OwnedStr(base64::encode_config(b, base64::URL_SAFE_NO_PAD).into_bytes()))
.collect::<Vec<_>>();
if chunks.len() > 1 {
Term(T::List(chunks))
} else {
Term(chunks.into_iter().next().unwrap_or(T::Str(b".")))
}
}
impl<'a> Term<'a> {
/// Append a term to an existing term.
/// Transforms the initial term into a list if necessary.
/// Transforms the initial term into a seq if necessary.
#[must_use]
pub fn append(self, t: Term<'a>) -> Term<'a> {
match self.0 {
T::List(mut v) => {
T::Seq(mut v) => {
v.push(t.0);
Term(T::List(v))
Term(T::Seq(v))
}
x => Term(T::List(vec![x, t.0])),
x => Term(T::Seq(vec![x, t.0])),
}
}
@ -219,21 +270,6 @@ impl<'a> Term<'a> {
_ => Err(Error::NotADictionnary),
}
}
/// Makes a NESTED term of this term, by putting it in a dict
/// with a single key `.`.
///
/// Example:
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(list([string("hello").unwrap(), string("world").unwrap()]).unwrap().nested().encode(), b"{.= hello world }");
/// ```
#[must_use]
pub fn nested(self) -> Term<'a> {
dict([(".", self)]).unwrap()
}
}
// ---- additional internal functions for serde module ----
@ -268,6 +304,18 @@ impl<'a> Term<'a> {
self.0.encode_aux(&mut buf, 0, true);
buf
}
/// Generate the nettext representation of a term, as a String
pub fn encode_string(self) -> String {
    // SAFETY(review): assumes `encode()` only ever emits valid UTF-8.
    // This invariant is upheld by the encoder constructors elsewhere in
    // this module — confirm before relying on it; a checked
    // `String::from_utf8` would panic instead of causing UB if violated.
    unsafe { String::from_utf8_unchecked(self.encode()) }
}

/// Generate the concise nettext representation of a term
pub fn encode_concise(self) -> Vec<u8> {
    // Concise form: single-byte separators, no indentation or newlines.
    let mut buf = Vec::with_capacity(128);
    self.0.encode_concise_aux(&mut buf);
    buf
}
}
impl<'a> T<'a> {
@ -277,20 +325,16 @@ impl<'a> T<'a> {
T::OwnedStr(s) => buf.extend_from_slice(&s),
T::Dict(mut d) => {
if d.is_empty() {
buf.extend_from_slice(b"{}");
buf.extend_from_slice(&[DICT_OPEN, DICT_CLOSE]);
} else if d.len() == 1 {
let (k, v) = d.into_iter().next().unwrap();
if k.as_ref() == b"." {
buf.extend_from_slice(b"{.= ");
} else {
buf.extend_from_slice(b"{ ");
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(b" = ");
}
buf.extend_from_slice(&[DICT_OPEN, b' ']);
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(&[b' ', DICT_ASSIGN, b' ']);
v.encode_aux(buf, indent + 2, false);
buf.extend_from_slice(b" }");
buf.extend_from_slice(&[b' ', DICT_CLOSE]);
} else {
buf.extend_from_slice(b"{\n");
buf.extend_from_slice(&[DICT_OPEN, b'\n']);
let indent2 = indent + 2;
let mut keys = d.keys().cloned().collect::<Vec<_>>();
keys.sort();
@ -300,17 +344,43 @@ impl<'a> T<'a> {
buf.push(b' ');
}
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(b" = ");
buf.extend_from_slice(&[b' ', DICT_ASSIGN, b' ']);
v.encode_aux(buf, indent2, false);
buf.extend_from_slice(b",\n");
buf.extend_from_slice(&[DICT_DELIM, b'\n']);
}
for _ in 0..indent {
buf.push(b' ');
}
buf.push(b'}');
buf.push(DICT_CLOSE);
}
}
T::List(l) => {
if l.len() == 0 {
buf.extend_from_slice(&[LIST_OPEN, LIST_CLOSE]);
} else if l.len() == 1 {
buf.extend_from_slice(&[LIST_OPEN, b' ']);
l.into_iter()
.next()
.unwrap()
.encode_aux(buf, indent + 2, false);
buf.extend_from_slice(&[b' ', LIST_CLOSE]);
} else {
let indent2 = indent + 2;
buf.extend_from_slice(&[LIST_OPEN, b'\n']);
for item in l {
for _ in 0..indent2 {
buf.push(b' ');
}
item.encode_aux(buf, indent2, false);
buf.extend_from_slice(&[LIST_DELIM, b'\n']);
}
for _ in 0..indent {
buf.push(b' ');
}
buf.push(LIST_CLOSE);
}
}
T::Seq(l) => {
let indent2 = indent + 2;
for (i, v) in l.into_iter().enumerate() {
if !is_toplevel && buf.iter().rev().take_while(|c| **c != b'\n').count() >= 70 {
@ -326,56 +396,94 @@ impl<'a> T<'a> {
}
}
}
/// Append the concise (separator-only, no whitespace) encoding of this
/// term to `buf`. Dict keys are emitted in sorted order so the output
/// is deterministic.
fn encode_concise_aux(self, buf: &mut Vec<u8>) {
    match self {
        T::Str(s) => buf.extend_from_slice(s),
        T::OwnedStr(s) => buf.extend_from_slice(&s),
        T::Dict(mut entries) => {
            buf.push(DICT_OPEN);
            // Sort keys for a canonical ordering, then move each value
            // out of the map as we emit it.
            let mut keys: Vec<_> = entries.keys().cloned().collect();
            keys.sort();
            let mut first = true;
            for key in keys {
                if !first {
                    buf.push(DICT_DELIM);
                }
                first = false;
                let value = entries.remove(&key).unwrap();
                buf.extend_from_slice(key.borrow());
                buf.push(DICT_ASSIGN);
                value.encode_concise_aux(buf);
            }
            buf.push(DICT_CLOSE);
        }
        T::List(items) => {
            buf.push(LIST_OPEN);
            let mut first = true;
            for item in items {
                if !first {
                    buf.push(LIST_DELIM);
                }
                first = false;
                item.encode_concise_aux(buf);
            }
            buf.push(LIST_CLOSE);
        }
        T::Seq(items) => {
            // Sequence elements are separated by a single space.
            let mut first = true;
            for item in items {
                if !first {
                    buf.push(b' ');
                }
                first = false;
                item.encode_concise_aux(buf);
            }
        }
    }
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::debug;
#[test]
fn complex1() {
let input = list([
let input = seq([
string("HELLO").unwrap(),
string("alexhelloworld").unwrap(),
list([string("dude").unwrap(), string("why").unwrap()]),
dict([
("from", string("jxx").unwrap()),
("subject", string("hello").unwrap()),
("data", raw(b"{ f1 = plop, f2 = kuko }").unwrap()),
("data", raw(b"{ f1 = plop; f2 = kuko }").unwrap()),
])
.unwrap(),
])
.unwrap();
let expected = b"HELLO alexhelloworld {
data = { f1 = plop, f2 = kuko },
from = jxx,
subject = hello,
let expected = "HELLO alexhelloworld [
dude;
why;
] {
data = { f1 = plop; f2 = kuko };
from = jxx;
subject = hello;
}";
let enc = input.encode();
eprintln!("{}", std::str::from_utf8(&enc).unwrap());
eprintln!("{}", std::str::from_utf8(&expected[..]).unwrap());
assert_eq!(&enc, &expected[..]);
assert_eq!(debug(&input.encode()), expected);
}
#[test]
fn nested() {
assert!(list([
string("a").unwrap(),
string("b").unwrap(),
list([string("c").unwrap(), string("d").unwrap()]).unwrap()
])
.is_err());
assert_eq!(
list([
string("a").unwrap(),
string("b").unwrap(),
list([string("c").unwrap(), string("d").unwrap()])
.unwrap()
.nested()
fn complex1_concise() {
let input = seq([
string("HELLO").unwrap(),
string("alexhelloworld").unwrap(),
list([string("dude").unwrap(), string("why").unwrap()]),
dict([
("from", string("jxx").unwrap()),
("subject", string("hello").unwrap()),
("data", raw(b"{ f1 = plop; f2 = kuko }").unwrap()),
])
.unwrap()
.encode(),
b"a b {.= c d }"
);
.unwrap(),
])
.unwrap();
let expected_concise = "HELLO alexhelloworld [dude;why] {data={ f1 = plop; f2 = kuko };from=jxx;subject=hello}";
assert_eq!(debug(&input.encode_concise()), expected_concise);
}
}

View file

@ -3,88 +3,93 @@
//! ```
//! use nettext::enc::*;
//! use nettext::dec::*;
//! use nettext::crypto::{self, Signer, Verifier};
//! use nettext::crypto::*;
//!
//! let keypair = crypto::generate_keypair();
//! let final_payload = {
//! let keypair = gen_signing_keypair();
//!
//! // Encode a first object that represents a payload that will be hashed and signed
//! let text1 = list([
//! string("CALL").unwrap(),
//! string("myfunction").unwrap(),
//! // Encode a first object that represents a payload that will be hashed and signed
//! let signed_payload = seq([
//! string("CALL").unwrap(),
//! string("myfunction").unwrap(),
//! dict([
//! ("a", string("hello").unwrap()),
//! ("b", string("world").unwrap()),
//! ("c", raw(b"{ a = 12; b = 42 }").unwrap()),
//! ("d", bytes_split(&((0..128u8).collect::<Vec<_>>()))),
//! ]).unwrap(),
//! keypair.public_key().term().unwrap(),
//! ]).unwrap().encode();
//! eprintln!("{}", std::str::from_utf8(&signed_payload).unwrap());
//!
//! let hash = compute_hash(&signed_payload, None);
//! let sign = compute_signature(&signed_payload[..], &keypair);
//!
//! // Encode a second object that represents the signed and hashed payload
//! dict([
//! ("a", string("hello").unwrap()),
//! ("b", string("world").unwrap()),
//! ("c", raw(b"{ a = 12, b = 42 }").unwrap()),
//! ("d", bytes_split(&((0..128u8).collect::<Vec<_>>()))),
//! ]).unwrap(),
//! keypair.public.term().unwrap(),
//! ]).unwrap().encode();
//! eprintln!("{}", std::str::from_utf8(&text1).unwrap());
//!
//! let hash = crypto::Blake2Sum::compute(&text1);
//! let sign = keypair.sign(&text1);
//!
//! // Encode a second object that represents the signed and hashed payload
//! let text2 = dict([
//! ("hash", hash.term().unwrap()),
//! ("signature", sign.term().unwrap()),
//! ("payload", raw(&text1).unwrap()),
//! ]).unwrap().encode();
//! eprintln!("{}", std::str::from_utf8(&text2).unwrap());
//! ("hash", hash.term().unwrap()),
//! ("signature", sign.term().unwrap()),
//! ("payload", raw(&signed_payload).unwrap()),
//! ]).unwrap().encode()
//! };
//! eprintln!("{}", std::str::from_utf8(&final_payload).unwrap());
//!
//! // Decode and check everything is fine
//! let object1 = decode(&text2).unwrap();
//! let [hash, signature, payload] = object1.dict_of(["hash", "signature", "payload"], false).unwrap();
//! assert!(hash.b2sum().unwrap().verify(payload.raw()).is_ok());
//! assert_eq!(payload.raw(), text1);
//! let signed_object = decode(&final_payload).unwrap();
//! let [hash, signature, payload] = signed_object.dict_of(["hash", "signature", "payload"], false).unwrap();
//! let hash = hash.hash().unwrap();
//! let signature = signature.signature().unwrap();
//! let expected_hash = compute_hash(payload.raw(), None);
//! assert_eq!(hash, expected_hash);
//!
//! let object2 = decode(payload.raw()).unwrap();
//!
//! let [verb, arg1, arg2, pubkey] = object2.list_of().unwrap();
//! let [verb, arg1, arg2, pubkey] = object2.seq_of().unwrap();
//! let pubkey = pubkey.public_key().unwrap();
//! assert!(pubkey.verify(payload.raw(), &signature.signature().unwrap()).is_ok());
//! assert!(verify_signature(&signature, payload.raw(), &pubkey));
//!
//! assert_eq!(verb.string().unwrap(), "CALL");
//! assert_eq!(arg1.string().unwrap(), "myfunction");
//! assert_eq!(pubkey, keypair.public);
//! ```
//!
//! The value of `text1` would be as follows:
//! The value of `signed_payload` would be as follows:
//!
//! ```raw
//! CALL myfunction {
//! a = hello,
//! b = world,
//! c = { a = 12, b = 42 },
//! a = hello;
//! b = world;
//! c = { a = 12; b = 42 };
//! d = AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4v
//! MDEyMzQ1Njc4OTo7PD0-P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5f
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8,
//! } 1hUAS2C0lzHXHWIvXqwuhUYVPlu3BbZ7ANLUMH_OYjo
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8;
//! } pk.ed25519:inYgWFyL_BzZTsXNKp71r2aVct_3Izi_bkerbzOiz94
//! ```
//!
//! And the value of `text2` would be as follows:
//! And the value of `final_payload` would be as follows:
//! ```raw
//! {
//! hash = Se6Wmbh3fbFQ9_ilE6zGbxNaEd9v5CHAb30p46Fxpi74iblRb9fXmGAiMkXnSe4DePTwb16zGAz_Ux4ZAG9s3w,
//! hash = h.b2:B1AnRocS90DmqxynGyvvBNuh-brucNO7-5hrsGplJr0;
//! payload = CALL myfunction {
//! a = hello,
//! b = world,
//! c = { a = 12, b = 42 },
//! a = hello;
//! b = world;
//! c = { a = 12; b = 42 };
//! d = AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4v
//! MDEyMzQ1Njc4OTo7PD0-P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5f
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8,
//! } 1hUAS2C0lzHXHWIvXqwuhUYVPlu3BbZ7ANLUMH_OYjo,
//! signature = 8mo3aeQD7JAdqbDcm7oVdaU0XamDwg03JtC3mfsWhEy_ZkNmWBFZefIDlzBR3XpnF0szTzEwtoPFfnR1fz6fAA,
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8;
//! } pk.ed25519:inYgWFyL_BzZTsXNKp71r2aVct_3Izi_bkerbzOiz94;
//! signature = sig.ed25519:LvLC1gHxNxUH44HHQRO-zWtLM4WyXhiYLFr94qTdI311Wa-kmgZsaWqSWe3jcjkS4PnsWSNt5apgbhR68cWWCg;
//! }
//! ```
//!
//! Note that the value of `text1` is embedded as-is inside `text2`. This is what allows us
//! to check the hash and the signature: the raw representation of the term hasn't changed.
pub mod crypto;
pub mod dec;
pub mod enc;
#[cfg(feature = "dryoc")]
pub mod crypto;
#[cfg(feature = "serde")]
pub mod serde;
@ -93,13 +98,23 @@ pub mod serde;
pub(crate) const DICT_OPEN: u8 = b'{';
pub(crate) const DICT_CLOSE: u8 = b'}';
pub(crate) const DICT_ASSIGN: u8 = b'=';
pub(crate) const DICT_DELIM: u8 = b',';
pub(crate) const STR_EXTRA_CHARS: &[u8] = b"._-+*?";
pub(crate) const DICT_DELIM: u8 = b';';
pub(crate) const LIST_OPEN: u8 = b'[';
pub(crate) const LIST_CLOSE: u8 = b']';
pub(crate) const LIST_DELIM: u8 = b';';
const BASE_EXTRA_CHARS: &[u8] = b".,:?!@$^<>|&#'_-+*/%";
const STR_EXTRA_CHARS: &[u8] = b"\\";
/// Returns true if `c` may appear in a bare (unquoted) nettext string:
/// ASCII alphanumerics plus the extra character sets declared above.
///
/// NOTE(review): the previous revision's body line was left in place next to
/// the new one, producing two tail expressions (not valid Rust); only the
/// current superset check is kept.
#[inline]
pub(crate) fn is_string_char(c: u8) -> bool {
    c.is_ascii_alphanumeric() || BASE_EXTRA_CHARS.contains(&c) || STR_EXTRA_CHARS.contains(&c)
}
/// Returns true for the ASCII whitespace bytes: space, tab, line feed,
/// form feed and carriage return (the exact set of `u8::is_ascii_whitespace`).
#[inline]
pub(crate) fn is_whitespace(c: u8) -> bool {
    matches!(c, b' ' | b'\t' | b'\n' | b'\r' | 0x0C)
}
/// Best-effort view of a byte slice as text, for test and diagnostic output.
///
/// Returns the bytes as `&str` when they form valid UTF-8, otherwise a fixed
/// placeholder string.
pub(crate) fn debug(x: &[u8]) -> &str {
    match std::str::from_utf8(x) {
        Ok(s) => s,
        Err(_) => "<invalid ascii>",
    }
}

View file

@ -6,7 +6,7 @@ use serde::de::{
};
use serde::Deserialize;
use crate::dec::debug as fmtdebug;
use crate::debug as fmtdebug;
use crate::dec::*;
use crate::serde::error::{Error, Result};
@ -14,12 +14,8 @@ use crate::serde::error::{Error, Result};
pub struct Deserializer<'de, 'a>(Term<'de, 'a>);
impl<'de, 'a> Deserializer<'de, 'a> {
fn from_term(input: &'a Term<'de, 'a>) -> Deserializer<'de, 'a> {
if let Ok(nested) = input.nested() {
Deserializer(nested)
} else {
Deserializer(Term(input.0.mkref()))
}
pub fn from_term(input: &'a Term<'de, 'a>) -> Deserializer<'de, 'a> {
Deserializer(Term(input.0.mkref()))
}
}
@ -228,7 +224,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de, 'a> {
where
V: Visitor<'de>,
{
let [variant, args] = self.0.list_of_first()?;
let [variant, args] = self.0.seq_of_first()?;
if variant.string()? == name {
visitor.visit_newtype_struct(&mut Deserializer(args))
} else {
@ -244,14 +240,14 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de, 'a> {
where
V: Visitor<'de>,
{
visitor.visit_seq(&mut Seq(&self.0.list()))
visitor.visit_seq(&mut Seq(&self.0.list()?))
}
fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value>
where
V: Visitor<'de>,
{
visitor.visit_seq(&mut Seq(&self.0.list()))
visitor.visit_seq(&mut Seq(&self.0.seq()))
}
fn deserialize_tuple_struct<V>(
@ -263,9 +259,9 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de, 'a> {
where
V: Visitor<'de>,
{
let [variant, args] = self.0.list_of_first()?;
let [variant, args] = self.0.seq_of_first()?;
if variant.string()? == name {
visitor.visit_seq(&mut Seq(&args.list()))
visitor.visit_seq(&mut Seq(&args.seq()))
} else {
Err(Error::custom(format!(
"Expected {}, got: `{}`",
@ -291,7 +287,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de, 'a> {
where
V: Visitor<'de>,
{
let [variant, data] = self.0.list_of()?;
let [variant, data] = self.0.seq_of()?;
if variant.string()? != name {
return Err(Error::custom(format!(
"Expected {}, got: `{}`",
@ -414,7 +410,7 @@ impl<'de, 'a> EnumAccess<'de> for Enum<'de, 'a> {
where
V: DeserializeSeed<'de>,
{
let variant = &self.0.list()[0];
let variant = &self.0.seq()[0];
let variant = seed.deserialize(&mut Deserializer(Term(variant.0.mkref())))?;
Ok((variant, self))
}
@ -424,7 +420,7 @@ impl<'de, 'a> VariantAccess<'de> for Enum<'de, 'a> {
type Error = Error;
fn unit_variant(self) -> Result<()> {
if self.0.list().len() > 1 {
if self.0.seq().len() > 1 {
Err(Error::custom("Spurrious data in unit variant"))
} else {
Ok(())
@ -435,7 +431,7 @@ impl<'de, 'a> VariantAccess<'de> for Enum<'de, 'a> {
where
T: DeserializeSeed<'de>,
{
let [_, rest] = self.0.list_of_first()?;
let [_, rest] = self.0.seq_of_first()?;
seed.deserialize(&mut Deserializer(rest))
}
@ -443,15 +439,15 @@ impl<'de, 'a> VariantAccess<'de> for Enum<'de, 'a> {
where
V: Visitor<'de>,
{
let [_, rest] = self.0.list_of_first()?;
visitor.visit_seq(&mut Seq(&rest.list()))
let [_, rest] = self.0.seq_of_first()?;
visitor.visit_seq(&mut Seq(&rest.seq()))
}
fn struct_variant<V>(self, _fields: &'static [&'static str], visitor: V) -> Result<V::Value>
where
V: Visitor<'de>,
{
let [_, rest] = self.0.list_of_first()?;
let [_, rest] = self.0.seq_of_first()?;
visitor.visit_map(&mut Dict::from_term(&rest)?)
}
}

View file

@ -11,17 +11,26 @@ pub use ser::{to_bytes, to_term, Serializer};
#[cfg(test)]
mod tests {
use super::*;
use crate::debug;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
fn test_bidir<T: Serialize + for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug>(
input: T,
expected: &[u8],
expected_concise: &str,
) {
eprintln!("Expecting: {}", expected_concise);
let ser = to_bytes(&input).unwrap();
eprintln!("Serialized: {}", std::str::from_utf8(&ser).unwrap());
assert_eq!(&ser, expected);
assert_eq!(&from_bytes::<T>(&ser).unwrap(), &input);
let ser = debug(&ser);
eprintln!("Serialized: {}", ser);
assert_eq!(from_bytes::<T>(ser.as_bytes()).unwrap(), input);
let ser_concise = to_term(&input).unwrap().encode_concise();
let ser_concise = debug(&ser_concise);
eprintln!("Serialized (concise): {}", ser_concise);
assert_eq!(ser_concise, expected_concise);
assert_eq!(from_bytes::<T>(ser_concise.as_bytes()).unwrap(), input);
}
#[test]
@ -36,10 +45,7 @@ mod tests {
int: 1,
seq: vec!["a".to_string(), "b".to_string()],
};
let expected = br#"Test {
int = 1,
seq = YQ Yg,
}"#;
let expected = r#"Test {int=1;seq=[YQ;Yg]}"#;
test_bidir(input, expected);
let input = vec![
@ -52,13 +58,7 @@ mod tests {
seq: vec!["c".to_string(), "d".to_string()],
},
];
let expected = br#"{.= Test {
int = 1,
seq = YQ Yg,
} } {.= Test {
int = 2,
seq = Yw ZA,
} }"#;
let expected = r#"[Test {int=1;seq=[YQ;Yg]};Test {int=2;seq=[Yw;ZA]}]"#;
test_bidir(input, expected);
}
@ -73,19 +73,19 @@ mod tests {
}
let input = E::Unit;
let expected = br#"E.Unit"#;
let expected = r#"E.Unit"#;
test_bidir(input, expected);
let input = E::Newtype(1);
let expected = br#"E.Newtype 1"#;
let expected = r#"E.Newtype 1"#;
test_bidir(input, expected);
let input = E::Tuple(1, 2);
let expected = br#"E.Tuple 1 2"#;
let expected = r#"E.Tuple 1 2"#;
test_bidir(input, expected);
let input = E::Struct { a: 1 };
let expected = br#"E.Struct { a = 1 }"#;
let expected = r#"E.Struct {a=1}"#;
test_bidir(input, expected);
let input = vec![
@ -96,37 +96,28 @@ mod tests {
E::Struct { a: 1 },
E::Tuple(3, 2),
];
let expected =
br#"E.Unit E.Unit {.= E.Newtype 1 } {.= E.Tuple 1 2 } {.= E.Struct { a = 1 } } {.=
E.Tuple 3 2 }"#;
let expected = r#"[E.Unit;E.Unit;E.Newtype 1;E.Tuple 1 2;E.Struct {a=1};E.Tuple 3 2]"#;
test_bidir(input, expected);
}
#[test]
fn test_seq1() {
let input = (1, 2, 3, 4);
let expected = br#"1 2 3 4"#;
let expected = r#"1 2 3 4"#;
test_bidir(input, expected);
}
#[test]
fn test_seq2() {
let input = (1, 2, (2, 3, 4), 5, 6);
let expected = br#"1 2 {.= 2 3 4 } 5 6"#;
fn test_list() {
let input = vec![1, 2, 3, 4];
let expected = r#"[1;2;3;4]"#;
test_bidir(input, expected);
}
#[test]
fn test_seq3() {
let input = [1, 2, 3, 4];
let expected = br#"1 2 3 4"#;
test_bidir(input, expected);
}
#[test]
fn test_seq4() {
let input = [[1, 2], [2, 3], [3, 4]];
let expected = br#"{.= 1 2 } {.= 2 3 } {.= 3 4 }"#;
fn test_seqlist() {
let input = vec![(1, 2), (2, 3), (3, 4), (5, 6)];
let expected = r#"[1 2;2 3;3 4;5 6]"#;
test_bidir(input, expected);
}
@ -135,19 +126,13 @@ mod tests {
let mut input = HashMap::new();
input.insert("hello".to_string(), "world".to_string());
input.insert("dont".to_string(), "panic".to_string());
let expected = br#"{
ZG9udA = cGFuaWM,
aGVsbG8 = d29ybGQ,
}"#;
let expected = r#"{ZG9udA=cGFuaWM;aGVsbG8=d29ybGQ}"#;
test_bidir(input, expected);
let mut input = HashMap::new();
input.insert(12, vec![42, 125]);
input.insert(33, vec![19, 22, 21]);
let expected = br#"{
12 = 42 125,
33 = 19 22 21,
}"#;
let expected = r#"{12=[42;125];33=[19;22;21]}"#;
test_bidir(input, expected);
}
}

View file

@ -5,6 +5,7 @@ use crate::serde::error::{Error, Result};
use serde::ser::Error as SerError;
/// Serde serializer for nettext
#[derive(Clone, Copy, Default)]
pub struct Serializer;
/// Serialize value to nettext encoder term
@ -12,7 +13,7 @@ pub fn to_term<T>(value: &T) -> Result<Term<'static>>
where
T: Serialize,
{
value.serialize(&mut Serializer)
value.serialize(&mut Serializer::default())
}
/// Serialize value to nettext
@ -20,7 +21,7 @@ pub fn to_bytes<T>(value: &T) -> Result<Vec<u8>>
where
T: Serialize,
{
Ok(value.serialize(&mut Serializer)?.encode())
Ok(value.serialize(&mut Serializer::default())?.encode())
}
impl<'a> ser::Serializer for &'a mut Serializer {
@ -28,7 +29,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
type Error = Error;
type SerializeSeq = SeqSerializer;
type SerializeSeq = ListSerializer;
type SerializeTuple = SeqSerializer;
type SerializeTupleStruct = SeqSerializer;
type SerializeTupleVariant = SeqSerializer;
@ -104,7 +105,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
where
T: ?Sized + Serialize,
{
Ok(value.serialize(self)?.nested())
Ok(value.serialize(self)?)
}
fn serialize_unit(self) -> Result<Self::Ok> {
@ -128,7 +129,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
where
T: ?Sized + Serialize,
{
Ok(list_flatten([string(name)?, value.serialize(self)?])?)
Ok(seq_flatten([string(name)?, value.serialize(self)?]))
}
fn serialize_newtype_variant<T>(
@ -141,19 +142,23 @@ impl<'a> ser::Serializer for &'a mut Serializer {
where
T: ?Sized + Serialize,
{
Ok(list_flatten([
Ok(seq_flatten([
string_owned(format!("{}.{}", name, variant))?,
value.serialize(self)?,
])?)
]))
}
fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq> {
Ok(SeqSerializer { items: vec![] })
Ok(ListSerializer {
items: vec![],
ser: *self,
})
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple> {
Ok(SeqSerializer {
items: Vec::with_capacity(len),
ser: *self,
})
}
@ -164,7 +169,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
) -> Result<Self::SerializeTupleStruct> {
let mut items = Vec::with_capacity(len + 1);
items.push(string(name)?);
Ok(SeqSerializer { items })
Ok(SeqSerializer { items, ser: *self })
}
fn serialize_tuple_variant(
@ -176,13 +181,14 @@ impl<'a> ser::Serializer for &'a mut Serializer {
) -> Result<Self::SerializeTupleVariant> {
let mut items = Vec::with_capacity(len + 1);
items.push(string_owned(format!("{}.{}", name, variant))?);
Ok(SeqSerializer { items })
Ok(SeqSerializer { items, ser: *self })
}
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap> {
Ok(MapSerializer {
next: None,
fields: vec![],
ser: *self,
})
}
@ -190,6 +196,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
Ok(StructSerializer {
name,
fields: Vec::with_capacity(len),
ser: *self,
})
}
@ -204,6 +211,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
name,
variant,
fields: Vec::with_capacity(len),
ser: *self,
})
}
}
@ -212,22 +220,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
pub struct SeqSerializer {
items: Vec<Term<'static>>,
}
impl ser::SerializeSeq for SeqSerializer {
type Ok = Term<'static>;
type Error = Error;
fn serialize_element<T>(&mut self, value: &T) -> Result<()>
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
Ok(())
}
fn end(self) -> Result<Self::Ok> {
Ok(list_nested(self.items.into_iter())?)
}
ser: Serializer,
}
impl ser::SerializeTuple for SeqSerializer {
@ -238,12 +231,12 @@ impl ser::SerializeTuple for SeqSerializer {
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
self.items.push(value.serialize(&mut self.ser)?);
Ok(())
}
fn end(self) -> Result<Self::Ok> {
Ok(list_nested(self.items.into_iter())?)
Ok(seq(self.items.into_iter())?)
}
}
@ -255,12 +248,12 @@ impl ser::SerializeTupleStruct for SeqSerializer {
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
self.items.push(value.serialize(&mut self.ser)?);
Ok(())
}
fn end(self) -> Result<Self::Ok> {
Ok(list_nested(self.items.into_iter())?)
Ok(seq(self.items.into_iter())?)
}
}
@ -272,18 +265,40 @@ impl ser::SerializeTupleVariant for SeqSerializer {
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
self.items.push(value.serialize(&mut self.ser)?);
Ok(())
}
fn end(self) -> Result<Self::Ok> {
Ok(list_nested(self.items.into_iter())?)
Ok(seq(self.items.into_iter())?)
}
}
/// Serde sequence serializer that accumulates elements into a nettext list.
pub struct ListSerializer {
    // Terms serialized so far, in element order.
    items: Vec<Term<'static>>,
    // Copy of the parent serializer, reused for each element.
    ser: Serializer,
}
impl ser::SerializeSeq for ListSerializer {
    type Ok = Term<'static>;
    type Error = Error;

    // Serialize one element and append it to the pending item list.
    fn serialize_element<T>(&mut self, value: &T) -> Result<()>
    where
        T: ?Sized + Serialize,
    {
        self.items.push(value.serialize(&mut self.ser)?);
        Ok(())
    }

    // Wrap all accumulated elements into a single list term.
    fn end(self) -> Result<Self::Ok> {
        Ok(list(self.items.into_iter()))
    }
}
/// Serde map serializer that accumulates key/value pairs into a nettext dict.
pub struct MapSerializer {
    // Encoded bytes of the most recently serialized key, awaiting its value
    // (set by serialize_key, consumed by serialize_value).
    next: Option<Vec<u8>>,
    // Completed (key, value) pairs collected so far.
    fields: Vec<(Vec<u8>, Term<'static>)>,
    // Copy of the parent serializer, reused for values.
    ser: Serializer,
}
impl ser::SerializeMap for MapSerializer {
@ -294,7 +309,8 @@ impl ser::SerializeMap for MapSerializer {
where
T: ?Sized + Serialize,
{
self.next = Some(key.serialize(&mut Serializer)?.encode());
let mut ser = Serializer;
self.next = Some(key.serialize(&mut ser)?.encode());
Ok(())
}
@ -306,7 +322,7 @@ impl ser::SerializeMap for MapSerializer {
self.next
.take()
.ok_or_else(|| Self::Error::custom("no key"))?,
value.serialize(&mut Serializer)?,
value.serialize(&mut self.ser)?,
));
Ok(())
}
@ -319,6 +335,7 @@ impl ser::SerializeMap for MapSerializer {
pub struct StructSerializer {
name: &'static str,
fields: Vec<(&'static str, Term<'static>)>,
ser: Serializer,
}
impl ser::SerializeStruct for StructSerializer {
@ -329,12 +346,12 @@ impl ser::SerializeStruct for StructSerializer {
where
T: ?Sized + Serialize,
{
self.fields.push((key, value.serialize(&mut Serializer)?));
self.fields.push((key, value.serialize(&mut self.ser)?));
Ok(())
}
fn end(self) -> Result<Term<'static>> {
Ok(list([string(self.name)?, dict(self.fields.into_iter())?])?)
Ok(seq([string(self.name)?, dict(self.fields.into_iter())?])?)
}
}
@ -342,6 +359,7 @@ pub struct StructVariantSerializer {
name: &'static str,
variant: &'static str,
fields: Vec<(&'static str, Term<'static>)>,
ser: Serializer,
}
impl ser::SerializeStructVariant for StructVariantSerializer {
@ -352,12 +370,12 @@ impl ser::SerializeStructVariant for StructVariantSerializer {
where
T: ?Sized + Serialize,
{
self.fields.push((key, value.serialize(&mut Serializer)?));
self.fields.push((key, value.serialize(&mut self.ser)?));
Ok(())
}
fn end(self) -> Result<Term<'static>> {
Ok(list([
Ok(seq([
string_owned(format!("{}.{}", self.name, self.variant))?,
dict(self.fields.into_iter())?,
])?)