Compare commits

...

11 commits
v0.3.0 ... main

13 changed files with 944 additions and 296 deletions

1
.gitignore vendored
View file

@ -1,2 +1 @@
/target
/Cargo.lock

460
Cargo.lock generated Normal file
View file

@ -0,0 +1,460 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bitflags"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chacha20"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818"
dependencies = [
"cfg-if",
"cipher",
"cpufeatures",
]
[[package]]
name = "cipher"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
dependencies = [
"crypto-common",
"inout",
"zeroize",
]
[[package]]
name = "cpufeatures"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0"
dependencies = [
"libc",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "curve25519-dalek"
version = "4.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c"
dependencies = [
"cfg-if",
"cpufeatures",
"curve25519-dalek-derive",
"fiat-crypto",
"platforms",
"rustc_version",
"subtle",
"zeroize",
]
[[package]]
name = "curve25519-dalek-derive"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
]
[[package]]
name = "dryoc"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65f5013c48133363c5a8db6bc74511b8b254680929c7757d9d833dea18c12f13"
dependencies = [
"bitflags",
"chacha20",
"curve25519-dalek",
"generic-array",
"lazy_static",
"libc",
"rand_core",
"salsa20",
"sha2",
"subtle",
"winapi",
"zeroize",
]
[[package]]
name = "err-derive"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c34a887c8df3ed90498c1c437ce21f211c8e27672921a8ffa293cb8d6d4caa9e"
dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"rustversion",
"syn 1.0.109",
"synstructure",
]
[[package]]
name = "fiat-crypto"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53a56f0780318174bad1c127063fd0c5fdfb35398e3cd79ffaab931a6c79df80"
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getrandom"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "inout"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5"
dependencies = [
"generic-array",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "memchr"
version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "nettext"
version = "0.4.1"
dependencies = [
"base64",
"dryoc",
"err-derive",
"hex",
"nom",
"serde",
]
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "platforms"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0"
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn 1.0.109",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "rustversion"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
[[package]]
name = "salsa20"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213"
dependencies = [
"cipher",
]
[[package]]
name = "semver"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
[[package]]
name = "serde"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "subtle"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "zeroize"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
dependencies = [
"zeroize_derive",
]
[[package]]
name = "zeroize_derive"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]

View file

@ -2,7 +2,7 @@
name = "nettext"
description = "A text-based data format for cryptographic network protocols"
authors = ["Alex Auvolat <alex@adnab.me>"]
version = "0.3.0"
version = "0.4.1"
edition = "2021"
license = "AGPL-3.0"
readme = "README.md"
@ -12,9 +12,10 @@ readme = "README.md"
[dependencies]
nom = "7.1"
base64 = "0.13"
hex = "0.4"
err-derive = "0.3"
dryoc = { version = "0.4", optional = true }
dryoc = { version = "0.5", optional = true }
serde = { version = "1.0", optional = true, features = ["derive"] }
[features]

View file

@ -13,7 +13,7 @@ A text-based data format for cryptographic network protocols.
A term can be of any of the following kinds:
- a string, which may contain only ASCII alphanumeric terms and `.-_*?`
- a string, which may contain only ASCII alphanumeric characters and a limited subset of other ASCII characters that may not include characters used to represent other kinds of terms
- a dict, which maps strings (as defined above) to any term type
- a list, which may contain any number of any kind of terms (can be mixed)
- a sequence, constituted of at least two of the above (can be mixed), simply separated by whitespace; sequences cannot be nested
@ -22,15 +22,15 @@ Dicts are represented as follows:
```
{
key1 = value1,
key1 = value1;
key2 = value2
}
```
Dicts are represented as follows:
Lists are represented as follows:
```
[ term1, term2 ]
[ term1; term2 ]
```
Sequences are represented as follows:
@ -43,9 +43,9 @@ As a consequence, complex data structures can be defined as follows:
```
SEND MESSAGE {
topic = blah,
topic = blah;
to = [
TOPIC hello,
TOPIC hello;
USER john
],
body = blah blah
@ -82,6 +82,30 @@ Terms can be interpreted in a number of different ways, depending on the context
Terms further have mappings as different data types:
- BYTES: if the term maps as a STRING, decode it using base64
- INT: if the term maps as a STRING, decode it as an integer written in decimal notation
- BYTES: if the term maps as a STRING, decode it using base64. Since a STRING cannot be empty, the string `-` is used to represent an empty byte string.
- Cryptographic data types (see below)
## Cryptographic data types
Cryptographic values such as keys, hashes, signatures, etc. are encoded
as STRING with a prefix indicating the algorithm used, followed by ":",
followed by the base64-encoded value.
Prefixes are as follows:
- `pk.box:` public key for NaCl's box API
- `sk.box:` secret key for NaCl's box API
- `sk.sbox:` secret key for NaCl's secretbox API
- `h.sha256:` sha256 hash
- `h.sha512:` sha512 hash
- `h.sha3:` sha3 hash
- `h.b2:` blake2b hash
- `h.b3:` blake3 hash
- `sig.ed25519:` ed25519 signature
- `pk.ed25519:` ed25519 public signing key
- `sk.ed25519:` ed25519 secret signing key
More can be added.
- HASH, PUBKEY, SECKEY, SIGNATURE, ENCKEY, DECKEY, SYMKEY: a bunch of things that interpret BYTES as specific cryptographic items

View file

@ -1,48 +1,158 @@
//! Helpers to use cryptographic data types in nettext
pub use dryoc::*;
pub use dryoc;
use dryoc::types::{Bytes, StackByteArray};
use dryoc::types::Bytes;
use dryoc::*;
use crate::dec;
use crate::enc;
pub type SigningKeyPair = sign::SigningKeyPair<sign::PublicKey, sign::SecretKey>;
const BM_HASH: &str = "h.b2";
impl<const N: usize> enc::Encode for StackByteArray<N> {
fn term(&self) -> enc::Result<'_> {
Ok(enc::bytes(self.as_slice()))
const BM_SIGNATURE: &str = "sig.ed25519";
const BM_SIGN_KEYPAIR: &str = "sk.ed25519";
const BM_SIGN_PUBKEY: &str = "pk.ed25519";
// ---- types ----
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Hash(pub generichash::Hash);
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Signature(pub sign::Signature);
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct SigningPublicKey(pub sign::PublicKey);
#[derive(PartialEq, Clone, Debug)]
pub struct SigningKeyPair(pub sign::SigningKeyPair<sign::PublicKey, sign::SecretKey>);
impl SigningKeyPair {
/// Return the public key of this keypair
pub fn public_key(&self) -> SigningPublicKey {
SigningPublicKey(self.0.public_key.clone())
}
}
impl enc::Encode for sign::SigningKeyPair<sign::PublicKey, sign::SecretKey> {
// ---- encoding ----
impl enc::Encode for Hash {
fn term(&self) -> enc::Result<'_> {
Ok(enc::bytes(self.secret_key.as_slice()))
enc::marked_bytes(BM_HASH, self.0.as_slice())
}
}
// ---- helpers ----
impl enc::Encode for Signature {
fn term(&self) -> enc::Result<'_> {
enc::marked_bytes(BM_SIGNATURE, self.0.as_slice())
}
}
impl enc::Encode for SigningPublicKey {
fn term(&self) -> enc::Result<'_> {
enc::marked_bytes(BM_SIGN_PUBKEY, self.0.as_slice())
}
}
impl enc::Encode for SigningKeyPair {
fn term(&self) -> enc::Result<'_> {
enc::marked_bytes(BM_SIGN_KEYPAIR, self.0.secret_key.as_slice())
}
}
// ---- calculating hashes, signatures, etc ----
/// Compute the hash of a payload with default dryoc parameters and optional key
pub fn compute_hash(bytes: &[u8], key: Option<&[u8; 32]>) -> generichash::Hash {
generichash::GenericHash::hash_with_defaults(bytes, key).unwrap()
pub fn compute_hash(bytes: &[u8], key: Option<&[u8; 32]>) -> Hash {
Hash(generichash::GenericHash::hash_with_defaults(bytes, key).unwrap())
}
/// Generate a new signing keypair
pub fn gen_signing_keypair() -> SigningKeyPair {
SigningKeyPair(sign::SigningKeyPair::gen_with_defaults())
}
/// Compute the ed25519 signature of a message using a secret key
pub fn compute_signature(message: &[u8], secret_key: &sign::SecretKey) -> sign::Signature {
SigningKeyPair::from_secret_key(secret_key.clone())
.sign_with_defaults(message)
.unwrap()
.into_parts()
.0
pub fn compute_signature(message: &[u8], keypair: &SigningKeyPair) -> Signature {
Signature(
keypair
.0
.sign_with_defaults(message)
.unwrap()
.into_parts()
.0,
)
}
/// Verify the ed25519 signature of a message using a public key
pub fn verify_signature(
signature: &sign::Signature,
signature: &Signature,
message: &[u8],
public_key: &sign::PublicKey,
public_key: &SigningPublicKey,
) -> bool {
sign::SignedMessage::from_parts(signature.clone(), message)
.verify(public_key)
sign::SignedMessage::from_parts(signature.0.clone(), message.to_vec())
.verify(&public_key.0)
.is_ok()
}
// ---- decode helpers ----
pub trait CryptoDec {
/// Try to interpret this string as a Blake2b512 digest
/// (32-bytes base64 encoded, prefixed by `h.b2:`)
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
/// use nettext::crypto::{compute_hash, CryptoDec};
///
/// let term = decode(b"{
/// message = hello;
/// hash = h.b2:Mk3PAn3UowqTLEQfNlol6GsXPe-kuOWJSCU0cbgbcs8;
/// }").unwrap();
/// let [msg, hash] = term.dict_of(["message", "hash"], false).unwrap();
/// let expected_hash = compute_hash(msg.raw(), None);
/// assert_eq!(hash.hash().unwrap(), expected_hash);
/// ```
fn hash(&self) -> Result<Hash, dec::TypeError>;
/// Try to interpret this string as an ed25519 signature
/// (64 bytes base64 encoded, prefixed by `sig.ed25519:`)
fn signature(&self) -> Result<Signature, dec::TypeError>;
/// Try to interpret this string as an ed25519 keypair
/// (64 bytes base64 encoded, prefixed by `sk.ed25519:`)
fn keypair(&self) -> Result<SigningKeyPair, dec::TypeError>;
/// Try to interpret this string as an ed25519 public key
/// (32 bytes base64 encoded, prefixed by `pk.ed25519:`)
fn public_key(&self) -> Result<SigningPublicKey, dec::TypeError>;
}
impl<'a, 'b> CryptoDec for dec::Term<'a, 'b> {
fn hash(&self) -> Result<Hash, dec::TypeError> {
Ok(Hash(generichash::Hash::from(
self.marked_bytes_exact(BM_HASH)?,
)))
}
/// Try to interpret this string as an ed25519 signature (64 bytes base64 encoded)
fn signature(&self) -> Result<Signature, dec::TypeError> {
Ok(Signature(sign::Signature::from(
self.marked_bytes_exact(BM_SIGNATURE)?,
)))
}
fn keypair(&self) -> Result<SigningKeyPair, dec::TypeError> {
let secret_key = sign::SecretKey::from(self.marked_bytes_exact(BM_SIGN_KEYPAIR)?);
Ok(SigningKeyPair(sign::SigningKeyPair::from_secret_key(
secret_key,
)))
}
fn public_key(&self) -> Result<SigningPublicKey, dec::TypeError> {
Ok(SigningPublicKey(sign::PublicKey::from(
self.marked_bytes_exact(BM_SIGN_PUBKEY)?,
)))
}
}

View file

@ -138,11 +138,11 @@ mod tests {
#[test]
fn simple_dict() {
let bytes = b" { aze = hello, by = bojzkz pipo, ccde = ke } ";
let bytes = b" { aze = hello; by = bojzkz pipo; ccde = ke } ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Dict(
b"{ aze = hello, by = bojzkz pipo, ccde = ke }",
b"{ aze = hello; by = bojzkz pipo; ccde = ke }",
[
(&b"aze"[..], AnyTerm::Str(b"hello")),
(
@ -163,11 +163,11 @@ mod tests {
#[test]
fn simple_dict_2() {
let bytes = b" { aze = hello, by = bojzkz pipo , ccde = ke , } ";
let bytes = b" { aze = hello; by = bojzkz pipo ; ccde = ke ; } ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Dict(
b"{ aze = hello, by = bojzkz pipo , ccde = ke , }",
b"{ aze = hello; by = bojzkz pipo ; ccde = ke ; }",
[
(&b"aze"[..], AnyTerm::Str(b"hello")),
(
@ -188,11 +188,11 @@ mod tests {
#[test]
fn simple_list() {
let bytes = b" [ hello, bojzkz pipo, ke ] ";
let bytes = b" [ hello; bojzkz pipo; ke ] ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
b"[ hello, bojzkz pipo, ke ]",
b"[ hello; bojzkz pipo; ke ]",
[
AnyTerm::Str(b"hello"),
AnyTerm::Seq(
@ -209,11 +209,11 @@ mod tests {
#[test]
fn simple_list_2() {
let bytes = b" [ hello, bojzkz pipo , ke , ] ";
let bytes = b" [ hello; bojzkz pipo ; ke ; ] ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
b"[ hello, bojzkz pipo , ke , ]",
b"[ hello; bojzkz pipo ; ke ; ]",
[
AnyTerm::Str(b"hello"),
AnyTerm::Seq(
@ -243,7 +243,7 @@ mod tests {
#[test]
fn real_world_2() {
let bytes = b"STANCE sthash stsign { author = alexpubkey, height = 12, parent = parenthash, data = MESSAGE { text = hello } }";
let bytes = b"STANCE sthash stsign { author = alexpubkey; height = 12; parent = parenthash; data = MESSAGE { text = hello } }";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Seq(
@ -252,7 +252,7 @@ mod tests {
NonSeqTerm::Str(b"STANCE"),
NonSeqTerm::Str(b"sthash"),
NonSeqTerm::Str(b"stsign"),
NonSeqTerm::Dict(b"{ author = alexpubkey, height = 12, parent = parenthash, data = MESSAGE { text = hello } }",
NonSeqTerm::Dict(b"{ author = alexpubkey; height = 12; parent = parenthash; data = MESSAGE { text = hello } }",
[
(&b"author"[..], AnyTerm::Str(b"alexpubkey")),
(&b"height"[..], AnyTerm::Str(b"12")),
@ -279,7 +279,7 @@ mod tests {
#[test]
fn real_world_3() {
let bytes = b"[ USER john, USER luke, GROUP strategy { owner = USER john, members = [ USER john, USER luke ] } ]";
let bytes = b"[ USER john; USER luke; GROUP strategy { owner = USER john; members = [ USER john; USER luke ] } ]";
let user_john = AnyTerm::Seq(
b"USER john",
vec![NonSeqTerm::Str(b"USER"), NonSeqTerm::Str(b"john")],
@ -297,18 +297,18 @@ mod tests {
user_john.clone(),
user_luke.clone(),
AnyTerm::Seq(
b"GROUP strategy { owner = USER john, members = [ USER john, USER luke ] }",
b"GROUP strategy { owner = USER john; members = [ USER john; USER luke ] }",
vec![
NonSeqTerm::Str(b"GROUP"),
NonSeqTerm::Str(b"strategy"),
NonSeqTerm::Dict(
b"{ owner = USER john, members = [ USER john, USER luke ] }",
b"{ owner = USER john; members = [ USER john; USER luke ] }",
[
(&b"owner"[..], user_john.clone()),
(
&b"members"[..],
AnyTerm::List(
b"[ USER john, USER luke ]",
b"[ USER john; USER luke ]",
vec![user_john, user_luke,]
)
)

View file

@ -8,6 +8,9 @@ pub enum TypeError {
/// The term could not be decoded in the given type
#[error(display = "Not a {}", _0)]
WrongType(&'static str),
/// The term did not have the correct marker
#[error(display = "Byte marker was not {}", _0)]
WrongMarker(&'static str),
/// The term is not an array of the requested length
#[error(display = "Expected {} items, got {}", _0, _1)]

View file

@ -5,9 +5,6 @@ mod error;
use std::collections::HashMap;
#[cfg(any(feature = "dryoc"))]
use crate::crypto;
use crate::debug;
pub use decode::*;
@ -101,8 +98,8 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"hello { a = x, b = y }").unwrap();
/// assert_eq!(term.raw_str().unwrap(), "hello { a = x, b = y }");
/// let term = decode(b"hello { a = x; b = y }").unwrap();
/// assert_eq!(term.raw_str().unwrap(), "hello { a = x; b = y }");
/// ```
pub fn raw_str(&self) -> Result<&'a str, TypeError> {
Ok(std::str::from_utf8(self.0.raw())?)
@ -266,7 +263,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2 }").unwrap();
/// let dict = term.dict().unwrap();
/// assert_eq!(dict.get("k1").unwrap().str().unwrap(), "v1");
/// assert_eq!(dict.get("k2").unwrap().str().unwrap(), "v2");
@ -292,7 +289,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2, k3 = v3 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2; k3 = v3 }").unwrap();
/// let [s1, s2] = term.dict_of(["k1", "k2"], true).unwrap();
/// assert_eq!(s1.str().unwrap(), "v1");
/// assert_eq!(s2.str().unwrap(), "v2");
@ -334,7 +331,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2, k4 = v4 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2; k4 = v4 }").unwrap();
/// let [s1, s2, s3] = term.dict_of_opt(["k1", "k2", "k3"], true).unwrap();
/// assert_eq!(s1.unwrap().str().unwrap(), "v1");
/// assert_eq!(s2.unwrap().str().unwrap(), "v2");
@ -370,7 +367,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term2 = decode(b"[ hello, world ]").unwrap();
/// let term2 = decode(b"[ hello; world ]").unwrap();
/// let seq2 = term2.list().unwrap();
/// assert_eq!(seq2.len(), 2);
/// assert_eq!(seq2[0].str().unwrap(), "hello");
@ -378,8 +375,10 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
pub fn list(&self) -> Result<Vec<Term<'a, '_>>, TypeError> {
match self.0.mkref() {
AnyTerm::ListRef(_r, l) => Ok(l.iter().map(|x| Term(x.mkref().into())).collect::<Vec<_>>()),
_ => Err(TypeError::WrongType("LIST")),
AnyTerm::ListRef(_r, l) => {
Ok(l.iter().map(|x| Term(x.mkref().into())).collect::<Vec<_>>())
}
_ => Err(TypeError::WrongType("LIST")),
}
}
@ -418,7 +417,7 @@ impl<'a, 'b> Term<'a, 'b> {
};
match self.0.mkref() {
AnyTerm::Str(encoded) => {
if encoded == b"." {
if encoded == b"-" {
Ok(vec![])
} else {
decode(encoded)
@ -440,68 +439,40 @@ impl<'a, 'b> Term<'a, 'b> {
}
/// Try to interpret this string as base64-encoded bytes,
/// with an exact length.
/// with a marker prefix and an exact byte length.
/// This is typically used for cryptographic data types such as hashes,
/// keys, signatures, ...
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"aGVsbG8sIHdvcmxkIQ").unwrap();
/// assert_eq!(&term.bytes_exact::<13>().unwrap(), b"hello, world!");
/// let term = decode(b"test:aGVsbG8sIHdvcmxkIQ").unwrap();
/// assert_eq!(&term.marked_bytes_exact::<13>("test").unwrap(), b"hello, world!");
/// ```
pub fn bytes_exact<const N: usize>(&self) -> Result<[u8; N], TypeError> {
let bytes = self.bytes()?;
let bytes_len = bytes.len();
bytes
.try_into()
.map_err(|_| TypeError::WrongLength(bytes_len, N))
}
}
// ---- CRYPTO HELPERS ----
#[cfg(feature = "dryoc")]
impl<'a, 'b> Term<'a, 'b> {
/// Try to interpret this string as a Blake2b512 digest (32-bytes base64 encoded)
///
/// Example:
///
/// ```
/// use nettext::dec::decode;
/// use nettext::crypto::generichash::GenericHash;
///
/// let term = decode(b"{
/// message = hello,
/// hash = Mk3PAn3UowqTLEQfNlol6GsXPe-kuOWJSCU0cbgbcs8,
/// }").unwrap();
/// let [msg, hash] = term.dict_of(["message", "hash"], false).unwrap();
/// let expected_hash = GenericHash::hash_with_defaults(msg.raw(), None::<&Vec<u8>>).unwrap();
/// assert_eq!(hash.hash().unwrap(), expected_hash);
/// ```
pub fn hash(&self) -> Result<crypto::generichash::Hash, TypeError> {
Ok(crypto::generichash::Hash::from(self.bytes_exact()?))
}
/// Try to interpret this string as an ed25519 keypair (64 bytes base64 encoded)
pub fn keypair(&self) -> Result<crypto::SigningKeyPair, TypeError> {
let secret_key = crypto::sign::SecretKey::from(self.bytes_exact()?);
Ok(crypto::SigningKeyPair::from_secret_key(secret_key))
}
/// Try to interpret this string as an ed25519 public key (32 bytes base64 encoded)
pub fn public_key(&self) -> Result<crypto::sign::PublicKey, TypeError> {
Ok(crypto::sign::PublicKey::from(self.bytes_exact()?))
}
/// Try to interpret this string as an ed25519 secret key (32 bytes base64 encoded)
pub fn secret_key(&self) -> Result<crypto::sign::SecretKey, TypeError> {
Ok(crypto::sign::SecretKey::from(self.bytes_exact()?))
}
/// Try to interpret this string as an ed25519 signature (64 bytes base64 encoded)
pub fn signature(&self) -> Result<crypto::sign::Signature, TypeError> {
Ok(crypto::sign::Signature::from(self.bytes_exact()?))
pub fn marked_bytes_exact<const N: usize>(
&self,
marker: &'static str,
) -> Result<[u8; N], TypeError> {
let mkr = marker.as_bytes();
match &self.0 {
AnyTerm::Str(s)
if s.len() >= mkr.len() + 2 && &s[..mkr.len()] == mkr && s[mkr.len()] == b':' =>
{
let bytes = match &s[mkr.len() + 1..] {
b"-" => vec![],
bytes => base64::decode_config(bytes, base64::URL_SAFE_NO_PAD)
.map_err(|_| TypeError::WrongType("BYTES"))?,
};
let bytes_len = bytes.len();
bytes
.try_into()
.map_err(|_| TypeError::WrongLength(bytes_len, N))
}
AnyTerm::Str(_) => Err(TypeError::WrongMarker(marker)),
_ => Err(TypeError::WrongType("BYTES")),
}
}
}

View file

@ -11,7 +11,7 @@
//! dict([
//! ("a", string("hello").unwrap()),
//! ("b", string("world").unwrap()),
//! ("c", raw(b"{ a = 12, b = 42 }").unwrap()),
//! ("c", raw(b"{ a = 12; b = 42 }").unwrap()),
//! ("d", bytes_split(&((0..128u8).collect::<Vec<_>>()))),
//! ]).unwrap(),
//! ]).unwrap().encode();
@ -23,6 +23,7 @@ use std::borrow::{Borrow, Cow};
use std::collections::HashMap;
use crate::dec::{self, decode};
use crate::*;
use crate::{is_string_char, is_whitespace};
pub use error::Error;
@ -87,7 +88,7 @@ pub fn string_owned(s: String) -> Result<'static> {
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(raw(b"Hello { a = b, c = d} .").unwrap().encode(), b"Hello { a = b, c = d} .");
/// assert_eq!(raw(b"Hello { a = b; c = d} .").unwrap().encode(), b"Hello { a = b; c = d} .");
/// ```
pub fn raw(bytes: &[u8]) -> Result<'_> {
if decode(bytes).is_err() {
@ -96,6 +97,84 @@ pub fn raw(bytes: &[u8]) -> Result<'_> {
Ok(Term(T::Str(bytes)))
}
/// Term corresponding to a byte slice,
/// encoding using base64 url-safe encoding without padding.
/// Since empty strings are not possible in nettext,
/// an empty byte string is encoded as the special string `-`.
///
/// Example:
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(bytes(b"").encode(), b"-");
/// assert_eq!(bytes(b"hello, world!").encode(), b"aGVsbG8sIHdvcmxkIQ");
/// ```
pub fn bytes(bytes: &[u8]) -> Term<'static> {
if bytes.is_empty() {
Term(T::Str(b"-"))
} else {
Term(T::OwnedStr(
base64::encode_config(bytes, base64::URL_SAFE_NO_PAD).into_bytes(),
))
}
}
/// Same as `bytes()`, but splits the byte slice in 48-byte chunks
/// and encodes each chunk separately, putting them in a sequence of terms.
/// Useful for long byte slices to have cleaner representations.
pub fn bytes_split(bytes: &[u8]) -> Term<'static> {
if bytes.is_empty() {
Term(T::Str(b"-"))
} else {
let chunks = bytes
.chunks(48)
.map(|b| T::OwnedStr(base64::encode_config(b, base64::URL_SAFE_NO_PAD).into_bytes()))
.collect::<Vec<_>>();
if chunks.len() > 1 {
Term(T::Seq(chunks))
} else {
Term(chunks.into_iter().next().unwrap())
}
}
}
/// Term corresponding to a byte slice,
/// encoding using base64 url-safe encoding without padding,
/// with a prefix used to identify its content type.
/// The marker prefix is typically used in crypto settings to identify
/// a cryptographic protocol or algorithm; it may not contain the `:` character.
///
/// Example:
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(marked_bytes("mytype", b"").unwrap().encode(), b"mytype:-");
/// assert_eq!(marked_bytes("mytype", b"hello, world!").unwrap().encode(), b"mytype:aGVsbG8sIHdvcmxkIQ");
/// ```
pub fn marked_bytes(marker: &str, bytes: &[u8]) -> Result<'static> {
for c in marker.as_bytes().iter() {
if !is_string_char(*c) || *c == b':' {
return Err(Error::InvalidCharacter(*c));
}
}
if bytes.is_empty() {
Ok(Term(T::OwnedStr(format!("{}:-", marker).into_bytes())))
} else {
Ok(Term(T::OwnedStr(
format!(
"{}:{}",
marker,
base64::encode_config(bytes, base64::URL_SAFE_NO_PAD)
)
.into_bytes(),
)))
}
}
// ---- composed terms -----
/// Term corresponding to a sequence of terms. Subsequences are banned and will raise an error.
///
/// ```
@ -137,7 +216,7 @@ pub fn seq_flatten<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
/// assert_eq!(list([
/// string("Hello").unwrap(),
/// string("world").unwrap()
/// ]).encode(), b"[\n Hello,\n world,\n]");
/// ]).encode(), b"[\n Hello;\n world;\n]");
/// ```
pub fn list<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
let terms = terms.into_iter().map(|x| x.0).collect::<Vec<_>>();
@ -152,7 +231,7 @@ pub fn list<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
/// assert_eq!(dict([
/// ("a", string("Hello").unwrap()),
/// ("b", string("world").unwrap())
/// ]).unwrap().encode(), b"{\n a = Hello,\n b = world,\n}");
/// ]).unwrap().encode(), b"{\n a = Hello;\n b = world;\n}");
/// ```
pub fn dict<'a, I: IntoIterator<Item = (&'a str, Term<'a>)>>(pairs: I) -> Result<'a> {
let mut tmp = HashMap::new();
@ -164,38 +243,6 @@ pub fn dict<'a, I: IntoIterator<Item = (&'a str, Term<'a>)>>(pairs: I) -> Result
Ok(Term(T::Dict(tmp)))
}
/// Term corresponding to a byte slice,
/// encoding using base64 url-safe encoding without padding
///
/// NOTE(review): an empty input encodes to an empty string here
/// (base64 of zero bytes), which nettext cannot represent — confirm
/// callers never pass an empty slice.
///
/// Example:
///
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(bytes(b"hello, world!").encode(), b"aGVsbG8sIHdvcmxkIQ");
/// ```
pub fn bytes(bytes: &[u8]) -> Term<'static> {
    Term(T::OwnedStr(
        base64::encode_config(bytes, base64::URL_SAFE_NO_PAD).into_bytes(),
    ))
}
/// Same as `bytes()`, but cuts the byte slice into 48-byte chunks and
/// encodes each chunk as a separate term of a sequence.
/// Useful for long byte slices to get cleaner representations,
/// mainly for dictionary keys.
pub fn bytes_split(bytes: &[u8]) -> Term<'static> {
    let mut parts = Vec::new();
    for chunk in bytes.chunks(48) {
        let b64 = base64::encode_config(chunk, base64::URL_SAFE_NO_PAD);
        parts.push(T::OwnedStr(b64.into_bytes()));
    }
    match parts.len() {
        // Empty input yields no chunks; fall back to the `.` placeholder.
        0 => Term(T::Str(b".")),
        1 => Term(parts.pop().unwrap()),
        _ => Term(T::Seq(parts)),
    }
}
impl<'a> Term<'a> {
/// Append a term to an existing term.
/// Transforms the initial term into a seq if necessary.
@ -262,6 +309,13 @@ impl<'a> Term<'a> {
/// Generate the nettext representation of a term, as a `String`.
pub fn encode_string(self) -> String {
    // SAFETY: relies on `encode()` producing only valid UTF-8 bytes.
    // NOTE(review): confirm `encode()` can never embed invalid UTF-8
    // (e.g. through raw byte strings) before trusting this conversion.
    unsafe { String::from_utf8_unchecked(self.encode()) }
}
/// Generate the concise nettext representation of a term
pub fn encode_concise(self) -> Vec<u8> {
let mut buf = Vec::with_capacity(128);
self.0.encode_concise_aux(&mut buf);
buf
}
}
impl<'a> T<'a> {
@ -271,16 +325,16 @@ impl<'a> T<'a> {
T::OwnedStr(s) => buf.extend_from_slice(&s),
T::Dict(mut d) => {
if d.is_empty() {
buf.extend_from_slice(b"{}");
buf.extend_from_slice(&[DICT_OPEN, DICT_CLOSE]);
} else if d.len() == 1 {
let (k, v) = d.into_iter().next().unwrap();
buf.extend_from_slice(b"{ ");
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(b" = ");
buf.extend_from_slice(&[DICT_OPEN, b' ']);
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(&[b' ', DICT_ASSIGN, b' ']);
v.encode_aux(buf, indent + 2, false);
buf.extend_from_slice(b" }");
buf.extend_from_slice(&[b' ', DICT_CLOSE]);
} else {
buf.extend_from_slice(b"{\n");
buf.extend_from_slice(&[DICT_OPEN, b'\n']);
let indent2 = indent + 2;
let mut keys = d.keys().cloned().collect::<Vec<_>>();
keys.sort();
@ -290,39 +344,42 @@ impl<'a> T<'a> {
buf.push(b' ');
}
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(b" = ");
buf.extend_from_slice(&[b' ', DICT_ASSIGN, b' ']);
v.encode_aux(buf, indent2, false);
buf.extend_from_slice(b",\n");
buf.extend_from_slice(&[DICT_DELIM, b'\n']);
}
for _ in 0..indent {
buf.push(b' ');
}
buf.push(b'}');
buf.push(DICT_CLOSE);
}
}
T::List(l) => {
if l.len() == 0 {
buf.extend_from_slice(&[LIST_OPEN, LIST_CLOSE]);
} else if l.len() == 1 {
buf.extend_from_slice(&[LIST_OPEN, b' ']);
l.into_iter()
.next()
.unwrap()
.encode_aux(buf, indent + 2, false);
buf.extend_from_slice(&[b' ', LIST_CLOSE]);
} else {
let indent2 = indent + 2;
buf.extend_from_slice(&[LIST_OPEN, b'\n']);
for item in l {
for _ in 0..indent2 {
buf.push(b' ');
}
item.encode_aux(buf, indent2, false);
buf.extend_from_slice(&[LIST_DELIM, b'\n']);
}
for _ in 0..indent {
buf.push(b' ');
}
buf.push(LIST_CLOSE);
}
}
T::List(l) => {
if l.len() == 0 {
buf.extend_from_slice(b"[]");
} else if l.len() == 1 {
buf.extend_from_slice(b"[ ");
l.into_iter().next().unwrap().encode_aux(buf, indent + 2, false);
buf.extend_from_slice(b" ]");
} else {
let indent2 = indent + 2;
buf.extend_from_slice(b"[\n");
for item in l {
for _ in 0..indent2 {
buf.push(b' ');
}
item.encode_aux(buf, indent2, false);
buf.extend_from_slice(b",\n");
}
for _ in 0..indent {
buf.push(b' ');
}
buf.push(b']');
}
}
T::Seq(l) => {
let indent2 = indent + 2;
for (i, v) in l.into_iter().enumerate() {
@ -339,41 +396,94 @@ impl<'a> T<'a> {
}
}
}
/// Serialize `self` into `buf` in concise form: no indentation and no
/// newlines; dict/list entries separated only by their delimiter bytes,
/// sequence items by a single space.
fn encode_concise_aux(self, buf: &mut Vec<u8>) {
    match self {
        // Strings are emitted verbatim.
        T::Str(s) => buf.extend_from_slice(s),
        T::OwnedStr(s) => buf.extend_from_slice(&s),
        T::Dict(mut d) => {
            buf.push(DICT_OPEN);
            // Keys are sorted so the encoding is deterministic.
            let mut keys = d.keys().cloned().collect::<Vec<_>>();
            keys.sort();
            for (i, k) in keys.into_iter().enumerate() {
                if i > 0 {
                    buf.push(DICT_DELIM);
                }
                let v = d.remove(&k).unwrap();
                buf.extend_from_slice(k.borrow());
                buf.push(DICT_ASSIGN);
                v.encode_concise_aux(buf);
            }
            buf.push(DICT_CLOSE);
        }
        T::List(l) => {
            buf.push(LIST_OPEN);
            for (i, item) in l.into_iter().enumerate() {
                if i > 0 {
                    buf.push(LIST_DELIM);
                }
                item.encode_concise_aux(buf);
            }
            buf.push(LIST_CLOSE);
        }
        T::Seq(l) => {
            // Sequence items are joined by a single space, with no
            // surrounding brackets.
            for (i, v) in l.into_iter().enumerate() {
                if i > 0 {
                    buf.push(b' ');
                }
                v.encode_concise_aux(buf);
            }
        }
    }
}
}
#[cfg(test)]
mod tests {
use crate::debug;
use super::*;
use crate::debug;
#[test]
fn complex1() {
let input = seq([
string("HELLO").unwrap(),
string("alexhelloworld").unwrap(),
list([
string("dude").unwrap(),
string("why").unwrap(),
]),
list([string("dude").unwrap(), string("why").unwrap()]),
dict([
("from", string("jxx").unwrap()),
("subject", string("hello").unwrap()),
("data", raw(b"{ f1 = plop, f2 = kuko }").unwrap()),
("data", raw(b"{ f1 = plop; f2 = kuko }").unwrap()),
])
.unwrap(),
])
.unwrap();
let expected = "HELLO alexhelloworld [
dude,
why,
dude;
why;
] {
data = { f1 = plop, f2 = kuko },
from = jxx,
subject = hello,
data = { f1 = plop; f2 = kuko };
from = jxx;
subject = hello;
}";
let enc = input.encode();
eprintln!("{}", debug(&enc));
eprintln!("{}", expected);
assert_eq!(debug(&enc), expected);
assert_eq!(debug(&input.encode()), expected);
}
#[test]
fn complex1_concise() {
    // Same structure as the pretty-printed test, but checked against
    // the concise encoding.
    let inner_list = list([string("dude").unwrap(), string("why").unwrap()]);
    let inner_dict = dict([
        ("from", string("jxx").unwrap()),
        ("subject", string("hello").unwrap()),
        ("data", raw(b"{ f1 = plop; f2 = kuko }").unwrap()),
    ])
    .unwrap();
    let term = seq([
        string("HELLO").unwrap(),
        string("alexhelloworld").unwrap(),
        inner_list,
        inner_dict,
    ])
    .unwrap();
    let expected = "HELLO alexhelloworld [dude;why] {data={ f1 = plop; f2 = kuko };from=jxx;subject=hello}";
    assert_eq!(debug(&term.encode_concise()), expected);
}
}

View file

@ -6,7 +6,7 @@
//! use nettext::crypto::*;
//!
//! let final_payload = {
//! let keypair = SigningKeyPair::gen_with_defaults();
//! let keypair = gen_signing_keypair();
//!
//! // Encode a first object that represents a payload that will be hashed and signed
//! let signed_payload = seq([
@ -15,15 +15,15 @@
//! dict([
//! ("a", string("hello").unwrap()),
//! ("b", string("world").unwrap()),
//! ("c", raw(b"{ a = 12, b = 42 }").unwrap()),
//! ("c", raw(b"{ a = 12; b = 42 }").unwrap()),
//! ("d", bytes_split(&((0..128u8).collect::<Vec<_>>()))),
//! ]).unwrap(),
//! keypair.public_key.term().unwrap(),
//! keypair.public_key().term().unwrap(),
//! ]).unwrap().encode();
//! eprintln!("{}", std::str::from_utf8(&signed_payload).unwrap());
//!
//! let hash = compute_hash(&signed_payload, None);
//! let sign = compute_signature(&signed_payload[..], &keypair.secret_key);
//! let sign = compute_signature(&signed_payload[..], &keypair);
//!
//! // Encode a second object that represents the signed and hashed payload
//! dict([
@ -52,33 +52,32 @@
//! assert_eq!(arg1.string().unwrap(), "myfunction");
//! ```
//!
//! The value of `text1` would be as follows:
//! The value of `signed_payload` would be as follows:
//!
//! ```raw
//!
//! CALL myfunction {
//! a = hello,
//! b = world,
//! c = { a = 12, b = 42 },
//! a = hello;
//! b = world;
//! c = { a = 12; b = 42 };
//! d = AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4v
//! MDEyMzQ1Njc4OTo7PD0-P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5f
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8,
//! } M3_to5OQ5TvIVyoRXTSK4Jz-zvSqsuh3a68haud_8Vs
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8;
//! } pk.ed25519:inYgWFyL_BzZTsXNKp71r2aVct_3Izi_bkerbzOiz94
//! ```
//!
//! And the value of `text2` would be as follows:
//! And the value of `final_payload` would be as follows:
//! ```raw
//! {
//! hash = Hxpas10VnFIq8WIWGmQk7YLbxT-OMIkg0-sKSBJnUuo,
//! hash = h.b2:B1AnRocS90DmqxynGyvvBNuh-brucNO7-5hrsGplJr0;
//! payload = CALL myfunction {
//! a = hello,
//! b = world,
//! c = { a = 12, b = 42 },
//! a = hello;
//! b = world;
//! c = { a = 12; b = 42 };
//! d = AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4v
//! MDEyMzQ1Njc4OTo7PD0-P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5f
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8,
//! } M3_to5OQ5TvIVyoRXTSK4Jz-zvSqsuh3a68haud_8Vs,
//! signature = DAgQDqxi6rDEkGVoUmfHexWUCFYKNbQR0Fgp3_EiaMxiFLeQdy3w3If_lsYqDDmWHYR51whfaNGQZ6PxVthMAA,
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8;
//! } pk.ed25519:inYgWFyL_BzZTsXNKp71r2aVct_3Izi_bkerbzOiz94;
//! signature = sig.ed25519:LvLC1gHxNxUH44HHQRO-zWtLM4WyXhiYLFr94qTdI311Wa-kmgZsaWqSWe3jcjkS4PnsWSNt5apgbhR68cWWCg;
//! }
//! ```
//!
@ -99,16 +98,19 @@ pub mod serde;
pub(crate) const DICT_OPEN: u8 = b'{';
pub(crate) const DICT_CLOSE: u8 = b'}';
pub(crate) const DICT_ASSIGN: u8 = b'=';
pub(crate) const DICT_DELIM: u8 = b',';
pub(crate) const DICT_DELIM: u8 = b';';
pub(crate) const LIST_OPEN: u8 = b'[';
pub(crate) const LIST_CLOSE: u8 = b']';
pub(crate) const LIST_DELIM: u8 = b',';
pub(crate) const STR_EXTRA_CHARS: &[u8] = b"._-+*?@:";
pub(crate) const LIST_DELIM: u8 = b';';
const BASE_EXTRA_CHARS: &[u8] = b".,:?!@$^<>|&#'_-+*/%";
const STR_EXTRA_CHARS: &[u8] = b"\\";
#[inline]
pub(crate) fn is_string_char(c: u8) -> bool {
c.is_ascii_alphanumeric() || STR_EXTRA_CHARS.contains(&c)
c.is_ascii_alphanumeric() || BASE_EXTRA_CHARS.contains(&c) || STR_EXTRA_CHARS.contains(&c)
}
#[inline]
pub(crate) fn is_whitespace(c: u8) -> bool {
    // ASCII whitespace: space, tab, line feed, form feed, carriage return
    // (same set as `u8::is_ascii_whitespace`).
    matches!(c, b' ' | b'\t' | b'\n' | b'\x0c' | b'\r')
}

View file

@ -14,7 +14,7 @@ use crate::serde::error::{Error, Result};
pub struct Deserializer<'de, 'a>(Term<'de, 'a>);
impl<'de, 'a> Deserializer<'de, 'a> {
fn from_term(input: &'a Term<'de, 'a>) -> Deserializer<'de, 'a> {
pub fn from_term(input: &'a Term<'de, 'a>) -> Deserializer<'de, 'a> {
Deserializer(Term(input.0.mkref()))
}
}

View file

@ -11,20 +11,26 @@ pub use ser::{to_bytes, to_term, Serializer};
#[cfg(test)]
mod tests {
use super::*;
use crate::debug;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::debug;
fn test_bidir<T: Serialize + for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug>(
input: T,
expected: &str,
expected_concise: &str,
) {
eprintln!("Expecting: {}", expected);
eprintln!("Expecting: {}", expected_concise);
let ser = to_bytes(&input).unwrap();
let ser = debug(&ser);
eprintln!("Serialized: {}", ser);
assert_eq!(ser, expected);
assert_eq!(from_bytes::<T>(ser.as_bytes()).unwrap(), input);
let ser_concise = to_term(&input).unwrap().encode_concise();
let ser_concise = debug(&ser_concise);
eprintln!("Serialized (concise): {}", ser_concise);
assert_eq!(ser_concise, expected_concise);
assert_eq!(from_bytes::<T>(ser_concise.as_bytes()).unwrap(), input);
}
#[test]
@ -39,13 +45,7 @@ mod tests {
int: 1,
seq: vec!["a".to_string(), "b".to_string()],
};
let expected = r#"Test {
int = 1,
seq = [
YQ,
Yg,
],
}"#;
let expected = r#"Test {int=1;seq=[YQ;Yg]}"#;
test_bidir(input, expected);
let input = vec![
@ -58,22 +58,7 @@ mod tests {
seq: vec!["c".to_string(), "d".to_string()],
},
];
let expected = r#"[
Test {
int = 1,
seq = [
YQ,
Yg,
],
},
Test {
int = 2,
seq = [
Yw,
ZA,
],
},
]"#;
let expected = r#"[Test {int=1;seq=[YQ;Yg]};Test {int=2;seq=[Yw;ZA]}]"#;
test_bidir(input, expected);
}
@ -100,7 +85,7 @@ mod tests {
test_bidir(input, expected);
let input = E::Struct { a: 1 };
let expected = r#"E.Struct { a = 1 }"#;
let expected = r#"E.Struct {a=1}"#;
test_bidir(input, expected);
let input = vec![
@ -111,15 +96,7 @@ mod tests {
E::Struct { a: 1 },
E::Tuple(3, 2),
];
let expected =
r#"[
E.Unit,
E.Unit,
E.Newtype 1,
E.Tuple 1 2,
E.Struct { a = 1 },
E.Tuple 3 2,
]"#;
let expected = r#"[E.Unit;E.Unit;E.Newtype 1;E.Tuple 1 2;E.Struct {a=1};E.Tuple 3 2]"#;
test_bidir(input, expected);
}
@ -133,24 +110,14 @@ mod tests {
#[test]
fn test_list() {
let input = vec![1, 2, 3, 4];
let expected = r#"[
1,
2,
3,
4,
]"#;
let expected = r#"[1;2;3;4]"#;
test_bidir(input, expected);
}
#[test]
fn test_seqlist() {
let input = vec![(1, 2), (2, 3), (3, 4), (5, 6)];
let expected = r#"[
1 2,
2 3,
3 4,
5 6,
]"#;
let expected = r#"[1 2;2 3;3 4;5 6]"#;
test_bidir(input, expected);
}
@ -159,26 +126,13 @@ mod tests {
let mut input = HashMap::new();
input.insert("hello".to_string(), "world".to_string());
input.insert("dont".to_string(), "panic".to_string());
let expected = r#"{
ZG9udA = cGFuaWM,
aGVsbG8 = d29ybGQ,
}"#;
let expected = r#"{ZG9udA=cGFuaWM;aGVsbG8=d29ybGQ}"#;
test_bidir(input, expected);
let mut input = HashMap::new();
input.insert(12, vec![42, 125]);
input.insert(33, vec![19, 22, 21]);
let expected = r#"{
12 = [
42,
125,
],
33 = [
19,
22,
21,
],
}"#;
let expected = r#"{12=[42;125];33=[19;22;21]}"#;
test_bidir(input, expected);
}
}

View file

@ -5,6 +5,7 @@ use crate::serde::error::{Error, Result};
use serde::ser::Error as SerError;
/// Serde serializer for nettext
#[derive(Clone, Copy, Default)]
pub struct Serializer;
/// Serialize value to nettext encoder term
@ -12,7 +13,7 @@ pub fn to_term<T>(value: &T) -> Result<Term<'static>>
where
T: Serialize,
{
value.serialize(&mut Serializer)
value.serialize(&mut Serializer::default())
}
/// Serialize value to nettext
@ -20,7 +21,7 @@ pub fn to_bytes<T>(value: &T) -> Result<Vec<u8>>
where
T: Serialize,
{
Ok(value.serialize(&mut Serializer)?.encode())
Ok(value.serialize(&mut Serializer::default())?.encode())
}
impl<'a> ser::Serializer for &'a mut Serializer {
@ -148,12 +149,16 @@ impl<'a> ser::Serializer for &'a mut Serializer {
}
fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq> {
Ok(ListSerializer { items: vec![] })
Ok(ListSerializer {
items: vec![],
ser: *self,
})
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple> {
Ok(SeqSerializer {
items: Vec::with_capacity(len),
ser: *self,
})
}
@ -164,7 +169,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
) -> Result<Self::SerializeTupleStruct> {
let mut items = Vec::with_capacity(len + 1);
items.push(string(name)?);
Ok(SeqSerializer { items })
Ok(SeqSerializer { items, ser: *self })
}
fn serialize_tuple_variant(
@ -176,13 +181,14 @@ impl<'a> ser::Serializer for &'a mut Serializer {
) -> Result<Self::SerializeTupleVariant> {
let mut items = Vec::with_capacity(len + 1);
items.push(string_owned(format!("{}.{}", name, variant))?);
Ok(SeqSerializer { items })
Ok(SeqSerializer { items, ser: *self })
}
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap> {
Ok(MapSerializer {
next: None,
fields: vec![],
ser: *self,
})
}
@ -190,6 +196,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
Ok(StructSerializer {
name,
fields: Vec::with_capacity(len),
ser: *self,
})
}
@ -204,6 +211,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
name,
variant,
fields: Vec::with_capacity(len),
ser: *self,
})
}
}
@ -212,6 +220,7 @@ impl<'a> ser::Serializer for &'a mut Serializer {
/// Serializer state shared by tuples, tuple structs and tuple variants.
pub struct SeqSerializer {
    // Serialized element terms, in insertion order.
    items: Vec<Term<'static>>,
    // Serializer reused for each element (see `serialize_element`/`serialize_field`).
    ser: Serializer,
}
impl ser::SerializeTuple for SeqSerializer {
@ -222,7 +231,7 @@ impl ser::SerializeTuple for SeqSerializer {
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
self.items.push(value.serialize(&mut self.ser)?);
Ok(())
}
@ -239,7 +248,7 @@ impl ser::SerializeTupleStruct for SeqSerializer {
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
self.items.push(value.serialize(&mut self.ser)?);
Ok(())
}
@ -256,7 +265,7 @@ impl ser::SerializeTupleVariant for SeqSerializer {
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
self.items.push(value.serialize(&mut self.ser)?);
Ok(())
}
@ -267,6 +276,7 @@ impl ser::SerializeTupleVariant for SeqSerializer {
/// Serializer state for variable-length sequences, produced by `serialize_seq`.
pub struct ListSerializer {
    // Serialized item terms, in insertion order.
    items: Vec<Term<'static>>,
    // Serializer reused for each item.
    ser: Serializer,
}
impl ser::SerializeSeq for ListSerializer {
type Ok = Term<'static>;
@ -276,7 +286,7 @@ impl ser::SerializeSeq for ListSerializer {
where
T: ?Sized + Serialize,
{
self.items.push(value.serialize(&mut Serializer)?);
self.items.push(value.serialize(&mut self.ser)?);
Ok(())
}
@ -288,6 +298,7 @@ impl ser::SerializeSeq for ListSerializer {
/// Serializer state for maps, produced by `serialize_map`.
pub struct MapSerializer {
    // Encoded key awaiting its value (set by `serialize_key`,
    // consumed by `serialize_value`).
    next: Option<Vec<u8>>,
    // Completed (encoded key, value term) pairs, in insertion order.
    fields: Vec<(Vec<u8>, Term<'static>)>,
    // Serializer reused for each value.
    ser: Serializer,
}
impl ser::SerializeMap for MapSerializer {
@ -298,7 +309,8 @@ impl ser::SerializeMap for MapSerializer {
where
T: ?Sized + Serialize,
{
self.next = Some(key.serialize(&mut Serializer)?.encode());
let mut ser = Serializer;
self.next = Some(key.serialize(&mut ser)?.encode());
Ok(())
}
@ -310,7 +322,7 @@ impl ser::SerializeMap for MapSerializer {
self.next
.take()
.ok_or_else(|| Self::Error::custom("no key"))?,
value.serialize(&mut Serializer)?,
value.serialize(&mut self.ser)?,
));
Ok(())
}
@ -323,6 +335,7 @@ impl ser::SerializeMap for MapSerializer {
/// Serializer state for named structs, produced by `serialize_struct`.
pub struct StructSerializer {
    // Name of the struct being serialized.
    name: &'static str,
    // (field name, serialized value term) pairs, in declaration order.
    fields: Vec<(&'static str, Term<'static>)>,
    // Serializer reused for each field value.
    ser: Serializer,
}
impl ser::SerializeStruct for StructSerializer {
@ -333,7 +346,7 @@ impl ser::SerializeStruct for StructSerializer {
where
T: ?Sized + Serialize,
{
self.fields.push((key, value.serialize(&mut Serializer)?));
self.fields.push((key, value.serialize(&mut self.ser)?));
Ok(())
}
@ -346,6 +359,7 @@ pub struct StructVariantSerializer {
name: &'static str,
variant: &'static str,
fields: Vec<(&'static str, Term<'static>)>,
ser: Serializer,
}
impl ser::SerializeStructVariant for StructVariantSerializer {
@ -356,7 +370,7 @@ impl ser::SerializeStructVariant for StructVariantSerializer {
where
T: ?Sized + Serialize,
{
self.fields.push((key, value.serialize(&mut Serializer)?));
self.fields.push((key, value.serialize(&mut self.ser)?));
Ok(())
}