Full encode/decode example

Alex 2022-11-17 23:58:44 +01:00
parent 79a6109403
commit bf988bec2f
Signed by: lx
GPG key ID: 0E496D15096376BE
7 changed files with 254 additions and 80 deletions

View file

@@ -14,6 +14,7 @@ nom = "7.1"
 base64 = "0.13"
 blake2 = { version = "0.10", optional = true }
+rand = "0.7"
 ed25519-dalek = { version = "1.0", optional = true }

 [features]

View file

@@ -1,6 +1,7 @@
 use blake2::{Blake2b512, Digest};

 use crate::crypto::CryptoError;
+use crate::enc;

 /// A Blake2b512 digest
 #[derive(Clone, Copy, Eq, PartialEq)]
@@ -20,11 +21,22 @@ impl Blake2Sum {
     }

     /// Check that this digest corresponds to a given slice
-    pub fn check(&self, buf: &[u8]) -> Result<(), CryptoError> {
+    pub fn verify(&self, buf: &[u8]) -> Result<(), CryptoError> {
         if Self::compute(buf) == *self {
             Ok(())
         } else {
             Err(CryptoError::InvalidHash)
         }
     }
+
+    /// Return a reference to the inner byte slice
+    pub fn as_bytes(&self) -> &[u8] {
+        &self.0[..]
+    }
+}
+
+impl enc::Encode for Blake2Sum {
+    fn term(&self) -> enc::Term<'_> {
+        enc::bytes(self.as_bytes())
+    }
 }
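A minimal usage sketch of the renamed digest API (assuming the crate is consumed as nettext and that Blake2Sum is re-exported from nettext::crypto, as the other hunks in this commit suggest):

use nettext::crypto::Blake2Sum;

let payload = b"some payload bytes";

// `compute` hashes the slice; `verify` (formerly `check`) recomputes and compares.
let digest = Blake2Sum::compute(payload);
assert!(digest.verify(payload).is_ok());
assert!(digest.verify(b"tampered payload").is_err());

// `as_bytes` exposes the raw Blake2b512 digest (64 bytes),
// which the new `enc::Encode` impl passes to `enc::bytes`.
assert_eq!(digest.as_bytes().len(), 64);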

View file

@@ -1 +1,34 @@
-pub use ed25519_dalek::{Keypair, PublicKey, SecretKey, Signature, Signer};
+use rand::prelude::*;
+
+use crate::enc;
+
+pub use ed25519_dalek::{Keypair, PublicKey, SecretKey, Signature, Signer, Verifier};
+
+pub fn generate_keypair() -> Keypair {
+    let mut csprng = thread_rng();
+    Keypair::generate(&mut csprng)
+}
+
+impl enc::Encode for Keypair {
+    fn term(&self) -> enc::Term<'_> {
+        enc::bytes(&self.to_bytes())
+    }
+}
+
+impl enc::Encode for PublicKey {
+    fn term(&self) -> enc::Term<'_> {
+        enc::bytes(self.as_bytes())
+    }
+}
+
+impl enc::Encode for SecretKey {
+    fn term(&self) -> enc::Term<'_> {
+        enc::bytes(self.as_bytes())
+    }
+}
+
+impl enc::Encode for Signature {
+    fn term(&self) -> enc::Term<'_> {
+        enc::bytes(&self.to_bytes())
+    }
+}
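For context, a short sketch of how the new helpers in this file fit together (Signer and Verifier are the re-exported ed25519-dalek traits; the .term() calls rely on the Encode impls added above):

use nettext::crypto::{self, Signer, Verifier};
use nettext::enc::Encode;

// Fresh Ed25519 keypair from the thread-local CSPRNG.
let keypair = crypto::generate_keypair();

let msg = b"message to authenticate";
let sig = keypair.sign(msg);                       // Signer trait (ed25519-dalek)
assert!(keypair.public.verify(msg, &sig).is_ok()); // Verifier trait (ed25519-dalek)

// The Encode impls turn keys and signatures into base64 nettext terms.
let _pk_term = keypair.public.term();
let _sig_term = sig.term();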

View file

@@ -19,4 +19,3 @@ pub enum CryptoError {
     /// A signature verification failed
     InvalidSignature,
 }
-

View file

@@ -430,7 +430,7 @@ impl<'a, 'b> Term<'a, 'b> {
     /// hash = 5M-jmj03vjHFlgnoB5cHmcqmihm_qhUTXxZQheAdQaZboeGxRq62vQCStJ6sIUwQPM-jo2WVS7vlL3Sis2IMlA
     /// }").unwrap();
     /// let [msg, hash] = term.dict_of(["message", "hash"], false).unwrap();
-    /// assert!(hash.b2sum().unwrap().check(msg.raw()).is_ok());
+    /// assert!(hash.b2sum().unwrap().verify(msg.raw()).is_ok());
     /// ```
     pub fn b2sum(&self) -> Result<crypto::Blake2Sum, TypeError> {
         Ok(crypto::Blake2Sum::from_bytes(self.bytes_exact()?))

View file

@@ -2,9 +2,10 @@
 use std::collections::HashMap;

-use crate::dec::decode;
+use crate::dec::{self, decode};
 use crate::{is_string_char, is_whitespace};

+/// A term meant to be encoded into a nettext representation
 pub struct Term<'a>(T<'a>);

 enum T<'a> {
@@ -12,8 +13,10 @@ enum T<'a> {
     OwnedStr(Vec<u8>),
     Dict(HashMap<&'a [u8], T<'a>>),
     List(Vec<T<'a>>),
+    Err(Error),
 }

+/// An error that happenned when creating a nettext encoder term
 #[derive(Debug)]
 pub enum Error {
     InvalidCharacter(u8),
@@ -21,22 +24,35 @@ pub enum Error {
     NotADictionnary,
 }

+// ---- helpers to transform datatypes into encoder terms ----
+
+/// Trait for anything that can be encoded as nettext
+pub trait Encode {
+    fn term(&self) -> Term<'_>;
+}
+
+impl<'a, 'b> Encode for dec::Term<'a, 'b> {
+    fn term(&self) -> Term<'_> {
+        Term(T::Str(self.raw()))
+    }
+}
+
 // ---- helpers to build terms ----

-/// Encode a string (may contain whitespace)
+/// Term corresponding to a string (that may contain whitespace)
 ///
 /// ```
 /// use nettext::enc::*;
 ///
-/// assert_eq!(encode(&string("Hello world .").unwrap()), b"Hello world .");
+/// assert_eq!(encode(string("Hello world .")).unwrap(), b"Hello world .");
 /// ```
-pub fn string(s: &str) -> Result<Term<'_>, Error> {
+pub fn string(s: &str) -> Term<'_> {
     for c in s.as_bytes().iter() {
         if !(is_string_char(*c) || is_whitespace(*c)) {
-            return Err(Error::InvalidCharacter(*c));
+            return Term(T::Err(Error::InvalidCharacter(*c)));
         }
     }
-    Ok(Term(T::Str(s.as_bytes())))
+    Term(T::Str(s.as_bytes()))
 }

 /// Include a raw nettext value
@@ -44,95 +60,133 @@ pub fn string(s: &str) -> Result<Term<'_>, Error> {
 /// ```
 /// use nettext::enc::*;
 ///
-/// assert_eq!(encode(&raw(b"Hello { a = b, c = d} .").unwrap()), b"Hello { a = b, c = d} .");
+/// assert_eq!(encode(raw(b"Hello { a = b, c = d} .")).unwrap(), b"Hello { a = b, c = d} .");
 /// ```
-pub fn raw(bytes: &[u8]) -> Result<Term<'_>, Error> {
+pub fn raw(bytes: &[u8]) -> Term<'_> {
     if decode(bytes).is_err() {
-        return Err(Error::InvalidRaw);
+        return Term(T::Err(Error::InvalidRaw));
     }
-    Ok(Term(T::Str(bytes)))
+    Term(T::Str(bytes))
 }

-/// Encode a list of items
+/// Term corresponding to a list of terms
 ///
 /// ```
 /// use nettext::enc::*;
 ///
-/// assert_eq!(encode(&list([
-///     string("Hello").unwrap(),
-///     string("world").unwrap()
-/// ])), b"Hello world");
+/// assert_eq!(encode(list([
+///     string("Hello"),
+///     string("world")
+/// ])).unwrap(), b"Hello world");
 /// ```
 pub fn list<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
-    Term(T::List(terms.into_iter().map(|x| x.0).collect()))
+    let mut tmp = Vec::with_capacity(8);
+    for t in terms {
+        match t.0 {
+            T::Err(e) => return Term(T::Err(e)),
+            x => tmp.push(x),
+        }
+    }
+    Term(T::List(tmp))
 }

-/// Encode a list of items
+/// Term corresponding to a dictionnary of items
 ///
 /// ```
 /// use nettext::enc::*;
 ///
-/// assert_eq!(encode(&dict([
-///     ("a", string("Hello").unwrap()),
-///     ("b", string("world").unwrap())
-/// ])), b"{\n  a = Hello,\n  b = world,\n}");
+/// assert_eq!(encode(dict([
+///     ("a", string("Hello")),
+///     ("b", string("world"))
+/// ])).unwrap(), b"{\n  a = Hello,\n  b = world,\n}");
 /// ```
 pub fn dict<'a, I: IntoIterator<Item = (&'a str, Term<'a>)>>(pairs: I) -> Term<'a> {
-    Term(T::Dict(
-        pairs
-            .into_iter()
-            .map(|(k, v)| (k.as_bytes(), v.0))
-            .collect(),
+    let mut tmp = HashMap::new();
+    for (k, v) in pairs {
+        match v.0 {
+            T::Err(e) => return Term(T::Err(e)),
+            vv => {
+                tmp.insert(k.as_bytes(), vv);
+            }
+        }
+    }
+    Term(T::Dict(tmp))
+}
+
+/// Term corresponding to a byte slice,
+/// encoding using base64 url-safe encoding without padding
+///
+/// Example:
+///
+/// ```
+/// use nettext::enc::*;
+///
+/// assert_eq!(encode(bytes(b"hello, world!")).unwrap(), b"aGVsbG8sIHdvcmxkIQ");
+/// ```
+pub fn bytes(b: &[u8]) -> Term<'static> {
+    Term(T::OwnedStr(
+        base64::encode_config(b, base64::URL_SAFE_NO_PAD).into_bytes(),
     ))
 }

 impl<'a> Term<'a> {
-    pub fn push(self, t: Term<'a>) -> Term<'a> {
-        match self.0 {
-            T::List(mut v) => {
-                v.push(t.0);
-                Term(T::List(v))
-            }
-            x => Term(T::List(vec![x, t.0])),
+    /// Append a term to an existing term.
+    /// Transforms the initial term into a list if necessary.
+    pub fn append(self, t: Term<'a>) -> Term<'a> {
+        match t.0 {
+            T::Err(e) => Term(T::Err(e)),
+            tt => match self.0 {
+                T::List(mut v) => {
+                    v.push(tt);
+                    Term(T::List(v))
+                }
+                x => Term(T::List(vec![x, tt])),
+            },
         }
     }

-    pub fn insert(self, k: &'a str, v: Term<'a>) -> Result<Term<'a>, Error> {
-        match self.0 {
-            T::Dict(mut d) => {
-                d.insert(k.as_bytes(), v.0);
-                Ok(Term(T::Dict(d)))
-            }
-            _ => Err(Error::NotADictionnary),
+    /// Inserts a key-value pair into a term that is a dictionnary.
+    /// Fails if `self` is not a dictionnary.
+    pub fn insert(self, k: &'a str, v: Term<'a>) -> Term<'a> {
+        match v.0 {
+            T::Err(e) => Term(T::Err(e)),
+            vv => match self.0 {
+                T::Dict(mut d) => {
+                    d.insert(k.as_bytes(), vv);
+                    Term(T::Dict(d))
+                }
+                _ => Term(T::Err(Error::NotADictionnary)),
+            },
         }
     }
 }

 // ---- encoding function ----

-pub fn encode<'a>(t: &Term<'a>) -> Vec<u8> {
+/// Generate the nettext representation of a term
+pub fn encode<'a>(t: Term<'a>) -> Result<Vec<u8>, Error> {
     let mut buf = Vec::with_capacity(128);
-    encode_aux(&mut buf, &t.0, 0);
-    buf
+    encode_aux(&mut buf, t.0, 0)?;
+    Ok(buf)
 }

-fn encode_aux<'a>(buf: &mut Vec<u8>, term: &T<'a>, indent: usize) {
+fn encode_aux<'a>(buf: &mut Vec<u8>, term: T<'a>, indent: usize) -> Result<(), Error> {
     match term {
         T::Str(s) => buf.extend_from_slice(s),
         T::OwnedStr(s) => buf.extend_from_slice(&s),
-        T::Dict(d) => {
+        T::Dict(mut d) => {
             buf.extend_from_slice(b"{\n");
             let indent2 = indent + 2;
-            let mut keys = d.keys().collect::<Vec<_>>();
+            let mut keys = d.keys().cloned().collect::<Vec<_>>();
             keys.sort();
             for k in keys {
-                let v = d.get(k).unwrap();
+                let v = d.remove(k).unwrap();
                 for _ in 0..indent2 {
                     buf.push(b' ');
                 }
                 buf.extend_from_slice(k);
                 buf.extend_from_slice(b" = ");
-                encode_aux(buf, v, indent2);
+                encode_aux(buf, v, indent2)?;
                 buf.extend_from_slice(b",\n");
             }
             for _ in 0..indent {
@@ -142,7 +196,7 @@ fn encode_aux<'a>(buf: &mut Vec<u8>, term: &T<'a>, indent: usize) {
         }
         T::List(l) => {
             let indent2 = indent + 2;
-            for (i, v) in l.iter().enumerate() {
+            for (i, v) in l.into_iter().enumerate() {
                 if buf.iter().rev().take_while(|c| **c != b'\n').count() > 80 {
                     buf.push(b'\n');
                     for _ in 0..indent2 {
@@ -151,10 +205,12 @@ fn encode_aux<'a>(buf: &mut Vec<u8>, term: &T<'a>, indent: usize) {
                 } else if i > 0 {
                     buf.push(b' ');
                 }
-                encode_aux(buf, v, indent2);
+                encode_aux(buf, v, indent2)?;
             }
         }
+        T::Err(e) => return Err(e),
     }
+    Ok(())
 }

 #[cfg(test)]
@@ -164,12 +220,12 @@ mod tests {
     #[test]
     fn complex1() {
         let input = list([
-            string("HELLO").unwrap(),
-            string("alexhelloworld").unwrap(),
+            string("HELLO"),
+            string("alexhelloworld"),
             dict([
-                ("from", string("jxx").unwrap()),
-                ("subject", string("hello").unwrap()),
-                ("data", raw(b"{ f1 = plop, f2 = kuko }").unwrap()),
+                ("from", string("jxx")),
+                ("subject", string("hello")),
+                ("data", raw(b"{ f1 = plop, f2 = kuko }")),
             ]),
         ]);
         let expected = b"HELLO alexhelloworld {
@@ -177,9 +233,9 @@ mod tests {
   from = jxx,
   subject = hello,
 }";
-        let enc = encode(&input);
+        let enc = encode(input).unwrap();
         eprintln!("{}", std::str::from_utf8(&enc).unwrap());
         eprintln!("{}", std::str::from_utf8(&expected[..]).unwrap());
-        assert_eq!(encode(&input), expected);
+        assert_eq!(&enc, &expected[..]);
     }
 }
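Taken together, the reworked enc API defers errors instead of returning Result from every constructor: string, raw, list, dict and bytes always return a Term, invalid input is stored as T::Err inside it, and the error only surfaces when encode() is called. A rough sketch of a call site after this change (the failing case assumes '{' is rejected by is_string_char, which this diff does not show):

use nettext::enc::*;

// Happy path: builders nest without intermediate unwrap() / ? noise.
let encoded = encode(dict([
    ("verb", string("CALL")),
    ("body", bytes(b"\x00\x01\x02")), // base64 url-safe, no padding
])).unwrap();
assert!(!encoded.is_empty());

// Error path: the bad character is recorded inside the Term and only
// reported by encode(), not by string() itself.
let result = encode(list([
    string("ok"),
    string("not{ok"), // assumed invalid: '{' is a structural character
]));
assert!(result.is_err());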

View file

@@ -1,3 +1,76 @@
+//! A text-based data format for cryptographic network protocols.
+//!
+//! ```
+//! use nettext::enc::*;
+//! use nettext::dec::*;
+//! use nettext::crypto::{self, Signer, Verifier};
+//!
+//! let keypair = crypto::generate_keypair();
+//!
+//! // Encode a fist object that represents a payload that will be hashed and signed
+//! let text1 = encode(list([
+//!     string("CALL"),
+//!     string("myfunction"),
+//!     dict([
+//!         ("a", string("hello")),
+//!         ("b", string("world")),
+//!         ("c", raw(b"{ a = 12, b = 42 }")),
+//!     ]),
+//!     keypair.public.term(),
+//! ])).unwrap();
+//! eprintln!("{}", std::str::from_utf8(&text1).unwrap());
+//!
+//! let hash = crypto::Blake2Sum::compute(&text1);
+//! let sign = keypair.sign(&text1);
+//!
+//! // Encode a second object that represents the signed and hashed payload
+//! let text2 = encode(dict([
+//!     ("hash", hash.term()),
+//!     ("signature", sign.term()),
+//!     ("payload", raw(&text1)),
+//! ])).unwrap();
+//! eprintln!("{}", std::str::from_utf8(&text2).unwrap());
+//!
+//! // Decode and check everything is fine
+//! let object1 = decode(&text2).unwrap();
+//! let [hash, signature, payload] = object1.dict_of(["hash", "signature", "payload"], false).unwrap();
+//! assert!(hash.b2sum().unwrap().verify(payload.raw()).is_ok());
+//! assert_eq!(payload.raw(), text1);
+//!
+//! let object2 = decode(payload.raw()).unwrap();
+//!
+//! let [verb, arg1, arg2, pubkey] = object2.list_of().unwrap();
+//! let pubkey = pubkey.public_key().unwrap();
+//! assert!(pubkey.verify(payload.raw(), &signature.signature().unwrap()).is_ok());
+//!
+//! assert_eq!(verb.string().unwrap(), "CALL");
+//! assert_eq!(arg1.string().unwrap(), "myfunction");
+//! assert_eq!(pubkey, keypair.public);
+//! ```
+//!
+//! The value of `text1` would be as follows:
+//!
+//! ```raw
+//! CALL myfunction {
+//!   a = hello,
+//!   b = world,
+//!   c = { a = 12, b = 42 },
+//! } gTjRjHtSX6OCwq3pdl9Bpg6M2h-2WkciKi0uNV8NQX0
+//! ```
+//!
+//! And the value of `text2` would be as follows:
+//! ```raw
+//! {
+//!   hash = BEBZp98KF_d1rvBd5Ib8q1w_oGvrvIcKRXFv9kMB0ewOWH42OPd8qa0V_2ranV92z0mEdswftqvpAYebziTIew,
+//!   payload = CALL myfunction {
+//!   a = hello,
+//!   b = world,
+//!   c = { a = 12, b = 42 },
+//! } gTjRjHtSX6OCwq3pdl9Bpg6M2h-2WkciKi0uNV8NQX0,
+//!   signature = rAwIUTsCdoB_4eqo7r5e_J5ZHFaxHnXi99oNWi7h7y0mRfgt5u7-qXn7spIN1GcmDWYh4EPzoY34Br-sRxi0AA,
+//! }
+//! ```
+
 pub mod crypto;
 pub mod dec;
 pub mod enc;