v0.4.0 : change delimiter, etc

Alex 2023-05-10 12:05:25 +02:00
parent 7dc139737c
commit bd11c73899
8 changed files with 104 additions and 103 deletions

View File

@ -2,7 +2,7 @@
name = "nettext"
description = "A text-based data format for cryptographic network protocols"
authors = ["Alex Auvolat <alex@adnab.me>"]
version = "0.3.3"
version = "0.4.0"
edition = "2021"
license = "AGPL-3.0"
readme = "README.md"

View File

@ -13,7 +13,7 @@ A text-based data format for cryptographic network protocols.
A term can be of any of the following kinds:
- a string, which may contain only ASCII alphanumeric terms and `.-_*?`
- a string, which may contain only ASCII alphanumeric characters and a limited set of other ASCII characters, not including those used to represent other kinds of terms
- a dict, which maps strings (as defined above) to any term type
- a list, which may contain any number of any kind of terms (can be mixed)
- a sequence, consisting of at least two of the above (can be mixed), simply separated by whitespace; sequences cannot be nested
@ -22,15 +22,15 @@ Dicts are represented as follows:
```
{
key1 = value1,
key1 = value1;
key2 = value2
}
```
Dicts are represented as follows:
Lists are represented as follows:
```
[ term1, term2 ]
[ term1; term2 ]
```
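To illustrate the new delimiter in practice, here is a minimal sketch (not part of this commit) that parses the two forms above with the crate's decoder, using the accessors documented in `nettext::dec`:

```rust
use nettext::dec::decode;

fn main() {
    // A dict and a list written with the new ';' delimiter.
    let d = decode(b"{ key1 = value1; key2 = value2 }").unwrap();
    let dict = d.dict().unwrap();
    assert_eq!(dict.get("key1").unwrap().str().unwrap(), "value1");

    let l = decode(b"[ term1; term2 ]").unwrap();
    let items = l.list().unwrap();
    assert_eq!(items.len(), 2);
    assert_eq!(items[0].str().unwrap(), "term1");

    // Note: ',' is no longer a delimiter; with this change it becomes an
    // ordinary string character (see the character-class constants below).
}
```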
Sequences are represented as follows:
@ -43,9 +43,9 @@ As a consequence, complex data structures can be defined as follows:
```
SEND MESSAGE {
topic = blah,
topic = blah;
to = [
TOPIC hello,
TOPIC hello;
USER john
],
body = blah blah

View File

@ -138,11 +138,11 @@ mod tests {
#[test]
fn simple_dict() {
let bytes = b" { aze = hello, by = bojzkz pipo, ccde = ke } ";
let bytes = b" { aze = hello; by = bojzkz pipo; ccde = ke } ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Dict(
b"{ aze = hello, by = bojzkz pipo, ccde = ke }",
b"{ aze = hello; by = bojzkz pipo; ccde = ke }",
[
(&b"aze"[..], AnyTerm::Str(b"hello")),
(
@ -163,11 +163,11 @@ mod tests {
#[test]
fn simple_dict_2() {
let bytes = b" { aze = hello, by = bojzkz pipo , ccde = ke , } ";
let bytes = b" { aze = hello; by = bojzkz pipo ; ccde = ke ; } ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Dict(
b"{ aze = hello, by = bojzkz pipo , ccde = ke , }",
b"{ aze = hello; by = bojzkz pipo ; ccde = ke ; }",
[
(&b"aze"[..], AnyTerm::Str(b"hello")),
(
@ -188,11 +188,11 @@ mod tests {
#[test]
fn simple_list() {
let bytes = b" [ hello, bojzkz pipo, ke ] ";
let bytes = b" [ hello; bojzkz pipo; ke ] ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
b"[ hello, bojzkz pipo, ke ]",
b"[ hello; bojzkz pipo; ke ]",
[
AnyTerm::Str(b"hello"),
AnyTerm::Seq(
@ -209,11 +209,11 @@ mod tests {
#[test]
fn simple_list_2() {
let bytes = b" [ hello, bojzkz pipo , ke , ] ";
let bytes = b" [ hello; bojzkz pipo ; ke ; ] ";
assert_eq!(
decode(bytes),
Ok(AnyTerm::List(
b"[ hello, bojzkz pipo , ke , ]",
b"[ hello; bojzkz pipo ; ke ; ]",
[
AnyTerm::Str(b"hello"),
AnyTerm::Seq(
@ -243,7 +243,7 @@ mod tests {
#[test]
fn real_world_2() {
let bytes = b"STANCE sthash stsign { author = alexpubkey, height = 12, parent = parenthash, data = MESSAGE { text = hello } }";
let bytes = b"STANCE sthash stsign { author = alexpubkey; height = 12; parent = parenthash; data = MESSAGE { text = hello } }";
assert_eq!(
decode(bytes),
Ok(AnyTerm::Seq(
@ -252,7 +252,7 @@ mod tests {
NonSeqTerm::Str(b"STANCE"),
NonSeqTerm::Str(b"sthash"),
NonSeqTerm::Str(b"stsign"),
NonSeqTerm::Dict(b"{ author = alexpubkey, height = 12, parent = parenthash, data = MESSAGE { text = hello } }",
NonSeqTerm::Dict(b"{ author = alexpubkey; height = 12; parent = parenthash; data = MESSAGE { text = hello } }",
[
(&b"author"[..], AnyTerm::Str(b"alexpubkey")),
(&b"height"[..], AnyTerm::Str(b"12")),
@ -279,7 +279,7 @@ mod tests {
#[test]
fn real_world_3() {
let bytes = b"[ USER john, USER luke, GROUP strategy { owner = USER john, members = [ USER john, USER luke ] } ]";
let bytes = b"[ USER john; USER luke; GROUP strategy { owner = USER john; members = [ USER john; USER luke ] } ]";
let user_john = AnyTerm::Seq(
b"USER john",
vec![NonSeqTerm::Str(b"USER"), NonSeqTerm::Str(b"john")],
@ -297,18 +297,18 @@ mod tests {
user_john.clone(),
user_luke.clone(),
AnyTerm::Seq(
b"GROUP strategy { owner = USER john, members = [ USER john, USER luke ] }",
b"GROUP strategy { owner = USER john; members = [ USER john; USER luke ] }",
vec![
NonSeqTerm::Str(b"GROUP"),
NonSeqTerm::Str(b"strategy"),
NonSeqTerm::Dict(
b"{ owner = USER john, members = [ USER john, USER luke ] }",
b"{ owner = USER john; members = [ USER john; USER luke ] }",
[
(&b"owner"[..], user_john.clone()),
(
&b"members"[..],
AnyTerm::List(
b"[ USER john, USER luke ]",
b"[ USER john; USER luke ]",
vec![user_john, user_luke,]
)
)

View File

@ -101,8 +101,8 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"hello { a = x, b = y }").unwrap();
/// assert_eq!(term.raw_str().unwrap(), "hello { a = x, b = y }");
/// let term = decode(b"hello { a = x; b = y }").unwrap();
/// assert_eq!(term.raw_str().unwrap(), "hello { a = x; b = y }");
/// ```
pub fn raw_str(&self) -> Result<&'a str, TypeError> {
Ok(std::str::from_utf8(self.0.raw())?)
@ -266,7 +266,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2 }").unwrap();
/// let dict = term.dict().unwrap();
/// assert_eq!(dict.get("k1").unwrap().str().unwrap(), "v1");
/// assert_eq!(dict.get("k2").unwrap().str().unwrap(), "v2");
@ -292,7 +292,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2, k3 = v3 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2; k3 = v3 }").unwrap();
/// let [s1, s2] = term.dict_of(["k1", "k2"], true).unwrap();
/// assert_eq!(s1.str().unwrap(), "v1");
/// assert_eq!(s2.str().unwrap(), "v2");
@ -334,7 +334,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term = decode(b"{ k1 = v1, k2 = v2, k4 = v4 }").unwrap();
/// let term = decode(b"{ k1 = v1; k2 = v2; k4 = v4 }").unwrap();
/// let [s1, s2, s3] = term.dict_of_opt(["k1", "k2", "k3"], true).unwrap();
/// assert_eq!(s1.unwrap().str().unwrap(), "v1");
/// assert_eq!(s2.unwrap().str().unwrap(), "v2");
@ -370,7 +370,7 @@ impl<'a, 'b> Term<'a, 'b> {
/// ```
/// use nettext::dec::decode;
///
/// let term2 = decode(b"[ hello, world ]").unwrap();
/// let term2 = decode(b"[ hello; world ]").unwrap();
/// let seq2 = term2.list().unwrap();
/// assert_eq!(seq2.len(), 2);
/// assert_eq!(seq2[0].str().unwrap(), "hello");
@ -474,8 +474,8 @@ impl<'a, 'b> Term<'a, 'b> {
/// use nettext::crypto::generichash::GenericHash;
///
/// let term = decode(b"{
/// message = hello,
/// hash = Mk3PAn3UowqTLEQfNlol6GsXPe-kuOWJSCU0cbgbcs8,
/// message = hello;
/// hash = Mk3PAn3UowqTLEQfNlol6GsXPe-kuOWJSCU0cbgbcs8;
/// }").unwrap();
/// let [msg, hash] = term.dict_of(["message", "hash"], false).unwrap();
/// let expected_hash = GenericHash::hash_with_defaults(msg.raw(), None::<&Vec<u8>>).unwrap();
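Combining the accessors above, a small sketch (an editor's illustration, not a doctest from the crate) of reading a nested structure written with the `;` delimiter:

```rust
use nettext::dec::decode;

fn main() {
    let term = decode(b"{ name = alice; groups = [ admin; dev ] }").unwrap();

    // The boolean flag is used here as in the doctest above
    // (`false` when the dict contains exactly the requested keys).
    let [name, groups] = term.dict_of(["name", "groups"], false).unwrap();
    assert_eq!(name.str().unwrap(), "alice");

    let groups = groups.list().unwrap();
    assert_eq!(groups.len(), 2);
    assert_eq!(groups[0].str().unwrap(), "admin");
}
```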

View File

@ -11,7 +11,7 @@
//! dict([
//! ("a", string("hello").unwrap()),
//! ("b", string("world").unwrap()),
//! ("c", raw(b"{ a = 12, b = 42 }").unwrap()),
//! ("c", raw(b"{ a = 12; b = 42 }").unwrap()),
//! ("d", bytes_split(&((0..128u8).collect::<Vec<_>>()))),
//! ]).unwrap(),
//! ]).unwrap().encode();
@ -22,6 +22,7 @@ mod error;
use std::borrow::{Borrow, Cow};
use std::collections::HashMap;
use crate::*;
use crate::dec::{self, decode};
use crate::{is_string_char, is_whitespace, switch64, BytesEncoding};
@ -87,7 +88,7 @@ pub fn string_owned(s: String) -> Result<'static> {
/// ```
/// use nettext::enc::*;
///
/// assert_eq!(raw(b"Hello { a = b, c = d} .").unwrap().encode(), b"Hello { a = b, c = d} .");
/// assert_eq!(raw(b"Hello { a = b; c = d} .").unwrap().encode(), b"Hello { a = b; c = d} .");
/// ```
pub fn raw(bytes: &[u8]) -> Result<'_> {
if decode(bytes).is_err() {
@ -202,7 +203,7 @@ pub fn seq_flatten<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
/// assert_eq!(list([
/// string("Hello").unwrap(),
/// string("world").unwrap()
/// ]).encode(), b"[\n Hello,\n world,\n]");
/// ]).encode(), b"[\n Hello;\n world;\n]");
/// ```
pub fn list<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
let terms = terms.into_iter().map(|x| x.0).collect::<Vec<_>>();
@ -217,7 +218,7 @@ pub fn list<'a, I: IntoIterator<Item = Term<'a>>>(terms: I) -> Term<'a> {
/// assert_eq!(dict([
/// ("a", string("Hello").unwrap()),
/// ("b", string("world").unwrap())
/// ]).unwrap().encode(), b"{\n a = Hello,\n b = world,\n}");
/// ]).unwrap().encode(), b"{\n a = Hello;\n b = world;\n}");
/// ```
pub fn dict<'a, I: IntoIterator<Item = (&'a str, Term<'a>)>>(pairs: I) -> Result<'a> {
let mut tmp = HashMap::new();
@ -311,16 +312,16 @@ impl<'a> T<'a> {
T::OwnedStr(s) => buf.extend_from_slice(&s),
T::Dict(mut d) => {
if d.is_empty() {
buf.extend_from_slice(b"{}");
buf.extend_from_slice(&[DICT_OPEN, DICT_CLOSE]);
} else if d.len() == 1 {
let (k, v) = d.into_iter().next().unwrap();
buf.extend_from_slice(b"{ ");
buf.extend_from_slice(&[DICT_OPEN, b' ']);
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(b" = ");
buf.extend_from_slice(&[b' ', DICT_ASSIGN, b' ']);
v.encode_aux(buf, indent + 2, false);
buf.extend_from_slice(b" }");
buf.extend_from_slice(&[b' ', DICT_CLOSE]);
} else {
buf.extend_from_slice(b"{\n");
buf.extend_from_slice(&[DICT_OPEN, b'\n']);
let indent2 = indent + 2;
let mut keys = d.keys().cloned().collect::<Vec<_>>();
keys.sort();
@ -330,40 +331,40 @@ impl<'a> T<'a> {
buf.push(b' ');
}
buf.extend_from_slice(k.borrow());
buf.extend_from_slice(b" = ");
buf.extend_from_slice(&[b' ', DICT_ASSIGN, b' ']);
v.encode_aux(buf, indent2, false);
buf.extend_from_slice(b",\n");
buf.extend_from_slice(&[DICT_DELIM, b'\n']);
}
for _ in 0..indent {
buf.push(b' ');
}
buf.push(b'}');
buf.push(DICT_CLOSE);
}
}
T::List(l) => {
if l.len() == 0 {
buf.extend_from_slice(b"[]");
buf.extend_from_slice(&[LIST_OPEN, LIST_CLOSE]);
} else if l.len() == 1 {
buf.extend_from_slice(b"[ ");
buf.extend_from_slice(&[LIST_OPEN, b' ']);
l.into_iter()
.next()
.unwrap()
.encode_aux(buf, indent + 2, false);
buf.extend_from_slice(b" ]");
buf.extend_from_slice(&[b' ', LIST_CLOSE]);
} else {
let indent2 = indent + 2;
buf.extend_from_slice(b"[\n");
buf.extend_from_slice(&[LIST_OPEN, b'\n']);
for item in l {
for _ in 0..indent2 {
buf.push(b' ');
}
item.encode_aux(buf, indent2, false);
buf.extend_from_slice(b",\n");
buf.extend_from_slice(&[LIST_DELIM, b'\n']);
}
for _ in 0..indent {
buf.push(b' ');
}
buf.push(b']');
buf.push(LIST_CLOSE);
}
}
T::Seq(l) => {
@ -388,29 +389,29 @@ impl<'a> T<'a> {
T::Str(s) => buf.extend_from_slice(s),
T::OwnedStr(s) => buf.extend_from_slice(&s),
T::Dict(mut d) => {
buf.push(b'{');
buf.push(DICT_OPEN);
let mut keys = d.keys().cloned().collect::<Vec<_>>();
keys.sort();
for (i, k) in keys.into_iter().enumerate() {
if i > 0 {
buf.push(b',');
buf.push(DICT_DELIM);
}
let v = d.remove(&k).unwrap();
buf.extend_from_slice(k.borrow());
buf.push(b'=');
buf.push(DICT_ASSIGN);
v.encode_concise_aux(buf);
}
buf.push(b'}');
buf.push(DICT_CLOSE);
}
T::List(l) => {
buf.push(b'[');
buf.push(LIST_OPEN);
for (i, item) in l.into_iter().enumerate() {
if i > 0 {
buf.push(b',');
buf.push(LIST_DELIM);
}
item.encode_concise_aux(buf);
}
buf.push(b']');
buf.push(LIST_CLOSE);
}
T::Seq(l) => {
for (i, v) in l.into_iter().enumerate() {
@ -438,19 +439,19 @@ mod tests {
dict([
("from", string("jxx").unwrap()),
("subject", string("hello").unwrap()),
("data", raw(b"{ f1 = plop, f2 = kuko }").unwrap()),
("data", raw(b"{ f1 = plop; f2 = kuko }").unwrap()),
])
.unwrap(),
])
.unwrap();
let expected = "HELLO alexhelloworld [
dude,
why,
dude;
why;
] {
data = { f1 = plop, f2 = kuko },
from = jxx,
subject = hello,
data = { f1 = plop; f2 = kuko };
from = jxx;
subject = hello;
}";
assert_eq!(debug(&input.encode()), expected);
}
@ -464,12 +465,12 @@ mod tests {
dict([
("from", string("jxx").unwrap()),
("subject", string("hello").unwrap()),
("data", raw(b"{ f1 = plop, f2 = kuko }").unwrap()),
("data", raw(b"{ f1 = plop; f2 = kuko }").unwrap()),
])
.unwrap(),
])
.unwrap();
let expected_concise = "HELLO alexhelloworld [dude,why] {data={ f1 = plop, f2 = kuko },from=jxx,subject=hello}";
let expected_concise = "HELLO alexhelloworld [dude;why] {data={ f1 = plop; f2 = kuko };from=jxx;subject=hello}";
assert_eq!(debug(&input.encode_concise()), expected_concise);
}
}
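As an end-to-end illustration of the encoder with the new delimiter, here is a short sketch (not part of this commit) using the constructors from this module; both encoders sort dict keys, and `encode_concise()` is assumed to return a byte buffer as in the tests above:

```rust
use nettext::enc::*;

fn main() {
    let term = dict([
        ("greeting", string("Hello").unwrap()),
        ("names", list([string("Alice").unwrap(), string("Bob").unwrap()])),
    ])
    .unwrap();

    // Concise form: single-character delimiters, ';' instead of the old ','.
    let bytes = term.encode_concise();
    assert_eq!(&bytes[..], &b"{greeting=Hello;names=[Alice;Bob]}"[..]);
}
```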

View File

@ -15,7 +15,7 @@
//! dict([
//! ("a", string("hello").unwrap()),
//! ("b", string("world").unwrap()),
//! ("c", raw(b"{ a = 12, b = 42 }").unwrap()),
//! ("c", raw(b"{ a = 12; b = 42 }").unwrap()),
//! ("d", bytes_split(&((0..128u8).collect::<Vec<_>>()))),
//! ]).unwrap(),
//! keypair.public_key.term().unwrap(),
@ -52,33 +52,32 @@
//! assert_eq!(arg1.string().unwrap(), "myfunction");
//! ```
//!
//! The value of `text1` would be as follows:
//! The value of `signed_payload` would be as follows:
//!
//! ```raw
//!
//! CALL myfunction {
//! a = hello,
//! b = world,
//! c = { a = 12, b = 42 },
//! a = hello;
//! b = world;
//! c = { a = 12; b = 42 };
//! d = AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4v
//! MDEyMzQ1Njc4OTo7PD0-P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5f
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8,
//! } M3_to5OQ5TvIVyoRXTSK4Jz-zvSqsuh3a68haud_8Vs
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8;
//! } ZCkE-mTMlK3355u_0UzabRbSNcNO3CWAur7dAhglYtI
//! ```
//!
//! And the value of `text2` would be as follows:
//! And the value of `final_payload` would be as follows:
//! ```raw
//! {
//! hash = Hxpas10VnFIq8WIWGmQk7YLbxT-OMIkg0-sKSBJnUuo,
//! hash = fTTk8Hm0HLGwaskCIqFBzRVMrVTeXGetmNBK2X3pNyY;
//! payload = CALL myfunction {
//! a = hello,
//! b = world,
//! c = { a = 12, b = 42 },
//! a = hello;
//! b = world;
//! c = { a = 12; b = 42 };
//! d = AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4v
//! MDEyMzQ1Njc4OTo7PD0-P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5f
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8,
//! } M3_to5OQ5TvIVyoRXTSK4Jz-zvSqsuh3a68haud_8Vs,
//! signature = DAgQDqxi6rDEkGVoUmfHexWUCFYKNbQR0Fgp3_EiaMxiFLeQdy3w3If_lsYqDDmWHYR51whfaNGQZ6PxVthMAA,
//! YGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8;
//! } ZCkE-mTMlK3355u_0UzabRbSNcNO3CWAur7dAhglYtI;
//! signature = XPMrlhAIMfZb6a5Fh5F_ZaEf61olJ1hK4I2kh7vEPT1n20S-943X5cH35bb0Bfwkvy_ENfOTbb3ep1zn2lSIBg;
//! }
//! ```
//!
@ -129,18 +128,19 @@ impl BytesEncoding {
pub(crate) const DICT_OPEN: u8 = b'{';
pub(crate) const DICT_CLOSE: u8 = b'}';
pub(crate) const DICT_ASSIGN: u8 = b'=';
pub(crate) const DICT_DELIM: u8 = b',';
pub(crate) const DICT_DELIM: u8 = b';';
pub(crate) const LIST_OPEN: u8 = b'[';
pub(crate) const LIST_CLOSE: u8 = b']';
pub(crate) const LIST_DELIM: u8 = b',';
pub(crate) const STR_EXTRA_CHARS: &[u8] = b"._-+*?@:/\\";
pub(crate) const LIST_DELIM: u8 = b';';
const BASE_EXTRA_CHARS: &[u8] = br#".,:?!@$^<>|&#"'_-+*/%"#;
const STR_EXTRA_CHARS: &[u8] = b"\\";
pub(crate) const SWITCH64_SEPARATOR: u8 = b'\\';
pub(crate) const SWITCH64_EXTRA_CHARS: &[u8] = b"._-+*?@:/";
pub(crate) const SWITCH64_EXTRA_CHARS: &[u8] = BASE_EXTRA_CHARS;
#[inline]
pub(crate) fn is_string_char(c: u8) -> bool {
c.is_ascii_alphanumeric() || STR_EXTRA_CHARS.contains(&c)
c.is_ascii_alphanumeric() || BASE_EXTRA_CHARS.contains(&c) || STR_EXTRA_CHARS.contains(&c)
}
#[inline]

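The constants above determine which bytes may appear in a bare string. A standalone sketch mirroring the new definitions (they are `pub(crate)`, so this does not call into the crate itself):

```rust
// Mirrors the definitions from this commit: ';' is now the only delimiter,
// which frees ',' to be an ordinary string character.
const BASE_EXTRA_CHARS: &[u8] = br#".,:?!@$^<>|&#"'_-+*/%"#;
const STR_EXTRA_CHARS: &[u8] = b"\\";

fn is_string_char(c: u8) -> bool {
    c.is_ascii_alphanumeric() || BASE_EXTRA_CHARS.contains(&c) || STR_EXTRA_CHARS.contains(&c)
}

fn main() {
    assert!(is_string_char(b','));  // allowed inside strings as of 0.4.0
    assert!(!is_string_char(b';')); // reserved: DICT_DELIM / LIST_DELIM
    assert!(!is_string_char(b'=')); // reserved: DICT_ASSIGN
    assert!(b"{}[]".iter().all(|c| !is_string_char(*c))); // reserved brackets
}
```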
View File

@ -33,16 +33,16 @@ mod tests {
assert_eq!(ser_concise, expected_concise);
assert_eq!(from_bytes::<T>(ser_concise.as_bytes()).unwrap(), input);
let ser_str_hex = input
.serialize(&mut Serializer {
string_format: BytesEncoding::Switch64 {
allow_whitespace: true,
},
bytes_format: BytesEncoding::Hex { split: true },
})
.unwrap()
.encode();
panic!("{}", debug(&ser_str_hex));
// let ser_str_hex = input
// .serialize(&mut Serializer {
// string_format: BytesEncoding::Switch64 {
// allow_whitespace: true,
// },
// bytes_format: BytesEncoding::Hex { split: true },
// })
// .unwrap()
// .encode();
// panic!("{}", debug(&ser_str_hex));
}
#[test]
@ -57,7 +57,7 @@ mod tests {
int: 1,
seq: vec!["a".to_string(), "b".to_string()],
};
let expected = r#"Test {int=1,seq=[YQ,Yg]}"#;
let expected = r#"Test {int=1;seq=[YQ;Yg]}"#;
test_bidir(input, expected);
let input = vec![
@ -70,7 +70,7 @@ mod tests {
seq: vec!["c".to_string(), "d".to_string()],
},
];
let expected = r#"[Test {int=1,seq=[YQ,Yg]},Test {int=2,seq=[Yw,ZA]}]"#;
let expected = r#"[Test {int=1;seq=[YQ;Yg]};Test {int=2;seq=[Yw;ZA]}]"#;
test_bidir(input, expected);
}
@ -108,7 +108,7 @@ mod tests {
E::Struct { a: 1 },
E::Tuple(3, 2),
];
let expected = r#"[E.Unit,E.Unit,E.Newtype 1,E.Tuple 1 2,E.Struct {a=1},E.Tuple 3 2]"#;
let expected = r#"[E.Unit;E.Unit;E.Newtype 1;E.Tuple 1 2;E.Struct {a=1};E.Tuple 3 2]"#;
test_bidir(input, expected);
}
@ -122,14 +122,14 @@ mod tests {
#[test]
fn test_list() {
let input = vec![1, 2, 3, 4];
let expected = r#"[1,2,3,4]"#;
let expected = r#"[1;2;3;4]"#;
test_bidir(input, expected);
}
#[test]
fn test_seqlist() {
let input = vec![(1, 2), (2, 3), (3, 4), (5, 6)];
let expected = r#"[1 2,2 3,3 4,5 6]"#;
let expected = r#"[1 2;2 3;3 4;5 6]"#;
test_bidir(input, expected);
}
@ -138,13 +138,13 @@ mod tests {
let mut input = HashMap::new();
input.insert("hello".to_string(), "world".to_string());
input.insert("dont".to_string(), "panic".to_string());
let expected = r#"{ZG9udA=cGFuaWM,aGVsbG8=d29ybGQ}"#;
let expected = r#"{ZG9udA=cGFuaWM;aGVsbG8=d29ybGQ}"#;
test_bidir(input, expected);
let mut input = HashMap::new();
input.insert(12, vec![42, 125]);
input.insert(33, vec![19, 22, 21]);
let expected = r#"{12=[42,125],33=[19,22,21]}"#;
let expected = r#"{12=[42;125];33=[19;22;21]}"#;
test_bidir(input, expected);
}
}
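A sketch of deserialization with the new delimiter (the derive setup and the import path of `from_bytes` are assumptions; the tests above call it unqualified):

```rust
// Assumed import path for the `from_bytes` helper used in the tests above.
use nettext::serde::from_bytes;
use serde::Deserialize;

#[derive(Deserialize, Debug, PartialEq)]
struct Test {
    int: u32,
    seq: Vec<String>,
}

fn main() {
    // Strings are carried as base64 ("a" -> "YQ", "b" -> "Yg"); struct fields
    // and list items are now ';'-separated.
    let t: Test = from_bytes(b"Test {int=1;seq=[YQ;Yg]}").unwrap();
    assert_eq!(t, Test { int: 1, seq: vec!["a".into(), "b".into()] });
}
```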

View File

@ -105,9 +105,8 @@ mod tests {
assert_eq!(debug(&encode(&b"hello world"[..], true)), "hello world");
assert_eq!(
debug(&encode(&b"hello, world!"[..], true)),
"hello\\LA\\ world\\IQ"
"hello, world!"
);
assert_eq!(debug(&encode(&b",;,@$;8"[..], true)), "\\LDssQCQ7OA");
}
#[test]
@ -125,7 +124,8 @@ mod tests {
for s in [
br#"assert_eq!(debug(&decode(&b"hello\\LA\\ world\\IQ"[..]).unwrap()), "hello, world!");"#.to_vec(),
br#"- a list, which may contain any number of any kind of terms (can be mixed)"#.to_vec(),
base64::decode("dVcG5EzJqGP/2ZGkVu4ewzfAug1W96tb2KiBOVyPUXfw8uD34DEepW/PPqRzi0HL").unwrap()
base64::decode("dVcG5EzJqGP/2ZGkVu4ewzfAug1W96tb2KiBOVyPUXfw8uD34DEepW/PPqRzi0HL").unwrap(),
br#",;,@$;8"#.to_vec()
] {
assert_eq!(decode(&encode(&s, true)).unwrap(), s);
assert_eq!(decode(&encode(&s, false)).unwrap(), s);
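For context on why the Switch64 expectations changed (an illustrative check using the same `base64` crate these tests already depend on): the escaped segment in the removed assertion is simply the URL-safe, unpadded base64 of the raw bytes, and most of those bytes are plain string characters under the new character classes:

```rust
fn main() {
    // Old expectation: encode(b",;,@$;8", true) == "\LDssQCQ7OA", i.e. one
    // base64 segment introduced by the '\' separator.
    let escaped = base64::encode_config(b",;,@$;8", base64::URL_SAFE_NO_PAD);
    assert_eq!(escaped, "LDssQCQ7OA");

    // With the 0.4.0 character classes, ',', '@' and '$' no longer need
    // escaping (';' still does, as the new delimiter), so the output format
    // changed and the test now only checks the round-trip.
}
```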