rename hash() to sha256sum(); we might want to change it in some places
All checks were successful
continuous-integration/drone/push Build is passing
This commit is contained in:
parent e59322041a
commit b1b640ae8b
9 changed files with 30 additions and 32 deletions
@@ -55,7 +55,7 @@ pub async fn handle_put(
     let md5sum_arr = md5sum.finalize();
     let md5sum_hex = hex::encode(md5sum_arr);
 
-    let sha256sum_hash = hash(&first_block[..]);
+    let sha256sum_hash = sha256sum(&first_block[..]);
 
     ensure_checksum_matches(
         md5sum_arr.as_slice(),
@@ -95,7 +95,7 @@ pub async fn handle_put(
 
     // Initialize corresponding entry in version table
     let version = Version::new(version_uuid, bucket.into(), key.into(), false, vec![]);
-    let first_block_hash = hash(&first_block[..]);
+    let first_block_hash = sha256sum(&first_block[..]);
 
     // Transfer data and verify checksum
     let tx_result = read_and_put_blocks(
@@ -180,10 +180,10 @@ async fn read_and_put_blocks(
     first_block_hash: Hash,
     chunker: &mut BodyChunker,
 ) -> Result<(u64, GenericArray<u8, typenum::U16>, Hash), Error> {
-    let mut md5sum = Md5::new();
-    let mut sha256sum = Sha256::new();
-    md5sum.update(&first_block[..]);
-    sha256sum.input(&first_block[..]);
+    let mut md5hasher = Md5::new();
+    let mut sha256hasher = Sha256::new();
+    md5hasher.update(&first_block[..]);
+    sha256hasher.input(&first_block[..]);
 
     let mut next_offset = first_block.len();
     let mut put_curr_version_block = put_block_meta(
@@ -202,9 +202,9 @@ async fn read_and_put_blocks(
         let (_, _, next_block) =
             futures::try_join!(put_curr_block, put_curr_version_block, chunker.next())?;
         if let Some(block) = next_block {
-            md5sum.update(&block[..]);
-            sha256sum.input(&block[..]);
-            let block_hash = hash(&block[..]);
+            md5hasher.update(&block[..]);
+            sha256hasher.input(&block[..]);
+            let block_hash = sha256sum(&block[..]);
             let block_len = block.len();
             put_curr_version_block = put_block_meta(
                 garage.clone(),
@@ -222,14 +222,14 @@ async fn read_and_put_blocks(
     }
 
     let total_size = next_offset as u64;
-    let md5sum_arr = md5sum.finalize();
+    let md5sum_arr = md5hasher.finalize();
 
-    let sha256sum = sha256sum.result();
+    let sha256sum_arr = sha256hasher.result();
     let mut hash = [0u8; 32];
-    hash.copy_from_slice(&sha256sum[..]);
-    let sha256sum = Hash::from(hash);
+    hash.copy_from_slice(&sha256sum_arr[..]);
+    let sha256sum_arr = Hash::from(hash);
 
-    Ok((total_size, md5sum_arr, sha256sum))
+    Ok((total_size, md5sum_arr, sha256sum_arr))
 }
 
 async fn put_block_meta(
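The read_and_put_blocks() changes above are pure renames: the Md5/Sha256 hasher objects become md5hasher/sha256hasher so they no longer share a name with the sha256sum() helper. A minimal standalone sketch of the same dual-digest pattern, assuming the RustCrypto md-5 and sha2 crates with the update()/finalize() API; block storage and version bookkeeping are left out, and digest_blocks is an illustrative name:

use md5::Md5;
use sha2::{Digest, Sha256};

// Feed every block into both digests while counting bytes, roughly as
// read_and_put_blocks() does while it uploads blocks.
fn digest_blocks<'a>(blocks: impl Iterator<Item = &'a [u8]>) -> (u64, [u8; 16], [u8; 32]) {
    let mut md5hasher = Md5::new();
    let mut sha256hasher = Sha256::new();
    let mut total_size = 0u64;
    for block in blocks {
        md5hasher.update(block);
        sha256hasher.update(block);
        total_size += block.len() as u64;
    }
    let mut md5sum = [0u8; 16];
    md5sum.copy_from_slice(md5hasher.finalize().as_slice());
    let mut sha256sum = [0u8; 32];
    sha256sum.copy_from_slice(sha256hasher.finalize().as_slice());
    (total_size, md5sum, sha256sum)
}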
@@ -390,7 +390,7 @@ pub async fn handle_put_part(
 
     // Copy block to store
     let version = Version::new(version_uuid, bucket, key, false, vec![]);
-    let first_block_hash = hash(&first_block[..]);
+    let first_block_hash = sha256sum(&first_block[..]);
     let (_, md5sum_arr, sha256sum) = read_and_put_blocks(
         &garage,
         version,
@@ -6,7 +6,7 @@ use hyper::{Body, Method, Request};
 use sha2::{Digest, Sha256};
 
 use garage_table::*;
-use garage_util::data::{hash, Hash};
+use garage_util::data::{sha256sum, Hash};
 
 use garage_model::garage::Garage;
 use garage_model::key_table::*;
@@ -296,7 +296,7 @@ fn canonical_query_string(uri: &hyper::Uri) -> String {
 
 pub fn verify_signed_content(content_sha256: Option<Hash>, body: &[u8]) -> Result<(), Error> {
     let expected_sha256 = content_sha256.ok_or_bad_request("Request content hash not signed, aborting.")?;
-    if expected_sha256 != hash(body) {
+    if expected_sha256 != sha256sum(body) {
         return Err(Error::BadRequest(format!("Request content hash does not match signed hash")));
     }
     Ok(())
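verify_signed_content() now calls the renamed helper for the same check: the body must hash to the SHA-256 value that was signed with the request. A hedged sketch of that comparison using the sha2 crate directly (0.10-style API); the real code works with garage_util's Hash and Error types rather than these simplified stand-ins:

use sha2::{Digest, Sha256};

// Compare the actual SHA-256 of the body against the digest the client signed.
fn verify_signed_content(expected_sha256: [u8; 32], body: &[u8]) -> Result<(), String> {
    let actual: [u8; 32] = Sha256::digest(body).into();
    if actual != expected_sha256 {
        return Err("Request content hash does not match signed hash".to_string());
    }
    Ok(())
}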
@@ -176,7 +176,7 @@ impl BlockManager {
         f.read_to_end(&mut data).await?;
         drop(f);
 
-        if data::hash(&data[..]) != *hash {
+        if data::sha256sum(&data[..]) != *hash {
             let _lock = self.data_dir_lock.lock().await;
             warn!("Block {:?} is corrupted. Deleting and resyncing.", hash);
             fs::remove_file(path).await?;
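The BlockManager change is the same rename inside the on-read integrity check: a block read from disk must still hash to the name it is stored under. A simplified synchronous sketch of that check; the real code is async under tokio, holds a data-dir lock, and triggers a resync on mismatch, and check_block is an illustrative name:

use sha2::{Digest, Sha256};
use std::{fs, io, path::Path};

// Re-hash the block file and compare it with the hash it should have.
fn check_block(path: &Path, expected: &[u8; 32]) -> io::Result<bool> {
    let data = fs::read(path)?;
    let actual: [u8; 32] = Sha256::digest(&data).into();
    Ok(actual == *expected)
}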
@@ -1,5 +1,6 @@
 use std::collections::HashMap;
 use std::io::{Read, Write};
+use std::fmt::Write as FmtWrite;
 use std::net::{IpAddr, SocketAddr};
 use std::path::PathBuf;
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -10,7 +11,6 @@ use futures::future::join_all;
 use futures::select;
 use futures_util::future::*;
 use serde::{Deserialize, Serialize};
-use sha2::{Digest, Sha256};
 use tokio::prelude::*;
 use tokio::sync::watch;
 use tokio::sync::Mutex;
@@ -134,16 +134,14 @@ impl Status {
         let mut nodes = self.nodes.iter().collect::<Vec<_>>();
         nodes.sort_unstable_by_key(|(id, _status)| *id);
 
-        let mut hasher = Sha256::new();
+        let mut nodes_txt = String::new();
         debug!("Current set of pingable nodes: --");
         for (id, status) in nodes {
             debug!("{} {}", hex::encode(&id), status.addr);
-            hasher.input(format!("{} {}\n", hex::encode(&id), status.addr));
+            writeln!(&mut nodes_txt, "{} {}", hex::encode(&id), status.addr).unwrap();
         }
         debug!("END --");
-        self.hash
-            .as_slice_mut()
-            .copy_from_slice(&hasher.result()[..]);
+        self.hash = sha256sum(nodes_txt.as_bytes());
     }
 }
 
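The Status hash recalculation is also simplified by the new helper: instead of feeding a Sha256 hasher line by line, it renders the sorted node list to a string and hashes it in one call. A standalone sketch of that shape, assuming the sha2 and hex crates; the types and the status_hash name are illustrative:

use std::fmt::Write as FmtWrite;
use std::net::SocketAddr;
use sha2::{Digest, Sha256};

// Render "hex(id) addr" per node, one per line, then hash the whole text.
fn status_hash(sorted_nodes: &[([u8; 32], SocketAddr)]) -> [u8; 32] {
    let mut nodes_txt = String::new();
    for (id, addr) in sorted_nodes {
        writeln!(&mut nodes_txt, "{} {}", hex::encode(id), addr).unwrap();
    }
    Sha256::digest(nodes_txt.as_bytes()).into()
}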
@@ -56,7 +56,7 @@ impl Ring {
             let datacenter_idx = datacenters.iter().enumerate().find(|(_, dc)| *dc == datacenter).unwrap().0;
 
             for i in 0..config.n_tokens {
-                let location = hash(format!("{} {}", hex::encode(&id), i).as_bytes());
+                let location = sha256sum(format!("{} {}", hex::encode(&id), i).as_bytes());
 
                 new_ring.push(RingEntry {
                     location: location.into(),
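The Ring construction keeps its logic and only switches to the new name: each node gets n_tokens virtual tokens, and each token is placed on the ring at the SHA-256 of "<hex node id> <token index>". A small sketch of that placement step, assuming the sha2 and hex crates; token_locations is an illustrative name, and the real code builds RingEntry values with datacenter information:

use sha2::{Digest, Sha256};

// One ring position per virtual token of a node.
fn token_locations(node_id: &[u8; 32], n_tokens: u32) -> Vec<[u8; 32]> {
    (0..n_tokens)
        .map(|i| {
            let location: [u8; 32] =
                Sha256::digest(format!("{} {}", hex::encode(node_id), i).as_bytes()).into();
            location
        })
        .collect()
}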
@@ -10,7 +10,7 @@ pub trait PartitionKey {
 
 impl PartitionKey for String {
     fn hash(&self) -> Hash {
-        hash(self.as_bytes())
+        sha256sum(self.as_bytes())
     }
 }
 
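The PartitionKey impl shows what the helper is used for on the table side: any partition key is reduced to a fixed 32-byte hash that decides where it lives on the ring. A self-contained sketch of the same trait shape with simplified types; the real Hash is garage_util's FixedBytes32:

use sha2::{Digest, Sha256};

type Hash = [u8; 32]; // stand-in for garage_util::data::Hash

trait PartitionKey {
    fn hash(&self) -> Hash;
}

// String keys are partitioned by the SHA-256 of their bytes.
impl PartitionKey for String {
    fn hash(&self) -> Hash {
        Sha256::digest(self.as_bytes()).into()
    }
}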
@@ -44,7 +44,7 @@ impl TableFullReplication {
 
         let mut nodes = vec![];
         for (node, _) in ring.config.members.iter() {
-            let node_ranking = hash(&[node.as_slice(), my_id.as_slice()].concat());
+            let node_ranking = sha256sum(&[node.as_slice(), my_id.as_slice()].concat());
             nodes.push((*node, node_ranking));
        }
         nodes.sort_by(|(_, rank1), (_, rank2)| rank1.cmp(rank2));
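TableFullReplication ranks peers by hashing each member id concatenated with the local id, which gives every node its own deterministic ordering of the cluster. A sketch of that ranking step with byte-array stand-ins for the UUID type; ranked_nodes is an illustrative name:

use sha2::{Digest, Sha256};

// Rank each member by sha256(member_id || my_id) and sort by that rank.
fn ranked_nodes(members: &[[u8; 32]], my_id: &[u8; 32]) -> Vec<[u8; 32]> {
    let mut nodes: Vec<([u8; 32], [u8; 32])> = members
        .iter()
        .map(|node| {
            let ranking: [u8; 32] = Sha256::digest([&node[..], &my_id[..]].concat()).into();
            (*node, ranking)
        })
        .collect();
    nodes.sort_by(|(_, rank1), (_, rank2)| rank1.cmp(rank2));
    nodes.into_iter().map(|(node, _)| node).collect()
}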
@@ -322,7 +322,7 @@ where
             .range(range.begin.clone()..range.end.clone())
         {
             let (key, value) = item?;
-            let key_hash = hash(&key[..]);
+            let key_hash = sha256sum(&key[..]);
             if children.len() > 0
                 && key_hash.as_slice()[0..range.level]
                     .iter()
@@ -340,7 +340,7 @@ where
                     end: vec![],
                     level: 0,
                 };
-                children.push((item_range, hash(&value[..])));
+                children.push((item_range, sha256sum(&value[..])));
             }
             Ok(RangeChecksum {
                 bounds: range.clone(),
@@ -378,7 +378,7 @@ where
             }
             let found_limit = sub_ck.found_limit.unwrap();
 
-            let actual_limit_hash = hash(&found_limit[..]);
+            let actual_limit_hash = sha256sum(&found_limit[..]);
             if actual_limit_hash.as_slice()[0..range.level]
                 .iter()
                 .all(|x| *x == 0u8)
@@ -426,7 +426,7 @@ where
             );
 
             let hash = if v.children.len() > 0 {
-                Some(hash(&rmp_to_vec_all_named(&v)?[..]))
+                Some(sha256sum(&rmp_to_vec_all_named(&v)?[..]))
             } else {
                 None
             };
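In the table sync code the rename touches both the per-key hashes and the hash of each serialized checksum node: a range checksum with children is identified by the SHA-256 of its serialized form. A sketch of that "hash of a serialized node" step, assuming serde, rmp-serde and sha2; rmp_serde::to_vec_named stands in here for the repo's rmp_to_vec_all_named():

use serde::Serialize;
use sha2::{Digest, Sha256};

// Serialize a checksum node with MessagePack, then hash the bytes.
fn node_hash<T: Serialize>(node: &T) -> Result<[u8; 32], rmp_serde::encode::Error> {
    let bytes = rmp_serde::to_vec_named(node)?;
    Ok(Sha256::digest(&bytes).into())
}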
@@ -77,7 +77,7 @@ impl FixedBytes32 {
 pub type UUID = FixedBytes32;
 pub type Hash = FixedBytes32;
 
-pub fn hash(data: &[u8]) -> Hash {
+pub fn sha256sum(data: &[u8]) -> Hash {
     let mut hasher = Sha256::new();
     hasher.input(data);
     let mut hash = [0u8; 32];
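The last hunk, the renamed definition itself, is cut off by the page here. Judging from the call sites above, the helper presumably finishes by copying the digest into the 32-byte Hash. A standalone sketch of that shape, assuming the sha2 0.8-era input()/result() API used in this codebase, with Hash simplified to a plain byte array:

use sha2::{Digest, Sha256};

pub type Hash = [u8; 32]; // stand-in for garage_util::data::Hash (FixedBytes32)

pub fn sha256sum(data: &[u8]) -> Hash {
    let mut hasher = Sha256::new();
    hasher.input(data); // update()/finalize() in newer sha2 releases
    let mut hash = [0u8; 32];
    hash.copy_from_slice(&hasher.result()[..]); // same pattern as read_and_put_blocks() above
    hash
}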