Add Pcd::to_sorted_key_values, and hash values with a counter
parent 1068340c09
commit 5be3340aad
src/pcd.rs | 110
@@ -6,7 +6,6 @@ use aes_gcm::{
     aead::{Aead, Payload},
     AeadCore, Aes256Gcm, KeyInit,
 };
-use log::debug;
 use rand::thread_rng;
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
@@ -103,15 +102,29 @@ pub trait Pcd<'a>: Serialize + Deserialize<'a> {
         }
     }
 
-    fn hash_all_fields(&self, commited_in: OutPoint) -> Result<Map<String, Value>> {
+    fn to_sorted_key_values(&self) -> Result<Map<String, Value>> {
         let map = self.to_value_object()?;
 
+        let mut sorted_key_values: Vec<(String, Value)> = map.into_iter().map(|(key, value)| (key, value)).collect();
+        sorted_key_values.sort_by_key(|(key, _)| key.to_owned());
+
+        Ok(sorted_key_values.into_iter().collect())
+    }
+
+    /// Create hashed commitments for all values in the pcd
+    /// We need the commited_in outpoint to prevent the same data producing the same hashes between different processes or different states of the same process
+    fn hash_all_fields(&self, commited_in: OutPoint) -> Result<Map<String, Value>> {
         let outpoint = serialize(&commited_in);
 
-        let mut field2hash = Map::with_capacity(map.len());
+        // To prevent fields with identical data having identical commitments, we order the map in alphabetical order and append a counter to the value
+        let sorted_key_values = self.to_sorted_key_values()?;
+
+        let mut field2hash = Map::with_capacity(sorted_key_values.len());
         // this could be optimised since there's a midstate we're reusing
-        for (field, value) in map {
-            let tagged_hash = AnkPcdHash::from_value_with_outpoint(&value, &outpoint);
+        for (i, (field, value)) in sorted_key_values.into_iter().enumerate() {
+            let mut value_bin = serde_json::to_string(&value)?.into_bytes();
+            value_bin.push(i.try_into()?);
+            let tagged_hash = AnkPcdHash::from_value_with_outpoint(&value_bin, &outpoint);
             field2hash.insert(field, Value::String(tagged_hash.to_string()));
         }
 
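The comments added in this hunk carry the design reasoning: fields are ordered alphabetically, the field's position is appended to the serialized value as a counter byte, and the result is hashed together with the outpoint, so two fields holding identical data no longer produce identical commitments and the same data does not hash alike across processes or states. Below is a minimal standalone sketch of that construction, with sha2's Sha256 and the hex crate standing in for the crate's tagged AnkPcdHash, and a fixed byte array standing in for the serialized OutPoint; the helper name sketch_commitment and these substitutions are assumptions for illustration, not the project's primitives.

use sha2::{Digest, Sha256}; // stand-in hash; the crate uses a tagged AnkPcdHash

// Hypothetical helper mirroring hash_all_fields' preimage: the JSON text of the
// value, then a counter byte for the field's position in the alphabetically
// sorted map, hashed together with the serialized outpoint for domain separation.
fn sketch_commitment(value: &serde_json::Value, index: u8, outpoint_bytes: &[u8]) -> String {
    let mut value_bin = serde_json::to_string(value)
        .expect("JSON value serializes")
        .into_bytes();
    value_bin.push(index); // a single byte, so i.try_into()? caps the scheme at 256 fields
    let mut hasher = Sha256::new();
    hasher.update(outpoint_bytes);
    hasher.update(&value_bin);
    hex::encode(hasher.finalize())
}

fn main() {
    let same = serde_json::json!("duplicate data");
    let outpoint = [0u8; 36]; // placeholder for serialize(&commited_in)
    // Identical values in different fields now commit to different hashes.
    assert_ne!(
        sketch_commitment(&same, 0, &outpoint),
        sketch_commitment(&same, 1, &outpoint)
    );
}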
@@ -204,9 +217,9 @@ pub trait Pcd<'a>: Serialize + Deserialize<'a> {
         fields2keys: &Map<String, Value>,
         fields2plain: &mut Map<String, Value>,
     ) -> Result<()> {
-        let map = self.to_value_object()?;
+        let sorted_key_values = self.to_sorted_key_values()?;
 
-        for (field, encrypted_value) in map.iter() {
+        for (i, (field, encrypted_value)) in sorted_key_values.iter().enumerate() {
             if let Some(aes_key) = fields2keys.get(field) {
                 let key_buf = Vec::from_hex(&aes_key.to_string().trim_matches('\"'))?;
 
@@ -239,7 +252,9 @@ pub trait Pcd<'a>: Serialize + Deserialize<'a> {
                 fields2plain.insert(field.to_owned(), serde_json::from_str(&decrypted_value)?);
             } else if let Some(commitment) = fields2commit.get(field) { // We should always have a commitment
                 // We check if the hashed value is the commitment
-                let hashed_value = AnkPcdHash::from_value_with_outpoint(encrypted_value, &serialize(&commited_in));
+                let mut value_bin = encrypted_value.to_string().into_bytes();
+                value_bin.push(i.try_into()?);
+                let hashed_value = AnkPcdHash::from_value_with_outpoint(&value_bin, &serialize(&commited_in));
                 if commitment.as_str().unwrap() != &hashed_value.to_string() {
                     // The value is encrypted, and we don't have the key
                     // We put the commitment instead of the encrypted value
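The verification branch has to rebuild exactly the preimage that hash_all_fields committed to: the value as it sits in the map (still encrypted here), the same counter byte derived from the field's position in the sorted ordering, and the same serialized outpoint; any drift in ordering or index and the comparison fails, which is why both loops enumerate the sorted key/value pairs. A hedged sketch of that check, reusing the hypothetical sketch_commitment helper from the earlier sketch:

// Sketch only: recompute the digest for one field and compare it to the stored commitment.
fn sketch_matches_commitment(
    stored_value: &serde_json::Value, // exactly as held in the map, encrypted or plain
    field_index: u8,                  // position in the sorted ordering, as in enumerate()
    outpoint_bytes: &[u8],
    commitment_hex: &str,             // what fields2commit holds for this field
) -> bool {
    sketch_commitment(stored_value, field_index, outpoint_bytes) == commitment_hex
}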
@@ -258,7 +273,7 @@ pub trait Pcd<'a>: Serialize + Deserialize<'a> {
 
         match value {
             Value::Object(map) => Ok(map),
-            _ => Err(Error::msg("self is not a valid json object"))
+            _ => Err(Error::msg("not a valid json object"))
         }
     }
 
@@ -544,6 +559,83 @@ mod tests {
             .unwrap()
     }
 
+    #[test]
+    fn test_sort_map() {
+        let pcd = json!({
+            "z": 1,
+            "b": 2,
+            "a": 3
+        });
+
+        let expected = json!({
+            "a": 3,
+            "b": 2,
+            "z": 1
+        });
+
+        let sorted_map = pcd.to_sorted_key_values().unwrap();
+
+        assert_eq!(Value::Object(sorted_map), expected);
+    }
+
+    #[test]
+    fn test_sort_empty_map() {
+        let empty_map = json!({});
+        let expected = json!({});
+
+        let actual_sorted_map = empty_map.to_sorted_key_values().expect("Failed to sort keys");
+
+        assert_eq!(
+            Value::Object(actual_sorted_map),
+            expected,
+            "Sorting failed for an empty map"
+        );
+    }
+
+    #[test]
+    fn test_sort_already_sorted_map() {
+        let sorted_map = json!({
+            "a": 1,
+            "b": 2,
+            "c": 3
+        });
+
+        let expected = sorted_map.clone(); // Expected result is the same
+
+        let actual_sorted_map = sorted_map.to_sorted_key_values().expect("Failed to sort keys");
+
+        assert_eq!(
+            Value::Object(actual_sorted_map),
+            expected,
+            "Sorting failed for an already sorted map"
+        );
+    }
+
+
+    #[test]
+    fn test_sort_mixed_value_map() {
+        let mixed_map = json!({
+            "z": [1, 2, 3],
+            "b": { "nested": true },
+            "a": 42
+        });
+
+        let expected = json!({
+            "a": 42,
+            "b": { "nested": true },
+            "z": [1, 2, 3]
+        });
+
+        let actual_sorted_map = mixed_map.to_sorted_key_values().expect("Failed to sort keys");
+
+        assert_eq!(
+            Value::Object(actual_sorted_map),
+            expected,
+            "Sorting failed for a map with mixed value types"
+        );
+    }
+
+
     #[test]
     fn test_validation_rule_new() {
         // Valid input
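All four new tests share the test_sort prefix, so after pulling the commit they can be run on their own with cargo's test name filter:

cargo test test_sort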