Complete overhaul of the pcd/process logic, with sensible custom types and less serialization

This commit is contained in:
NicolasCantu 2025-03-12 10:24:20 +01:00
parent 8c5b2701fa
commit bd8ae9183a
4 changed files with 326 additions and 456 deletions

View File

@ -1,15 +1,12 @@
use std::collections::BTreeMap;
use anyhow::Result;
use rand::{thread_rng, RngCore};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use sp_client::bitcoin::hex::DisplayHex;
use sp_client::bitcoin::OutPoint;
use tsify::Tsify;
use crate::error::AnkError;
use crate::pcd::RoleDefinition;
use crate::pcd::{Pcd, PcdCommitments, Roles};
use crate::serialization::{OutPointMemberMap, OutPointProcessMap};
use crate::signature::Proof;
@ -74,9 +71,9 @@ impl AnkFlag {
#[tsify(into_wasm_abi, from_wasm_abi)]
pub struct CommitMessage {
pub process_id: OutPoint,
pub pcd_commitment: Value, // map of field <=> hash of the clear value
pub roles: BTreeMap<String, RoleDefinition>,
pub public_data: BTreeMap<String, String>,
pub pcd_commitment: PcdCommitments, // map of field <=> hash of the clear value
pub roles: Roles,
pub public_data: Pcd,
pub validation_tokens: Vec<Proof>,
pub error: Option<AnkError>,
}
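The typed fields above replace the old serde_json::Value / BTreeMap payloads, so a commit message can now be assembled without ad-hoc JSON. A minimal sketch (not part of the diff) of building an update commitment with the constructor below; process_id is an assumed existing OutPoint:
    // Sketch: build the typed commitments first, then the update message.
    let clear_state = Pcd::new(BTreeMap::from([("field1".to_owned(), Value::String("value1".to_owned()))]));
    let public_data = Pcd::default();
    let roles = Roles::default();
    let pcd_commitment = PcdCommitments::new(&process_id, &clear_state, &public_data, &roles)?;
    let msg = CommitMessage::new_update_commitment(process_id, pcd_commitment, roles, public_data);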
@ -86,9 +83,9 @@ impl CommitMessage {
/// validation_tokens must be empty
pub fn new_update_commitment(
process_id: OutPoint,
pcd_commitment: Value,
roles: BTreeMap<String, RoleDefinition>,
public_data: BTreeMap<String, String>,
pcd_commitment: PcdCommitments,
roles: Roles,
public_data: Pcd,
) -> Self {
Self {
process_id,

View File

@ -1,28 +1,25 @@
use anyhow::{Error, Result};
use rs_merkle::{algorithms::Sha256, MerkleTree};
use serde::de::{DeserializeOwned, Error as DeError};
use serde::ser::SerializeStruct;
use std::collections::HashSet;
use wasm_bindgen::JsValue;
use std::collections::{BTreeMap, HashSet};
use std::hash::{Hash as StdHash, Hasher};
use std::fmt;
use aes_gcm::{
aead::{Aead, Payload},
AeadCore, Aes256Gcm, KeyInit,
};
use rand::thread_rng;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use serde_json::Value;
use sp_client::{
bitcoin::{
consensus::serialize, hashes::{sha256t_hash_newtype, Hash, HashEngine}, hex::{DisplayHex, FromHex}, secp256k1::PublicKey, OutPoint
consensus::serialize, hashes::{sha256t_hash_newtype, Hash, HashEngine}, secp256k1::PublicKey, OutPoint
},
silentpayments::utils::SilentPaymentAddress,
};
use tsify::Tsify;
use crate::{
crypto::AAD,
signature::{AnkHash, AnkValidationNoHash, AnkValidationYesHash, Proof},
serialization::hex_array_btree
};
#[derive(Debug, Default, Clone, Deserialize, Tsify)]
@ -135,228 +132,148 @@ impl AnkPcdHash {
}
}
pub trait Pcd<'a>: Serialize + Deserialize<'a> {
fn new_from_string(str: &str) -> Result<Value> {
let value: Value = serde_json::from_str(str)?;
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Tsify)]
#[tsify(into_wasm_abi)]
pub struct Pcd(BTreeMap<String, Value>);
match value {
Value::Object(_) => Ok(value),
_ => Err(Error::msg("Not a Pcd: not a valid JSON object"))
}
impl IntoIterator for Pcd {
type Item = (String, Value);
type IntoIter = std::collections::btree_map::IntoIter<String, Value>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl Pcd {
pub fn new(map: BTreeMap<String, Value>) -> Self {
Self(map)
}
fn to_sorted_key_values(&self) -> Result<Map<String, Value>> {
let map = self.to_value_object()?;
let mut sorted_key_values: Vec<(String, Value)> = map
.into_iter()
.map(|(key, value)| {
let sorted_value = match value {
Value::Object(obj) => {
// Recursively sort nested objects
let mut sorted_nested: Vec<(String, Value)> = obj.into_iter().collect();
sorted_nested.sort_by_key(|(k, _)| k.clone());
Value::Object(sorted_nested.into_iter().collect())
}
_ => value, // Keep other values unchanged
};
(key, sorted_value)
})
.collect();
// Sort top-level keys
sorted_key_values.sort_by_key(|(key, _)| key.clone());
Ok(sorted_key_values.into_iter().collect())
pub fn get(&self, key: &str) -> Option<&Value> {
self.0.get(key)
}
/// Create hashed commitments for all values in the pcd
/// We need the commited_in outpoint to prevent the same data from producing the same hashes across different processes or different states of the same process
fn hash_all_fields(&self, commited_in: OutPoint) -> Result<Map<String, Value>> {
let outpoint = serialize(&commited_in);
pub fn len(&self) -> usize {
self.0.len()
}
// To prevent fields with identical data from having identical commitments, we sort the map alphabetically and append a counter to the value
let sorted_key_values = self.to_sorted_key_values()?;
pub fn iter(&self) -> std::collections::btree_map::Iter<'_, String, Value> {
self.0.iter()
}
let mut field2hash = Map::with_capacity(sorted_key_values.len());
// this could be optimised further, since there's already a midstate we're reusing
for (i, (field, value)) in sorted_key_values.into_iter().enumerate() {
pub fn iter_mut(&mut self) -> std::collections::btree_map::IterMut<'_, String, Value> {
self.0.iter_mut()
}
pub fn insert(&mut self, key: String, value: Value) -> Option<Value> {
self.0.insert(key, value)
}
}
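A quick sketch (not part of the diff) of the Pcd newtype in use, assuming serde_json's json! macro is in scope:
    // Sketch: Pcd wraps a BTreeMap<String, Value> and exposes map-like accessors.
    let map: BTreeMap<String, Value> = serde_json::from_value(json!({ "field1": "value1" }))?;
    let mut pcd = Pcd::new(map);
    pcd.insert("field2".to_owned(), Value::String("value2".to_owned()));
    assert_eq!(pcd.len(), 2);
    assert_eq!(pcd.get("field1"), Some(&Value::String("value1".to_owned())));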
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
pub struct PcdCommitments(#[serde(with = "hex_array_btree")] BTreeMap<String, [u8; 32]>);
impl Tsify for PcdCommitments {
type JsType = JsValue;
const DECL: &'static str = "Record<string, string>";
fn from_js<T: Into<wasm_bindgen::JsValue>>(js: T) -> serde_json::Result<Self>
where
Self: DeserializeOwned, {
serde_wasm_bindgen::from_value(js.into()).map_err(|e| serde_json::Error::custom(e.to_string()))
}
fn into_js(&self) -> serde_json::Result<Self::JsType>
where
Self: Serialize, {
serde_wasm_bindgen::to_value(self)
.map_err(|e| serde_json::Error::custom(e.to_string()))
}
}
impl PcdCommitments {
/// Creates a new commitment map covering both permissioned and public data, plus the roles
pub fn new(commited_in: &OutPoint, clear_state: &Pcd, public_data: &Pcd, roles: &Roles) -> Result<Self> {
let serialized_outpoint = serialize(commited_in);
let mut field2hash: BTreeMap<String, [u8; 32]> = BTreeMap::new();
for (field, value) in clear_state.iter() {
let mut value_bin = serde_json::to_string(&value)?.into_bytes();
value_bin.push(i.try_into()?);
let tagged_hash = AnkPcdHash::from_value_with_outpoint(&value_bin, &outpoint);
field2hash.insert(field, Value::String(tagged_hash.to_string()));
value_bin.extend_from_slice(field.as_bytes());
let tagged_hash = AnkPcdHash::from_value_with_outpoint(&value_bin, &serialized_outpoint);
field2hash.insert(field.to_owned(), tagged_hash.to_byte_array());
}
Ok(field2hash)
for (field, value) in public_data.iter() {
let mut value_bin = serde_json::to_string(&value)?.into_bytes();
value_bin.extend_from_slice(field.as_bytes());
let tagged_hash = AnkPcdHash::from_value_with_outpoint(&value_bin, &serialized_outpoint);
field2hash.insert(field.to_owned(), tagged_hash.to_byte_array());
}
let serialized_roles = roles.to_bytes()?;
let roles_hash = AnkPcdHash::from_value_with_outpoint(&serialized_roles, &serialized_outpoint);
field2hash.insert("roles".to_owned(), roles_hash.to_byte_array());
Ok(Self(field2hash))
}
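The salting matters: the committing outpoint plus the appended field name keep identical clear values from hashing to identical commitments across fields, states, or processes. A small sketch (not part of the diff), where outpoint_a and outpoint_b are assumed distinct OutPoints and clear_state an assumed Pcd with a "field1" entry:
    // Sketch: the same clear value committed under two different outpoints
    // yields two different commitments for the same field.
    let a = PcdCommitments::new(&outpoint_a, &clear_state, &Pcd::default(), &Roles::default())?;
    let b = PcdCommitments::new(&outpoint_b, &clear_state, &Pcd::default(), &Roles::default())?;
    assert_ne!(a.get("field1"), b.get("field1"));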
/// We run this on the result of `hash_all_fields`
fn create_merkle_tree(&self) -> Result<MerkleTree<Sha256>> {
let map = self.to_sorted_key_values()?;
let leaves: Result<Vec<[u8; 32]>> = map
.iter()
.map(|(_, value)| {
let mut res = [0u8; 32];
if !value.is_string() {
return Err(Error::msg("value is not a string"));
}
let vec = Vec::from_hex(value.as_str().unwrap())?;
if vec.len() != 32 {
return Err(Error::msg("value must be 32B length"));
}
res.copy_from_slice(&vec);
Ok(res)
})
pub fn empty() -> Self {
Self(BTreeMap::new())
}
pub fn update_with_value(&mut self, outpoint: &OutPoint, field: &str, new_value: &Value) -> Result<()> {
let serialized_outpoint = serialize(outpoint);
if let Some(old_hash) = self.get_mut(field) {
// We hash the new_value
let mut value_bin = serde_json::to_string(new_value)?.into_bytes();
value_bin.extend_from_slice(field.as_bytes());
let tagged_hash = AnkPcdHash::from_value_with_outpoint(&value_bin, &serialized_outpoint);
*old_hash = tagged_hash.to_byte_array();
}
Ok(())
}
pub fn get(&self, field: &str) -> Option<&[u8; 32]> {
self.0.get(field)
}
pub fn get_mut(&mut self, field: &str) -> Option<&mut [u8; 32]> {
self.0.get_mut(field)
}
pub fn iter(&self) -> std::collections::btree_map::Iter<'_, String, [u8; 32]> {
self.0.iter()
}
pub fn iter_mut(&mut self) -> std::collections::btree_map::IterMut<'_, String, [u8; 32]> {
self.0.iter_mut()
}
pub fn keys(&self) -> Vec<String> {
self.0.keys().map(|k| k.to_owned()).collect()
}
/// Since BTreeMap key order is deterministic, we can guarantee a consistent merkle tree
pub fn create_merkle_tree(&self) -> Result<MerkleTree<Sha256>> {
let leaves: Vec<[u8; 32]> = self.0
.values()
.map(|hash| *hash)
.collect();
let mut leaves = leaves?;
leaves.sort_unstable();
let merkle_tree = MerkleTree::<Sha256>::from_leaves(&leaves);
let merkle_tree = MerkleTree::<Sha256>::from_leaves(leaves.as_slice());
Ok(merkle_tree)
}
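Because the leaves come straight from the BTreeMap values in key order, the root is fully determined by the commitment set; this root is what ProcessState stores as state_id. A sketch (not part of the diff), with commitments an assumed PcdCommitments:
    // Sketch: the key-ordered leaves give a deterministic 32-byte merkle root.
    let root: [u8; 32] = commitments
        .create_merkle_tree()?
        .root()
        .ok_or_else(|| Error::msg("an empty commitment set has no merkle root"))?;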
fn encrypt_fields(
&self,
fields_to_encrypt: &[String],
fields2keys: &mut Map<String, Value>,
fields2cipher: &mut Map<String, Value>,
) -> Result<()> {
let map = self.to_value_object()?;
let mut rng = thread_rng();
for (field, value) in map {
if fields_to_encrypt.contains(&field) {
if let None = fields2keys.get(&field) {
let aes_key = Aes256Gcm::generate_key(&mut rng);
fields2keys.insert(
field.to_owned(),
Value::String(aes_key.to_lower_hex_string()),
);
}
let nonce = Aes256Gcm::generate_nonce(&mut rng);
let aes_key_value = fields2keys.get(&field).expect("We should have a key");
let aes_key_str: String = serde_json::from_value(aes_key_value.clone())?;
let aes_key = Vec::from_hex(&aes_key_str)?;
let encrypt_eng = Aes256Gcm::new(aes_key.as_slice().into());
let value_string = serde_json::to_string(&value)?;
let payload = Payload {
msg: value_string.as_bytes(),
aad: AAD,
};
let cipher = encrypt_eng.encrypt(&nonce, payload).map_err(|e| {
Error::msg(format!("Encryption failed for field {}: {}", field, e))
})?;
let mut res = Vec::with_capacity(nonce.len() + cipher.len());
res.extend_from_slice(&nonce);
res.extend_from_slice(&cipher);
fields2cipher.insert(field.to_owned(), Value::String(res.to_lower_hex_string()));
} else {
if let None = fields2cipher.get(&field) {
fields2cipher.insert(field.to_owned(), value.clone());
}
// if we already have something in the encrypted map, we leave it as it is
}
}
Ok(())
}
fn decrypt_all(
&self,
commited_in: OutPoint,
fields2commit: &Map<String, Value>,
fields2keys: &Map<String, Value>,
fields2plain: &mut Map<String, Value>,
) -> Result<()> {
let sorted_key_values = self.to_sorted_key_values()?;
for (i, (field, encrypted_value)) in sorted_key_values.iter().enumerate() {
if let Some(aes_key) = fields2keys.get(field) {
let key_buf = Vec::from_hex(&aes_key.to_string().trim_matches('\"'))?;
let decrypt_eng = Aes256Gcm::new(key_buf.as_slice().into());
let raw_cipher = Vec::from_hex(
&encrypted_value
.as_str()
.ok_or_else(|| Error::msg("Expected string"))?
.trim_matches('\"'),
)?;
if raw_cipher.len() < 28 {
return Err(Error::msg(format!(
"Invalid ciphertext length for field {}",
field
)));
}
let payload = Payload {
msg: &raw_cipher[12..],
aad: AAD,
};
let plain = decrypt_eng
.decrypt(raw_cipher[..12].into(), payload)
.map_err(|_| Error::msg(format!("Failed to decrypt field {}", field)))?;
let decrypted_value: String = String::from_utf8(plain)?;
fields2plain.insert(field.to_owned(), serde_json::from_str(&decrypted_value)?);
} else if let Some(commitment) = fields2commit.get(field) { // We should always have a commitment
// We check if the hashed value is the commitment
let mut value_bin = encrypted_value.to_string().into_bytes();
value_bin.push(i.try_into()?);
let hashed_value = AnkPcdHash::from_value_with_outpoint(&value_bin, &serialize(&commited_in));
if commitment.as_str().unwrap() != &hashed_value.to_string() {
// The value is encrypted, and we don't have the key
// We put the commitment instead of the encrypted value
fields2plain.insert(field.to_owned(), commitment.clone());
} // else it means the value is simply unencrypted, we leave it as it is
} else {
return Err(Error::msg(format!("Missing commitment for field {}", field)));
}
}
Ok(())
}
fn to_value_object(&self) -> Result<Map<String, Value>> {
let value = serde_json::to_value(self)?;
match value {
Value::Object(map) => Ok(map),
_ => Err(Error::msg("not a valid json object"))
}
}
fn is_hex_string(&self, length: Option<usize>) -> Result<()> {
let value = serde_json::to_value(self)?;
match value {
Value::String(s) => {
let vec = Vec::from_hex(&s)?;
if let Some(len) = length {
let got_length = vec.len();
if got_length != len {
return Err(Error::msg(format!("Wrong length: expected {}, got {}", len, got_length)));
}
}
Ok(())
}
_ => Err(Error::msg("Not a string Value"))
}
pub fn find_index_of(&self, field: &str) -> Option<usize> {
self.iter().position(|(key, _)| key.as_str() == field)
}
}
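Since the merkle leaves follow the same BTreeMap key order, find_index_of doubles as the leaf index for a field, which can be fed to rs_merkle to produce an inclusion proof. A sketch (not part of the diff), relying on rs_merkle's documented proof/verify API and an assumed commitments value:
    // Sketch: prove that one field's commitment is part of the state's merkle tree.
    let tree = commitments.create_merkle_tree()?;
    let idx = commitments.find_index_of("field1").ok_or_else(|| Error::msg("unknown field"))?;
    let leaf = *commitments.get("field1").ok_or_else(|| Error::msg("unknown field"))?;
    let root = tree.root().ok_or_else(|| Error::msg("empty tree"))?;
    let proof = tree.proof(&[idx]);
    assert!(proof.verify(root, &[idx], &[leaf], commitments.iter().count()));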
impl Pcd<'_> for Value {}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Tsify)]
#[tsify(into_wasm_abi, from_wasm_abi)]
pub struct ValidationRule {
@ -527,6 +444,46 @@ impl RoleDefinition {
}
}
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Tsify)]
#[tsify(into_wasm_abi, from_wasm_abi)]
pub struct Roles(BTreeMap<String, RoleDefinition>);
impl IntoIterator for Roles {
type Item = (String, RoleDefinition);
type IntoIter = std::collections::btree_map::IntoIter<String, RoleDefinition>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl Roles {
pub fn new(roles: BTreeMap<String, RoleDefinition>) -> Self {
Roles(roles)
}
pub fn to_bytes(&self) -> Result<Vec<u8>> {
serde_json::to_vec(&self.0).map_err(|e| Error::msg(e.to_string()))
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn get(&self, key: &str) -> Option<&RoleDefinition> {
self.0.get(key)
}
pub fn iter(&self) -> std::collections::btree_map::Iter<'_, String, RoleDefinition> {
self.0.iter()
}
pub fn iter_mut(&mut self) -> std::collections::btree_map::IterMut<'_, String, RoleDefinition> {
self.0.iter_mut()
}
}
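Roles is committed as a single unit: PcdCommitments::new hashes the bytes returned by to_bytes under the reserved "roles" key, and since the inner BTreeMap serializes in key order those bytes are deterministic. A sketch (not part of the diff), with role_def standing in for some RoleDefinition:
    // Sketch: deterministic, key-ordered JSON bytes back the "roles" commitment.
    let roles = Roles::new(BTreeMap::from([("validators".to_owned(), role_def)]));
    let bytes = roles.to_bytes()?;
    assert_eq!(roles.len(), 1);
    assert!(roles.get("validators").is_some());
    assert!(!bytes.is_empty());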
#[cfg(test)]
mod tests {
use std::str::FromStr;
@ -590,83 +547,6 @@ mod tests {
.unwrap()
}
#[test]
fn test_sort_map() {
let pcd = json!({
"z": 1,
"b": 2,
"a": 3
});
let expected = json!({
"a": 3,
"b": 2,
"z": 1
});
let sorted_map = pcd.to_sorted_key_values().unwrap();
assert_eq!(Value::Object(sorted_map), expected);
}
#[test]
fn test_sort_empty_map() {
let empty_map = json!({});
let expected = json!({});
let actual_sorted_map = empty_map.to_sorted_key_values().expect("Failed to sort keys");
assert_eq!(
Value::Object(actual_sorted_map),
expected,
"Sorting failed for an empty map"
);
}
#[test]
fn test_sort_already_sorted_map() {
let sorted_map = json!({
"a": 1,
"b": 2,
"c": 3
});
let expected = sorted_map.clone(); // Expected result is the same
let actual_sorted_map = sorted_map.to_sorted_key_values().expect("Failed to sort keys");
assert_eq!(
Value::Object(actual_sorted_map),
expected,
"Sorting failed for an already sorted map"
);
}
#[test]
fn test_sort_mixed_value_map() {
let mixed_map = json!({
"z": [1, 2, 3],
"b": { "nested": true },
"a": 42
});
let expected = json!({
"a": 42,
"b": { "nested": true },
"z": [1, 2, 3]
});
let actual_sorted_map = mixed_map.to_sorted_key_values().expect("Failed to sort keys");
assert_eq!(
Value::Object(actual_sorted_map),
expected,
"Sorting failed for a map with mixed value types"
);
}
#[test]
fn test_validation_rule_new() {
// Valid input
@ -707,9 +587,12 @@ mod tests {
let fields = vec!["field1".to_string(), "field2".to_string()];
let validation_rule = ValidationRule::new(0.5, fields.clone(), 0.5).unwrap();
let pcd = json!({"field1": "value1", "field2": "value2"});
let commitment = pcd.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(commitment).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let validation_hash1 = AnkValidationYesHash::from_merkle_root(new_state_merkle_root);
let validation_hash2 = AnkValidationNoHash::from_merkle_root(new_state_merkle_root);
@ -771,10 +654,12 @@ mod tests {
let fields = vec!["field1".to_string(), "field2".to_string()];
let validation_rule = ValidationRule::new(0.5, fields.clone(), 0.5).unwrap();
let pcd = json!({"field1": "value1", "field2": "value2"});
let commitment = pcd.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(commitment).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let validation_hash_yes = AnkValidationYesHash::from_merkle_root(new_state_merkle_root);
let validation_hash_no = AnkValidationNoHash::from_merkle_root(new_state_merkle_root);
@ -823,9 +708,12 @@ mod tests {
let fields = vec!["field1".to_string(), "field2".to_string()];
let validation_rule = ValidationRule::new(1.0, fields.clone(), 0.5).unwrap();
let pcd = json!({"field1": "value1", "field2": "value2"});
let commitment = pcd.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(commitment).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let validation_hash = AnkValidationYesHash::from_merkle_root(new_state_merkle_root);
@ -868,9 +756,12 @@ mod tests {
let fields = vec!["field1".to_string(), "field2".to_string()];
let validation_rule = ValidationRule::new(0.5, fields.clone(), 0.5).unwrap();
let pcd = json!({"field1": "value1", "field2": "value2"});
let commitment = pcd.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(commitment).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let validation_hash = AnkValidationYesHash::from_merkle_root(new_state_merkle_root);
@ -917,9 +808,12 @@ mod tests {
)
.unwrap()])
.unwrap();
let pcd = json!({"field1": "value1"});
let commitments = pcd.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(commitments).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let alice_spend_key: SecretKey = alice_wallet
.get_client()
@ -971,8 +865,12 @@ mod tests {
let previous_state = json!({ "field1": "old_value1", "field2": "old_value2" });
let new_state = json!({ "field1": "new_value1", "field2": "new_value2" });
let new_state_commitments = new_state.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(new_state_commitments).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let validation_hash = AnkValidationYesHash::from_merkle_root(new_state_merkle_root);
@ -1027,8 +925,12 @@ mod tests {
let previous_state = json!({ "field1": "old_value1", "field2": "old_value2" });
let new_state = json!({ "field1": "new_value1", "field2": "new_value2" });
let new_state_commitments = new_state.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(new_state_commitments).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
// let validation_hash1 = AnkValidationYesHash::from_commitment(new_state_hash);
let validation_hash = AnkValidationNoHash::from_merkle_root(new_state_merkle_root);
@ -1084,8 +986,12 @@ mod tests {
let previous_state = json!({ "field1": "old_value1", "field2": "old_value2" });
let new_state = json!({ "field1": "old_value1", "field2": "new_value2" });
let new_state_commitments = new_state.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(new_state_commitments).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let validation_hash = AnkValidationYesHash::from_merkle_root(new_state_merkle_root);
// let validation_hash = AnkValidationNoHash::from_merkle_root(new_state_merkle_root);
@ -1141,8 +1047,12 @@ mod tests {
let previous_state = json!({ "field1": "old_value1", "field2": "old_value2" });
let new_state = json!({ "field1": "old_value1", "field2": "new_value2" });
let new_state_commitments = new_state.hash_all_fields(OutPoint::null()).unwrap();
let new_state_merkle_root = Value::Object(new_state_commitments).create_merkle_tree().unwrap().root().unwrap();
let clear_state_value = json!({"field1": "value1", "field2": "value2"});
let pcd: BTreeMap<String, Value> = serde_json::from_value(clear_state_value).unwrap();
let public_data = BTreeMap::new();
let roles = BTreeMap::new();
let commitments = PcdCommitments::new(&OutPoint::null(), &Pcd::new(pcd), &Pcd::new(public_data), &Roles::new(roles)).unwrap();
let new_state_merkle_root = commitments.create_merkle_tree().unwrap().root().unwrap();
let validation_hash = AnkValidationYesHash::from_merkle_root(new_state_merkle_root);
// let validation_hash = AnkValidationNoHash::from_merkle_root(new_state_merkle_root);

View File

@ -11,7 +11,7 @@ use sp_client::silentpayments::utils::SilentPaymentAddress;
use sp_client::spclient::SpWallet;
use tsify::Tsify;
use crate::pcd::{Member, RoleDefinition};
use crate::pcd::{Member, Pcd, PcdCommitments, Roles};
use crate::signature::{AnkHash, AnkMessageHash, Proof};
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize, Tsify)]
@ -57,13 +57,13 @@ impl AnkPrdHash {
#[allow(non_camel_case_types)]
pub struct Prd {
pub prd_type: PrdType,
pub process_id: String,
pub sender: String,
pub keys: Map<String, Value>, // key is a key in pcd, value is the key to decrypt it
pub pcd_commitments: Value,
pub process_id: OutPoint,
pub sender: Member,
pub keys: BTreeMap<String, [u8; 32]>, // maps a pcd field name to the 32-byte key that decrypts it
pub pcd_commitments: PcdCommitments,
pub validation_tokens: Vec<Proof>,
pub roles: BTreeMap<String, RoleDefinition>,
pub public_data: BTreeMap<String, String>,
pub roles: Roles,
pub public_data: Pcd,
pub payload: String, // Additional information depending on the type
pub proof: Option<Proof>, // This must be None up to the creation of the network message
}
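With process_id, sender and keys now typed, a Prd can be built and trimmed without any intermediate JSON strings. A sketch (not part of the diff), where process_id, sender, validation_tokens and pcd_commitments are assumed existing values of the corresponding types:
    // Sketch: build a typed response Prd, then keep only the keys the recipient may see.
    let mut prd = Prd::new_response(process_id, sender, validation_tokens, pcd_commitments);
    let visible: HashSet<String> = HashSet::from(["field1".to_owned()]);
    prd.filter_keys(&visible);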
@ -77,11 +77,11 @@ impl Prd {
let validation_tokens = if let Some(proof) = previous_proof { vec![proof] } else { vec![] };
Self {
prd_type: PrdType::Connect,
process_id: String::default(),
pcd_commitments: Value::Null,
sender: serde_json::to_string(&sender).unwrap(),
process_id: OutPoint::null(),
pcd_commitments: PcdCommitments::empty(),
sender,
validation_tokens,
keys: Map::new(),
keys: BTreeMap::new(),
payload: secret_hash.to_string(),
proof: None,
..Default::default()
@ -91,15 +91,15 @@ impl Prd {
pub fn new_update(
process_id: OutPoint,
sender: Member,
roles: BTreeMap<String, RoleDefinition>,
public_data: BTreeMap<String, String>,
keys: Map<String, Value>,
pcd_commitments: Value,
roles: Roles,
public_data: Pcd,
keys: BTreeMap<String, [u8; 32]>,
pcd_commitments: PcdCommitments,
) -> Self {
Self {
prd_type: PrdType::Update,
process_id: process_id.to_string(),
sender: serde_json::to_string(&sender).unwrap(),
process_id,
sender,
validation_tokens: vec![],
keys,
pcd_commitments,
@ -114,17 +114,14 @@ impl Prd {
process_id: OutPoint,
sender: Member,
validation_tokens: Vec<Proof>,
pcd_commitments: Value,
pcd_commitments: PcdCommitments,
) -> Self {
Self {
prd_type: PrdType::Response,
process_id: process_id.to_string(),
sender: serde_json::to_string(&sender).unwrap(),
validation_tokens: validation_tokens,
keys: Map::new(),
process_id,
sender,
validation_tokens,
pcd_commitments,
payload: String::default(),
proof: None,
..Default::default()
}
}
@ -132,17 +129,13 @@ impl Prd {
pub fn new_confirm(
process_id: OutPoint,
sender: Member,
pcd_commitments: Value,
pcd_commitments: PcdCommitments,
) -> Self {
Self {
prd_type: PrdType::Confirm,
process_id: process_id.to_string(),
process_id,
pcd_commitments,
sender: serde_json::to_string(&sender).unwrap(),
validation_tokens: vec![],
keys: Map::new(),
payload: String::default(),
proof: None,
sender,
..Default::default()
}
}
@ -150,8 +143,8 @@ impl Prd {
pub fn new_request(process_id: OutPoint, sender: Member, state_ids: Vec<[u8; 32]>) -> Self {
Self {
prd_type: PrdType::Request,
process_id: process_id.to_string(),
sender: serde_json::to_string(&sender).unwrap(),
process_id,
sender,
payload: serde_json::to_string(&state_ids).unwrap(),
..Default::default()
}
@ -169,8 +162,7 @@ impl Prd {
return Err(anyhow::Error::msg("Proof signed by ourselves, we are parsing our own message"));
}
// take the spending keys in sender
let sender: Member = serde_json::from_str(&prd.sender)?;
let addresses = sender.get_addresses();
let addresses = prd.sender.get_addresses();
let mut spend_keys: Vec<PublicKey> = vec![];
for address in addresses {
spend_keys.push(
@ -198,7 +190,7 @@ impl Prd {
pub fn filter_keys(&mut self, to_keep: &HashSet<String>) {
let current_keys = self.keys.clone();
let filtered_keys: Map<String, Value> = current_keys
let filtered_keys: BTreeMap<String, [u8; 32]> = current_keys
.into_iter()
.filter(|(field, _)| to_keep.contains(field))
.collect();

View File

@ -4,130 +4,85 @@ use std::{
};
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use sp_client::bitcoin::{hex::{DisplayHex, FromHex}, OutPoint, Transaction};
use serde_json::Value;
use sp_client::bitcoin::{OutPoint, Transaction};
use tsify::Tsify;
use crate::{
pcd::{Member, Pcd, RoleDefinition},
pcd::{Member, Pcd, PcdCommitments, RoleDefinition, Roles},
signature::{AnkHash, AnkValidationNoHash, AnkValidationYesHash, Proof},
MutexExt,
serialization::{hex_array_btree, serialize_hex, deserialize_hex},
};
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, Tsify)]
#[tsify(into_wasm_abi)]
#[tsify(into_wasm_abi, from_wasm_abi)]
pub struct ProcessState {
pub commited_in: OutPoint,
#[tsify(type = "Record<string, string>")]
pub pcd_commitment: Value, // If we can't modify a field, we just copy the previous value
pub state_id: String, // the root of the tree created with all the commitments. Serves as a unique id for a state too
pub pcd_commitment: PcdCommitments,
#[serde(serialize_with = "serialize_hex", deserialize_with = "deserialize_hex")]
#[tsify(type = "string")]
pub state_id: [u8; 32], // the root of the tree created with all the commitments + public_data + roles. Serves as a unique id for a state too
#[serde(with = "hex_array_btree")]
#[tsify(type = "Record<string, string>")]
pub encrypted_pcd: Value, // Some fields may be clear, if the owner of the process decides so
pub keys: BTreeMap<String, [u8; 32]>, // We may not always have all the keys
pub validation_tokens: Vec<Proof>, // Signature of the hash of pcd_commitment tagged with some decision like "yes" or "no"
#[tsify(type = "Record<string, string>")]
pub keys: Map<String, Value>, // We may not always have all the keys
pub validation_tokens: Vec<Proof>, // Signature of the hash of the encrypted pcd tagged with some decision like "yes" or "no"
#[tsify(type = "Record<string, string>")]
pub public_data: BTreeMap<String, String>, // long descriptions that can be used for the UI
pub public_data: Pcd,
#[tsify(type = "Record<string, RoleDefinition>")]
pub roles: BTreeMap<String, RoleDefinition>,
pub roles: Roles,
}
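state_id is now raw bytes, always equal to the merkle root of pcd_commitment (which itself covers the clear state, the public data and the roles). A sketch (not part of the diff) of that invariant, with commited_in, clear_state, public_data and roles assumed to be existing values:
    // Sketch: a freshly built state carries the merkle root of its commitments as its id.
    let state = ProcessState::new(commited_in, clear_state, public_data, roles)?;
    let expected = state.pcd_commitment.create_merkle_tree()?.root().unwrap();
    assert_eq!(state.state_id, expected);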
impl ProcessState {
pub fn new(commited_in: OutPoint, clear_state: Map<String, Value>, public_data: &BTreeMap<String, String>, roles: BTreeMap<String, RoleDefinition>) -> anyhow::Result<Self> {
let mut keys = Map::new();
let mut encrypted = Map::new();
pub fn new(commited_in: OutPoint, clear_state: Pcd, public_data: Pcd, roles: Roles) -> anyhow::Result<Self> {
let pcd_commitment = PcdCommitments::new(&commited_in, &clear_state, &public_data, &roles)?;
let clear_pcd = Value::Object(clear_state);
let sorted_pcd = Value::Object(clear_pcd.to_sorted_key_values()?);
let pcd_commitment = Value::Object(sorted_pcd.hash_all_fields(commited_in)?);
let merkle_root = pcd_commitment.create_merkle_tree()?.root().ok_or(anyhow::Error::msg("Invalid merkle tree"))?.to_lower_hex_string();
let keys_to_encrypt: Vec<String> = pcd_commitment.as_object().unwrap().keys().map(|k| k.clone()).collect();
sorted_pcd.encrypt_fields(&keys_to_encrypt, &mut keys, &mut encrypted)?;
let merkle_root = pcd_commitment.create_merkle_tree()?.root().ok_or(anyhow::Error::msg("Invalid merkle tree"))?;
let res = Self {
commited_in,
pcd_commitment,
state_id: merkle_root,
encrypted_pcd: Value::Object(encrypted),
keys,
keys: BTreeMap::new(),
validation_tokens: vec![],
public_data: public_data.clone(),
public_data,
roles,
};
Ok(res)
}
pub fn update_value(&mut self, key: &str, new_value: Value) -> anyhow::Result<()> {
// First decrypt values
let mut clear_pcd = self.decrypt_pcd()?;
if let Some(value) = clear_pcd.get_mut(key) {
// We can only update a value we can decrypt
if let None = self.keys.get(key) {
return Err(anyhow::Error::msg("Trying to update a value we can't access"));
}
// We replace the clear value by the new_value
*value = new_value;
} else {
return Err(anyhow::Error::msg(format!("{} doesn't exist", key)))
}
pub fn update_value(&mut self, key: &str, new_value: &Value) -> anyhow::Result<()> {
// Update the commitment
self.pcd_commitment = Value::Object(Value::Object(clear_pcd.clone()).hash_all_fields(self.commited_in)?);
self.pcd_commitment.update_with_value(&self.commited_in, key, new_value)?;
// TODO: for now we rehash everything, which is a bit wasteful but fine for a prototype
// Update merkle tree
let merkle_tree = self.pcd_commitment.create_merkle_tree()?;
// Update state_id
self.state_id = self.pcd_commitment.create_merkle_tree()?.root().unwrap().to_lower_hex_string();
// Update the encrypted value
Value::Object(clear_pcd).encrypt_fields(&[key.to_string()], &mut self.keys, self.encrypted_pcd.as_object_mut().unwrap())?;
self.state_id = merkle_tree.root().ok_or_else(|| anyhow::Error::msg("Invalid merkle tree"))?;
Ok(())
}
/// Return a decrypted version of the pcd in this state
/// Three possible outcomes for each field:
/// 1) We have the key and we return the decrypted value
/// 2) We don't have the key, we return the commitment
/// 3) The field is unencrypted, we leave it as it is
pub fn decrypt_pcd(&self) -> anyhow::Result<Map<String, Value>> {
let mut fields2plain = Map::new();
let fields2commit = self.pcd_commitment.to_value_object()?;
self.encrypted_pcd.decrypt_all(self.commited_in, &fields2commit, &self.keys, &mut fields2plain)?;
Ok(fields2plain)
}
pub fn get_message_hash(&self, approval: bool) -> anyhow::Result<AnkHash> {
let merkle_root = <Value as Pcd>::create_merkle_tree(&self.pcd_commitment)?.root().unwrap();
if approval {
Ok(AnkHash::ValidationYes(AnkValidationYesHash::from_merkle_root(merkle_root)))
Ok(AnkHash::ValidationYes(AnkValidationYesHash::from_merkle_root(self.state_id)))
} else {
Ok(AnkHash::ValidationNo(AnkValidationNoHash::from_merkle_root(merkle_root)))
Ok(AnkHash::ValidationNo(AnkValidationNoHash::from_merkle_root(self.state_id)))
}
}
fn list_modified_fields(&self, previous_state: Option<&ProcessState>) -> Vec<String> {
let new_state = &self.pcd_commitment;
// Ensure the new state is a JSON object
let new_state_commitments = new_state
.as_object()
.expect("New state should be a JSON object");
if let Some(prev_state) = previous_state {
// Previous state exists; compute differences
let previous_state_commitments = prev_state
.pcd_commitment
.as_object()
.expect("Previous state should be a JSON object");
let previous_state_commitments = &prev_state.pcd_commitment;
// Compute modified fields by comparing with previous state
new_state_commitments
new_state
.iter()
.filter_map(|(key, value)| {
let previous_value = previous_state_commitments.get(key);
@ -140,7 +95,7 @@ impl ProcessState {
.collect()
} else {
// No previous state; all fields are considered modified
new_state_commitments.keys().cloned().collect()
new_state.keys()
}
}
@ -158,7 +113,6 @@ impl ProcessState {
return Err(anyhow::anyhow!("State is identical to the previous state"));
}
// Check if each modified field satisfies at least one applicable rule across all roles
let all_fields_validated = modified_fields.iter().all(|field| {
// Collect applicable rules from all roles for the current field
@ -181,14 +135,11 @@ impl ProcessState {
return false; // No rules apply to this field, consider it invalid
}
let mut merkle_root = [0u8; 32];
merkle_root.copy_from_slice(&Vec::from_hex(&self.state_id).unwrap());
applicable_roles.into_iter().any(|role_def| {
role_def.validation_rules.iter().any(|rule| {
rule.is_satisfied(
field,
merkle_root,
self.state_id,
&self.validation_tokens,
&role_def.members,
).is_ok()
@ -204,15 +155,14 @@ impl ProcessState {
}
pub fn is_empty(&self) -> bool {
self.encrypted_pcd == Value::Null ||
self.pcd_commitment == Value::Null
self.state_id == [0u8; 32]
}
pub fn get_fields_to_validate_for_member(&self, member: &Member) -> anyhow::Result<Vec<String>> {
let mut res: HashSet<String> = HashSet::new();
// Are we in that role?
for (_, role_def) in &self.roles {
for (_, role_def) in self.roles.iter() {
if !role_def.members.contains(member) {
continue;
} else {
@ -232,7 +182,7 @@ impl ProcessState {
/// A process is basically a succession of states
/// The latest state MUST be an empty state with only the commited_in field set to the last unspent outpoint
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize, Tsify)]
#[tsify(into_wasm_abi)]
#[tsify(into_wasm_abi, from_wasm_abi)]
pub struct Process {
states: Vec<ProcessState>,
}
@ -352,14 +302,14 @@ impl Process {
None
}
pub fn get_state_for_id(&self, state_id: &str) -> anyhow::Result<&ProcessState> {
pub fn get_state_for_id(&self, state_id: &[u8; 32]) -> anyhow::Result<&ProcessState> {
if self.get_number_of_states() == 0 {
// This should never happen, but we'd better fail early if it does
return Err(anyhow::Error::msg("process is empty".to_owned()));
}
for p in &self.states {
if state_id == p.state_id.as_str() {
if *state_id == p.state_id {
return Ok(p);
}
}
@ -367,14 +317,14 @@ impl Process {
return Err(anyhow::Error::msg("No state for this merkle root"));
}
pub fn get_state_for_id_mut(&mut self, state_id: &str) -> anyhow::Result<&mut ProcessState> {
pub fn get_state_for_id_mut(&mut self, state_id: &[u8; 32]) -> anyhow::Result<&mut ProcessState> {
if self.get_number_of_states() == 0 {
// This should never happen, but we'd better fail early if it does
return Err(anyhow::Error::msg("process is empty".to_owned()));
}
for p in &mut self.states {
if state_id == p.state_id.as_str() {
if *state_id == p.state_id {
return Ok(p);
}
}
@ -501,12 +451,12 @@ pub fn check_tx_for_process_updates(tx: &Transaction) -> anyhow::Result<OutPoint
log::debug!("Found a match for process tip {}", process_tip);
// This transaction commits a new state
let last_output = &tx.output.get(tx.output.len()-1).unwrap().script_pubkey;
let state_id: String;
let mut state_id = [0u8; 32];
if last_output.is_op_return() {
if last_output.as_bytes().len() != 34 {
return Err(anyhow::Error::msg("commited data is not 32B long"));
}
state_id = last_output.as_bytes()[2..].to_lower_hex_string();
state_id.clone_from_slice(&last_output.as_bytes()[2..]);
} else {
return Err(anyhow::Error::msg("last output must be op_return"));
}
@ -641,9 +591,11 @@ mod tests {
let validation_rule1 =
ValidationRule::new(1.0, vec!["field1".to_owned()], 0.5).unwrap();
let validation_rule2 = ValidationRule::new(1.0, vec!["field2".to_owned()], 0.5).unwrap();
let validation_rule3 = ValidationRule::new(1.0, vec!["roles".to_owned()], 0.5).unwrap();
let validation_rule4 = ValidationRule::new(1.0, vec!["public1".to_owned(), "public2".to_owned()], 0.5).unwrap();
let role_def1 = RoleDefinition {
members: vec![alice_bob],
members: vec![alice_bob.clone()],
validation_rules: vec![validation_rule1],
storages: vec![]
};
@ -653,19 +605,38 @@ mod tests {
storages: vec![]
};
let role_def_roles = RoleDefinition {
members: vec![alice_bob.clone()],
validation_rules: vec![validation_rule3],
storages: vec![]
};
let role_def_public_data = RoleDefinition {
members: vec![alice_bob],
validation_rules: vec![validation_rule4],
storages: vec![]
};
let roles: BTreeMap<String, RoleDefinition> = BTreeMap::from([
("role1".to_owned(), role_def1),
("role2".to_owned(), role_def2)
("role2".to_owned(), role_def2),
("role_roles".to_owned(), role_def_roles),
("role_public_data".to_owned(), role_def_public_data)
]);
let clear_pcd = json!({
let clear_pcd: BTreeMap<String, Value> = serde_json::from_value(json!({
"field1": "value1",
"field2": "value2",
});
})).unwrap();
let outpoint = OutPoint::null();
let public_data: BTreeMap<String, Value> = serde_json::from_value(json!({
"public1": "public1",
"public2": "public2",
})).unwrap();
ProcessState::new(outpoint, clear_pcd.as_object().unwrap().clone(), &BTreeMap::new(), roles).unwrap()
ProcessState::new(outpoint, Pcd::new(clear_pcd), Pcd::new(public_data), Roles::new(roles)).unwrap()
}
#[test]
@ -839,8 +810,8 @@ mod tests {
fn test_valid_everyone_signs_with_prev_state() {
let state = dummy_process_state();
let mut new_state = state.clone();
let key_to_modify = state.encrypted_pcd.as_object().unwrap().keys().next().unwrap();
new_state.update_value(key_to_modify.as_str(), Value::String("new_value1".to_string())).unwrap();
let key_to_modify = state.pcd_commitment.keys().into_iter().next().unwrap();
new_state.update_value(key_to_modify.as_str(), &Value::String("new_value1".to_string())).unwrap();
let alice_key: SecretKey = create_alice_wallet()
.get_client()
.get_spend_key()
@ -869,8 +840,8 @@ mod tests {
fn test_error_not_right_signatures_with_prev_state() {
let state = dummy_process_state();
let mut new_state = state.clone();
let key_to_modify = state.encrypted_pcd.as_object().unwrap().keys().next().unwrap();
new_state.update_value(key_to_modify.as_str(), Value::String("new_value1".to_string())).unwrap();
let key_to_modify = state.pcd_commitment.keys().into_iter().next().unwrap();
new_state.update_value(key_to_modify.as_str(), &Value::String("new_value1".to_string())).unwrap();
let carol_key: SecretKey = create_carol_wallet()
.get_client()
.get_spend_key()