Adding a working bft-json-crdt implementation for the PoC
crates/bft-json-crdt/src/debug.rs (new file, 317 lines)
@@ -0,0 +1,317 @@
use crate::{
    json_crdt::{BaseCrdt, CrdtNode, SignedOp},
    keypair::SignedDigest,
    list_crdt::ListCrdt,
    op::{Op, OpId, PathSegment},
};

#[cfg(feature = "logging-base")]
use {
    crate::{
        keypair::{lsb_32, AuthorId},
        op::{print_hex, print_path, ROOT_ID},
    },
    colored::Colorize,
    random_color::{Luminosity, RandomColor},
};

#[cfg(feature = "logging-list")]
use std::collections::HashMap;
use std::fmt::Display;

#[cfg(feature = "logging-base")]
fn author_to_hex(author: AuthorId) -> String {
    format!("{:#010x}", lsb_32(author))
}

#[cfg(feature = "logging-base")]
fn display_op_id<T: CrdtNode>(op: &Op<T>) -> String {
    let [r, g, b] = RandomColor::new()
        .luminosity(Luminosity::Light)
        .seed(lsb_32(op.author))
        .to_rgb_array();
    format!(
        "[{},{}]",
        author_to_hex(op.author).bold().truecolor(r, g, b),
        op.seq.to_string().yellow()
    )
}

pub fn debug_type_mismatch(_msg: String) {
    #[cfg(feature = "logging-base")]
    {
        println!(" {}\n {_msg}", "type mismatch! ignoring this node".red(),);
    }
}

pub fn debug_path_mismatch(_our_path: Vec<PathSegment>, _op_path: Vec<PathSegment>) {
    #[cfg(feature = "logging-base")]
    {
        println!(
            " {}\n current path: {}\n op path: {}",
            "path mismatch!".red(),
            print_path(_our_path),
            print_path(_op_path),
        );
    }
}

pub fn debug_op_on_primitive(_op_path: Vec<PathSegment>) {
    #[cfg(feature = "logging-base")]
    {
        println!(
            " {} this is an error, ignoring op.\n op path: {}",
            "trying to apply() on a primitive!".red(),
            print_path(_op_path),
        );
    }
}

#[cfg(feature = "logging-base")]
fn display_author(author: AuthorId) -> String {
    let [r, g, b] = RandomColor::new()
        .luminosity(Luminosity::Light)
        .seed(lsb_32(author))
        .to_rgb_array();
    format!(" {} ", author_to_hex(author))
        .black()
        .on_truecolor(r, g, b)
        .to_string()
}

pub trait DebugView {
    fn debug_view(&self, indent: usize) -> String;
}

impl<T: CrdtNode + DebugView> BaseCrdt<T> {
    pub fn debug_view(&self) {
        #[cfg(feature = "logging-json")]
        println!("document is now:\n{}", self.doc.debug_view(0));
    }

    pub fn log_try_apply(&self, _op: &SignedOp) {
        #[cfg(feature = "logging-json")]
        println!(
            "{} trying to apply operation {} from {}",
            display_author(self.id),
            &print_hex(&_op.signed_digest)[..6],
            display_author(_op.inner.author())
        );
    }

    pub fn debug_digest_failure(&self, _op: SignedOp) {
        #[cfg(feature = "logging-json")]
        println!(
            " {} cannot confirm signed_digest from {}",
            "digest failure!".red(),
            display_author(_op.author())
        );
    }

    pub fn log_missing_causal_dep(&self, _missing: &SignedDigest) {
        #[cfg(feature = "logging-json")]
        println!(
            " {} haven't received op with digest {}",
            "missing causal dependency".red(),
            print_hex(_missing)
        );
    }

    pub fn log_actually_apply(&self, _op: &SignedOp) {
        #[cfg(feature = "logging-json")]
        {
            println!(
                " applying op to path: /{}",
                print_path(_op.inner.path.clone())
            );
            println!("{}", _op.inner.debug_view(2));
        }
    }
}

impl<T> Op<T>
where
    T: CrdtNode,
{
    pub fn debug_hash_failure(&self) {
        #[cfg(feature = "logging-base")]
        {
            println!(" {}", "hash failure!".red());
            println!(" expected: {}", print_hex(&self.id));
            println!(" computed: {}", print_hex(&self.hash_to_id()));
        }
    }
}

impl<T> DebugView for T
where
    T: Display,
{
    #[cfg(feature = "logging-base")]
    fn debug_view(&self, _indent: usize) -> String {
        self.to_string()
    }

    #[cfg(not(feature = "logging-base"))]
    fn debug_view(&self, _indent: usize) -> String {
        "".to_string()
    }
}

impl<T> DebugView for Op<T>
where
    T: DebugView + CrdtNode,
{
    #[cfg(not(feature = "logging-base"))]
    fn debug_view(&self, _indent: usize) -> String {
        "".to_string()
    }

    #[cfg(feature = "logging-json")]
    fn debug_view(&self, indent: usize) -> String {
        let op_id = display_op_id(self);
        let content = if self.id == ROOT_ID && self.content.is_none() {
            "root".blue().bold().to_string()
        } else {
            self.content
                .as_ref()
                .map_or("[empty]".to_string(), |c| c.debug_view(indent + 2))
        };
        let content_str = if self.is_deleted && self.id != ROOT_ID {
            content.red().strikethrough().to_string()
        } else {
            content
        };

        format!("{op_id} {content_str}")
    }
}

impl<T> ListCrdt<T>
where
    T: CrdtNode,
{
    pub fn log_ops(&self, highlight: Option<OpId>) {
        #[cfg(feature = "logging-list")]
        {
            let mut lines = Vec::<String>::new();

            // do in-order traversal
            let res: Vec<&Op<T>> = self.ops.iter().collect();
            if res.is_empty() {
                println!("[empty]");
            }

            // figure out parent-child hierarchies from origins
            let mut parent_child_map: HashMap<OpId, Vec<OpId>> = HashMap::new();
            for op in &res {
                let children = parent_child_map.entry(op.origin).or_default();
                children.push(op.id);
            }

            let is_last = |op: &Op<T>| -> bool {
                if op.id == ROOT_ID {
                    return true;
                }
                if let Some(children) = parent_child_map.get(&op.origin) {
                    return *children.last().unwrap() == op.id;
                }
                false
            };

            // make stack of origins
            let mut stack: Vec<(OpId, &str)> = Vec::new();
            stack.push((ROOT_ID, ""));
            let mut prev = None;
            for op in &res {
                let origin_idx = self.find_idx(op.origin).unwrap();
                let origin = &res[origin_idx];
                let origin_id = origin.id;
                if let Some(prev) = prev {
                    if origin_id == prev {
                        // went down one layer, add to stack
                        let stack_prefix_char = if is_last(origin) { " " } else { "│ " };
                        stack.push((prev, stack_prefix_char));
                    }
                }

                // pop back up until we reach the right origin
                while stack.last().unwrap().0 != origin_id {
                    stack.pop();
                }

                let cur_char = if is_last(op) { "╰─" } else { "├─" };
                let prefixes = stack.iter().map(|s| s.1).collect::<Vec<_>>().join("");
                let highlight_text = if highlight.is_some() && highlight.unwrap() == op.id {
                    if op.is_deleted {
                        "<- deleted".bold().red()
                    } else {
                        "<- inserted".bold().green()
                    }
                    .to_string()
                } else {
                    "".to_string()
                };

                let content = if op.id == ROOT_ID {
                    "root".blue().bold().to_string()
                } else {
                    op.content
                        .as_ref()
                        .map_or("[empty]".to_string(), |c| c.hash())
                };
                if op.is_deleted && op.id != ROOT_ID {
                    lines.push(format!(
                        "{}{}{} {} {}",
                        prefixes,
                        cur_char,
                        display_op_id(op),
                        content.strikethrough().red(),
                        highlight_text
                    ));
                } else {
                    lines.push(format!(
                        "{}{}{} {} {}",
                        prefixes,
                        cur_char,
                        display_op_id(op),
                        content,
                        highlight_text
                    ));
                }
                prev = Some(op.id);
            }

            // full string
            let flat = self.iter().map(|t| t.hash()).collect::<Vec<_>>().join("");
            lines.push(format!("Flattened result: {}", flat));
            println!("{}", lines.join("\n"));
        }
    }

    pub fn log_apply(&self, op: &Op<T>) {
        #[cfg(feature = "logging-list")]
        {
            if op.is_deleted {
                println!(
                    "{} Performing a delete of {}@{}",
                    display_author(self.our_id),
                    display_op_id(op),
                    op.sequence_num(),
                );
                return;
            }

            if let Some(content) = op.content.as_ref() {
                println!(
                    "{} Performing an insert of {}@{}: '{}' after {}",
                    display_author(self.our_id),
                    display_op_id(op),
                    op.sequence_num(),
                    content.hash(),
                    display_op_id(op)
                );
            }
        }
    }
}
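For reference, a custom node can opt into these debug dumps by implementing `DebugView` directly. The sketch below is illustrative only: the `Temperature` type is hypothetical and the example assumes one of the `logging-*` features is enabled.

```rust
// Hypothetical example: render a reading at the requested indentation depth.
struct Temperature {
    celsius: f64,
}

impl DebugView for Temperature {
    fn debug_view(&self, indent: usize) -> String {
        format!("{}{}°C", " ".repeat(indent), self.celsius)
    }
}
```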
crates/bft-json-crdt/src/json_crdt.rs (new file, 868 lines)
@@ -0,0 +1,868 @@
use std::{
    collections::{HashMap, HashSet},
    fmt::Display,
};

use crate::{
    debug::{debug_op_on_primitive, DebugView},
    keypair::{sha256, sign, AuthorId, SignedDigest},
    list_crdt::ListCrdt,
    lww_crdt::LwwRegisterCrdt,
    op::{print_hex, print_path, Hashable, Op, OpId, PathSegment},
};
pub use bft_crdt_derive::*;
use fastcrypto::traits::VerifyingKey;
use fastcrypto::{
    ed25519::{Ed25519KeyPair, Ed25519PublicKey, Ed25519Signature},
    traits::{KeyPair, ToFromBytes},
    // Verifier,
};

/// Anything that can be nested in a JSON CRDT
pub trait CrdtNode: CrdtNodeFromValue + Hashable + Clone {
    /// Create a new CRDT of this type
    fn new(id: AuthorId, path: Vec<PathSegment>) -> Self;
    /// Apply an operation to this CRDT, forwarding if necessary
    fn apply(&mut self, op: Op<Value>) -> OpState;
    /// Get a JSON representation of the value in this node
    fn view(&self) -> Value;
}

/// Enum representing possible outcomes of applying an operation to a CRDT
#[derive(Debug, PartialEq)]
pub enum OpState {
    /// Operation applied successfully
    Ok,
    /// Tried to apply an operation to a non-CRDT primitive (e.g. f64, bool, etc.)
    /// If you would like a mutable primitive, wrap it in a [`LwwRegisterCrdt`]
    ErrApplyOnPrimitive,
    /// Tried to apply an operation to a static struct CRDT.
    /// If you would like a mutable object, use a [`Value`]
    ErrApplyOnStruct,
    /// Tried to apply an operation that contains content of the wrong type.
    /// In other words, the content cannot be coerced to the CRDT at the path specified.
    ErrMismatchedType,
    /// The signed digest of the message did not match the claimed author of the message.
    /// This can happen if the message was tampered with during delivery
    ErrDigestMismatch,
    /// The hash of the message did not match the contents of the message.
    /// This can happen if the author tried to perform an equivocation attack by creating an
    /// operation and then modifying it after it was created
    ErrHashMismatch,
    /// Tried to apply an operation to a non-existent path. The author may have forgotten to attach
    /// a causal dependency
    ErrPathMismatch,
    /// Trying to modify/delete the sentinel (zero-th) node element that is used for book-keeping
    ErrListApplyToEmpty,
    /// We have not received all of the causal dependencies of this operation. It has been queued
    /// up and will be executed when its causal dependencies have been delivered
    MissingCausalDependencies,
}
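A caller is expected to branch on the returned `OpState` rather than assume success. A minimal sketch (the surrounding `crdt` and `signed_op` variables are assumed to exist):

```rust
match crdt.apply(signed_op) {
    OpState::Ok => {}
    // the op is parked internally and retried once its dependencies arrive
    OpState::MissingCausalDependencies => {}
    // anything else means the op was rejected and had no effect
    err => eprintln!("rejected op: {err:?}"),
}
```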
/// The following types can be used as a 'terminal' type in CRDTs
pub trait MarkPrimitive: Into<Value> + Default {}
impl MarkPrimitive for bool {}
impl MarkPrimitive for i32 {}
impl MarkPrimitive for i64 {}
impl MarkPrimitive for f64 {}
impl MarkPrimitive for char {}
impl MarkPrimitive for String {}
impl MarkPrimitive for Value {}

/// Implement CrdtNode for non-CRDTs.
/// This is a stub implementation so most functions don't do anything/log an error
impl<T> CrdtNode for T
where
    T: CrdtNodeFromValue + MarkPrimitive + Hashable + Clone,
{
    fn apply(&mut self, _op: Op<Value>) -> OpState {
        OpState::ErrApplyOnPrimitive
    }

    fn view(&self) -> Value {
        self.to_owned().into()
    }

    fn new(_id: AuthorId, _path: Vec<PathSegment>) -> Self {
        debug_op_on_primitive(_path);
        Default::default()
    }
}

/// The base struct for a JSON CRDT. Allows for declaring causal
/// dependencies across fields. It only accepts messages of [`SignedOp`] for BFT.
pub struct BaseCrdt<T: CrdtNode> {
    /// Public key of this CRDT
    pub id: AuthorId,

    /// Internal base CRDT
    pub doc: T,

    /// In a real world scenario, this would be a proper hashgraph that allows for
    /// efficient reconciliation of missing dependencies. We naively keep a hashset
    /// of messages we've seen (represented by their [`SignedDigest`]).
    received: HashSet<SignedDigest>,
    message_q: HashMap<SignedDigest, Vec<SignedOp>>,
}

/// An [`Op<Value>`] with a few bits of extra metadata
#[derive(Clone)]
pub struct SignedOp {
    // Note that this can be different from the author of the inner op, as the inner op
    // could have been created by a different person
    author: AuthorId,
    /// Signed hash using the author's private key. Effectively acts as the [`OpId`];
    /// use this as the ID to figure out what has been delivered already
    pub signed_digest: SignedDigest,
    pub inner: Op<Value>,
    /// List of causal dependencies
    pub depends_on: Vec<SignedDigest>,
}

impl SignedOp {
    pub fn id(&self) -> OpId {
        self.inner.id
    }

    pub fn author(&self) -> AuthorId {
        self.author
    }

    /// Creates a digest of the following fields. Any changes in the fields will change the signed digest
    /// - id (hash of the following)
    ///   - origin
    ///   - author
    ///   - seq
    ///   - is_deleted
    /// - path
    /// - dependencies
    fn digest(&self) -> [u8; 32] {
        let path_string = print_path(self.inner.path.clone());
        let dependency_string = self
            .depends_on
            .iter()
            .map(print_hex)
            .collect::<Vec<_>>()
            .join("");
        let fmt_str = format!("{:?},{path_string},{dependency_string}", self.id());
        sha256(fmt_str)
    }

    /// Sign this digest with the given keypair. Shouldn't need to be called manually,
    /// just use [`SignedOp::from_op`] instead
    fn sign_digest(&mut self, keypair: &Ed25519KeyPair) {
        self.signed_digest = sign(keypair, &self.digest()).sig.to_bytes()
    }

    /// Ensure digest was actually signed by the author it claims to be signed by
    pub fn is_valid_digest(&self) -> bool {
        let digest = Ed25519Signature::from_bytes(&self.signed_digest);
        let pubkey = Ed25519PublicKey::from_bytes(&self.author());
        match (digest, pubkey) {
            (Ok(digest), Ok(pubkey)) => pubkey.verify(&self.digest(), &digest).is_ok(),
            (_, _) => false,
        }
    }

    /// Sign a normal op and add all the needed metadata
    pub fn from_op<T: CrdtNode>(
        value: Op<T>,
        keypair: &Ed25519KeyPair,
        depends_on: Vec<SignedDigest>,
    ) -> Self {
        let author = keypair.public().0.to_bytes();
        let mut new = Self {
            inner: Op {
                content: value.content.map(|c| c.view()),
                origin: value.origin,
                author: value.author,
                seq: value.seq,
                path: value.path,
                is_deleted: value.is_deleted,
                id: value.id,
            },
            author,
            signed_digest: [0u8; 64],
            depends_on,
        };
        new.sign_digest(keypair);
        new
    }
}
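To illustrate the intended flow, here is a small sketch: it assumes an `Op` produced by one of the nested CRDTs (via `LwwRegisterCrdt::set`) and the helpers from `keypair.rs`.

```rust
let keypair = make_keypair();
let mut register = LwwRegisterCrdt::<f64>::new(keypair.public().0.to_bytes(), vec![]);
let op = register.set(1.0);

// wrap, sign, and ship the op; the receiver checks the signature before applying
let signed = SignedOp::from_op(op, &keypair, vec![]);
assert!(signed.is_valid_digest());
```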
impl<T: CrdtNode + DebugView> BaseCrdt<T> {
    /// Create a new BaseCrdt of the given type. Multiple BaseCrdts
    /// can be created from a single keypair, but you are responsible for
    /// routing messages to the right BaseCrdt. Usually you should just make a single
    /// struct that contains all the state you need
    pub fn new(keypair: &Ed25519KeyPair) -> Self {
        let id = keypair.public().0.to_bytes();
        Self {
            id,
            doc: T::new(id, vec![]),
            received: HashSet::new(),
            message_q: HashMap::new(),
        }
    }

    /// Apply a signed operation to this BaseCrdt, verifying integrity and routing to the right
    /// nested CRDT
    pub fn apply(&mut self, op: SignedOp) -> OpState {
        self.log_try_apply(&op);

        #[cfg(feature = "bft")]
        if !op.is_valid_digest() {
            self.debug_digest_failure(op);
            return OpState::ErrDigestMismatch;
        }

        let op_id = op.signed_digest;
        if !op.depends_on.is_empty() {
            for origin in &op.depends_on {
                if !self.received.contains(origin) {
                    self.log_missing_causal_dep(origin);
                    self.message_q.entry(*origin).or_default().push(op);
                    return OpState::MissingCausalDependencies;
                }
            }
        }

        // apply
        self.log_actually_apply(&op);
        let status = self.doc.apply(op.inner);
        self.debug_view();
        self.received.insert(op_id);

        // apply all of its causal dependents if there are any
        let dependent_queue = self.message_q.remove(&op_id);
        if let Some(mut q) = dependent_queue {
            for dependent in q.drain(..) {
                self.apply(dependent);
            }
        }
        status
    }
}
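Out-of-order delivery is handled by the queue above. A sketch of the round trip between two replicas follows; it leans on `Op::sign` and `sign_with_dependencies` (used by the tests later in this file), and the `Doc` struct with a `title` field is hypothetical.

```rust
#[add_crdt_fields]
#[derive(Clone, CrdtNode)]
struct Doc {
    title: LwwRegisterCrdt<String>,
}

let (kp1, kp2) = (make_keypair(), make_keypair());
let mut replica1 = BaseCrdt::<Doc>::new(&kp1);
let mut replica2 = BaseCrdt::<Doc>::new(&kp2);

// the second write declares the first as a causal dependency
let first = replica1.doc.title.set("v1".to_string()).sign(&kp1);
let second = replica1
    .doc
    .title
    .set("v2".to_string())
    .sign_with_dependencies(&kp1, vec![&first]);

// delivered out of order: `second` is queued until `first` arrives, then both apply
assert_eq!(replica2.apply(second), OpState::MissingCausalDependencies);
assert_eq!(replica2.apply(first), OpState::Ok);
assert_eq!(replica2.doc.title.view(), json!("v2").into());
```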
/// An enum representing a JSON value
#[derive(Clone, Debug, PartialEq)]
pub enum Value {
    Null,
    Bool(bool),
    Number(f64),
    String(String),
    Array(Vec<Value>),
    Object(HashMap<String, Value>),
}

impl Display for Value {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Value::Null => "null".to_string(),
                Value::Bool(b) => b.to_string(),
                Value::Number(n) => n.to_string(),
                Value::String(s) => format!("\"{s}\""),
                Value::Array(arr) => {
                    if arr.len() > 1 {
                        format!(
                            "[\n{}\n]",
                            arr.iter()
                                .map(|x| format!(" {x}"))
                                .collect::<Vec<_>>()
                                .join(",\n")
                        )
                    } else {
                        format!(
                            "[ {} ]",
                            arr.iter()
                                .map(|x| x.to_string())
                                .collect::<Vec<_>>()
                                .join(", ")
                        )
                    }
                }
                Value::Object(obj) => format!(
                    "{{ {} }}",
                    obj.iter()
                        .map(|(k, v)| format!(" \"{k}\": {v}"))
                        .collect::<Vec<_>>()
                        .join(",\n")
                ),
            }
        )
    }
}

impl Default for Value {
    fn default() -> Self {
        Self::Null
    }
}

/// Allow easy conversion to and from serde's JSON format. This allows us to use the [`json!`]
/// macro
impl From<Value> for serde_json::Value {
    fn from(value: Value) -> Self {
        match value {
            Value::Null => serde_json::Value::Null,
            Value::Bool(x) => serde_json::Value::Bool(x),
            Value::Number(x) => serde_json::Value::Number(serde_json::Number::from_f64(x).unwrap()),
            Value::String(x) => serde_json::Value::String(x),
            Value::Array(x) => {
                serde_json::Value::Array(x.iter().map(|a| a.clone().into()).collect())
            }
            Value::Object(x) => serde_json::Value::Object(
                x.iter()
                    .map(|(k, v)| (k.clone(), v.clone().into()))
                    .collect(),
            ),
        }
    }
}

impl From<serde_json::Value> for Value {
    fn from(value: serde_json::Value) -> Self {
        match value {
            serde_json::Value::Null => Value::Null,
            serde_json::Value::Bool(x) => Value::Bool(x),
            serde_json::Value::Number(x) => Value::Number(x.as_f64().unwrap()),
            serde_json::Value::String(x) => Value::String(x),
            serde_json::Value::Array(x) => {
                Value::Array(x.iter().map(|a| a.clone().into()).collect())
            }
            serde_json::Value::Object(x) => Value::Object(
                x.iter()
                    .map(|(k, v)| (k.clone(), v.clone().into()))
                    .collect(),
            ),
        }
    }
}

impl Value {
    pub fn into_json(self) -> serde_json::Value {
        self.into()
    }
}
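Since both directions are implemented, the `json!` macro can be used to build a `Value` and `into_json` gets back out. A small sketch:

```rust
use serde_json::json;

let v: Value = json!({ "name": "ada", "scores": [1.0, 2.5] }).into();
assert_eq!(v.into_json(), json!({ "name": "ada", "scores": [1.0, 2.5] }));
```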
/// Conversions from primitive types to [`Value`]
impl From<bool> for Value {
    fn from(val: bool) -> Self {
        Value::Bool(val)
    }
}

impl From<i64> for Value {
    fn from(val: i64) -> Self {
        Value::Number(val as f64)
    }
}

impl From<i32> for Value {
    fn from(val: i32) -> Self {
        Value::Number(val as f64)
    }
}

impl From<f64> for Value {
    fn from(val: f64) -> Self {
        Value::Number(val)
    }
}

impl From<String> for Value {
    fn from(val: String) -> Self {
        Value::String(val)
    }
}

impl From<char> for Value {
    fn from(val: char) -> Self {
        Value::String(val.into())
    }
}

impl<T> From<Option<T>> for Value
where
    T: CrdtNode,
{
    fn from(val: Option<T>) -> Self {
        match val {
            Some(x) => x.view(),
            None => Value::Null,
        }
    }
}

impl<T> From<Vec<T>> for Value
where
    T: CrdtNode,
{
    fn from(value: Vec<T>) -> Self {
        Value::Array(value.iter().map(|x| x.view()).collect())
    }
}

/// Fallibly create a CRDT Node from a JSON Value
pub trait CrdtNodeFromValue: Sized {
    fn node_from(value: Value, id: AuthorId, path: Vec<PathSegment>) -> Result<Self, String>;
}

/// Fallibly cast a JSON Value into a CRDT Node
pub trait IntoCrdtNode<T>: Sized {
    fn into_node(self, id: AuthorId, path: Vec<PathSegment>) -> Result<T, String>;
}

/// [`CrdtNodeFromValue`] implies [`IntoCrdtNode<T>`]
impl<T> IntoCrdtNode<T> for Value
where
    T: CrdtNodeFromValue,
{
    fn into_node(self, id: AuthorId, path: Vec<PathSegment>) -> Result<T, String> {
        T::node_from(self, id, path)
    }
}

/// Trivial conversion from Value to Value as CrdtNodeFromValue
impl CrdtNodeFromValue for Value {
    fn node_from(value: Value, _id: AuthorId, _path: Vec<PathSegment>) -> Result<Self, String> {
        Ok(value)
    }
}
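`IntoCrdtNode` is what lets a plain JSON value be turned into a live CRDT at a given path. A sketch using the `ListCrdt` conversion defined just below (assuming `make_author`, `PathSegment`, and `json!` are in scope):

```rust
let author = make_author(1);
let list: ListCrdt<f64> = Value::from(json!([1.0, 2.0, 3.0]))
    .into_node(author, vec![PathSegment::Field("scores".to_string())])
    .expect("an array converts into a ListCrdt");
assert_eq!(list.view(), vec![1.0, 2.0, 3.0]);
```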
/// Conversions from primitives to CRDTs
impl CrdtNodeFromValue for bool {
    fn node_from(value: Value, _id: AuthorId, _path: Vec<PathSegment>) -> Result<Self, String> {
        if let Value::Bool(x) = value {
            Ok(x)
        } else {
            Err(format!("failed to convert {value:?} -> bool"))
        }
    }
}

impl CrdtNodeFromValue for f64 {
    fn node_from(value: Value, _id: AuthorId, _path: Vec<PathSegment>) -> Result<Self, String> {
        if let Value::Number(x) = value {
            Ok(x)
        } else {
            Err(format!("failed to convert {value:?} -> f64"))
        }
    }
}

impl CrdtNodeFromValue for i64 {
    fn node_from(value: Value, _id: AuthorId, _path: Vec<PathSegment>) -> Result<Self, String> {
        if let Value::Number(x) = value {
            Ok(x as i64)
        } else {
            Err(format!("failed to convert {value:?} -> i64"))
        }
    }
}

impl CrdtNodeFromValue for String {
    fn node_from(value: Value, _id: AuthorId, _path: Vec<PathSegment>) -> Result<Self, String> {
        if let Value::String(x) = value {
            Ok(x)
        } else {
            Err(format!("failed to convert {value:?} -> String"))
        }
    }
}

impl CrdtNodeFromValue for char {
    fn node_from(value: Value, _id: AuthorId, _path: Vec<PathSegment>) -> Result<Self, String> {
        if let Value::String(x) = value.clone() {
            x.chars().next().ok_or(format!(
                "failed to convert {value:?} -> char: found a zero-length string"
            ))
        } else {
            Err(format!("failed to convert {value:?} -> char"))
        }
    }
}

impl<T> CrdtNodeFromValue for LwwRegisterCrdt<T>
where
    T: CrdtNode,
{
    fn node_from(value: Value, id: AuthorId, path: Vec<PathSegment>) -> Result<Self, String> {
        let mut crdt = LwwRegisterCrdt::new(id, path);
        crdt.set(value);
        Ok(crdt)
    }
}

impl<T> CrdtNodeFromValue for ListCrdt<T>
where
    T: CrdtNode,
{
    fn node_from(value: Value, id: AuthorId, path: Vec<PathSegment>) -> Result<Self, String> {
        if let Value::Array(arr) = value {
            let mut crdt = ListCrdt::new(id, path);
            let result: Result<(), String> =
                arr.into_iter().enumerate().try_for_each(|(i, val)| {
                    crdt.insert_idx(i, val);
                    Ok(())
                });
            result?;
            Ok(crdt)
        } else {
            Err(format!("failed to convert {value:?} -> ListCRDT<T>"))
        }
    }
}

#[cfg(test)]
mod test {
    use serde_json::json;

    use crate::{
        json_crdt::{add_crdt_fields, BaseCrdt, CrdtNode, IntoCrdtNode, OpState, Value},
        keypair::make_keypair,
        list_crdt::ListCrdt,
        lww_crdt::LwwRegisterCrdt,
        op::{print_path, ROOT_ID},
    };

    #[test]
    fn test_derive_basic() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Player {
            x: LwwRegisterCrdt<f64>,
            y: LwwRegisterCrdt<f64>,
        }

        let keypair = make_keypair();
        let crdt = BaseCrdt::<Player>::new(&keypair);
        assert_eq!(print_path(crdt.doc.x.path), "x");
        assert_eq!(print_path(crdt.doc.y.path), "y");
    }

    #[test]
    fn test_derive_nested() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Position {
            x: LwwRegisterCrdt<f64>,
            y: LwwRegisterCrdt<f64>,
        }

        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Player {
            pos: Position,
            balance: LwwRegisterCrdt<f64>,
            messages: ListCrdt<String>,
        }

        let keypair = make_keypair();
        let crdt = BaseCrdt::<Player>::new(&keypair);
        assert_eq!(print_path(crdt.doc.pos.x.path), "pos.x");
        assert_eq!(print_path(crdt.doc.pos.y.path), "pos.y");
        assert_eq!(print_path(crdt.doc.balance.path), "balance");
        assert_eq!(print_path(crdt.doc.messages.path), "messages");
    }

    #[test]
    fn test_lww_ops() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Test {
            a: LwwRegisterCrdt<f64>,
            b: LwwRegisterCrdt<bool>,
            c: LwwRegisterCrdt<String>,
        }

        let kp1 = make_keypair();
        let kp2 = make_keypair();
        let mut base1 = BaseCrdt::<Test>::new(&kp1);
        let mut base2 = BaseCrdt::<Test>::new(&kp2);

        let _1_a_1 = base1.doc.a.set(3.0).sign(&kp1);
        let _1_b_1 = base1.doc.b.set(true).sign(&kp1);
        let _2_a_1 = base2.doc.a.set(1.5).sign(&kp2);
        let _2_a_2 = base2.doc.a.set(2.13).sign(&kp2);
        let _2_c_1 = base2.doc.c.set("abc".to_string()).sign(&kp2);

        assert_eq!(base1.doc.a.view(), json!(3.0).into());
        assert_eq!(base2.doc.a.view(), json!(2.13).into());
        assert_eq!(base1.doc.b.view(), json!(true).into());
        assert_eq!(base2.doc.c.view(), json!("abc").into());

        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "a": 3.0,
                "b": true,
                "c": null,
            })
        );
        assert_eq!(
            base2.doc.view().into_json(),
            json!({
                "a": 2.13,
                "b": null,
                "c": "abc",
            })
        );

        assert_eq!(base2.apply(_1_a_1), OpState::Ok);
        assert_eq!(base2.apply(_1_b_1), OpState::Ok);
        assert_eq!(base1.apply(_2_a_1), OpState::Ok);
        assert_eq!(base1.apply(_2_a_2), OpState::Ok);
        assert_eq!(base1.apply(_2_c_1), OpState::Ok);

        assert_eq!(base1.doc.view().into_json(), base2.doc.view().into_json());
        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "a": 2.13,
                "b": true,
                "c": "abc"
            })
        )
    }

    #[test]
    fn test_vec_and_map_ops() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Test {
            a: ListCrdt<String>,
        }

        let kp1 = make_keypair();
        let kp2 = make_keypair();
        let mut base1 = BaseCrdt::<Test>::new(&kp1);
        let mut base2 = BaseCrdt::<Test>::new(&kp2);

        let _1a = base1.doc.a.insert(ROOT_ID, "a".to_string()).sign(&kp1);
        let _1b = base1.doc.a.insert(_1a.id(), "b".to_string()).sign(&kp1);
        let _2c = base2.doc.a.insert(ROOT_ID, "c".to_string()).sign(&kp2);
        let _2d = base2.doc.a.insert(_1b.id(), "d".to_string()).sign(&kp2);

        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "a": ["a", "b"],
            })
        );

        // as _1b hasn't been delivered to base2 yet
        assert_eq!(
            base2.doc.view().into_json(),
            json!({
                "a": ["c"],
            })
        );

        assert_eq!(base2.apply(_1b), OpState::MissingCausalDependencies);
        assert_eq!(base2.apply(_1a), OpState::Ok);
        assert_eq!(base1.apply(_2d), OpState::Ok);
        assert_eq!(base1.apply(_2c), OpState::Ok);
        assert_eq!(base1.doc.view().into_json(), base2.doc.view().into_json());
    }

    #[test]
    fn test_causal_field_dependency() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Item {
            name: LwwRegisterCrdt<String>,
            soulbound: LwwRegisterCrdt<bool>,
        }

        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Player {
            inventory: ListCrdt<Item>,
            balance: LwwRegisterCrdt<f64>,
        }

        let kp1 = make_keypair();
        let kp2 = make_keypair();
        let mut base1 = BaseCrdt::<Player>::new(&kp1);
        let mut base2 = BaseCrdt::<Player>::new(&kp2);

        // require balance update to happen before inventory update
        let _add_money = base1.doc.balance.set(5000.0).sign(&kp1);
        let _spend_money = base1
            .doc
            .balance
            .set(3000.0)
            .sign_with_dependencies(&kp1, vec![&_add_money]);

        let sword: Value = json!({
            "name": "Sword",
            "soulbound": true,
        })
        .into();
        let _new_inventory_item = base1
            .doc
            .inventory
            .insert_idx(0, sword)
            .sign_with_dependencies(&kp1, vec![&_spend_money]);

        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "balance": 3000.0,
                "inventory": [
                    {
                        "name": "Sword",
                        "soulbound": true
                    }
                ]
            })
        );

        // do it completely out of order
        assert_eq!(
            base2.apply(_new_inventory_item),
            OpState::MissingCausalDependencies
        );
        assert_eq!(
            base2.apply(_spend_money),
            OpState::MissingCausalDependencies
        );
        assert_eq!(base2.apply(_add_money), OpState::Ok);
        assert_eq!(base1.doc.view().into_json(), base2.doc.view().into_json());
    }

    #[test]
    fn test_2d_grid() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Game {
            grid: ListCrdt<ListCrdt<LwwRegisterCrdt<bool>>>,
        }

        let kp1 = make_keypair();
        let kp2 = make_keypair();
        let mut base1 = BaseCrdt::<Game>::new(&kp1);
        let mut base2 = BaseCrdt::<Game>::new(&kp2);

        // init a 2d grid
        let row0: Value = json!([true, false]).into();
        let row1: Value = json!([false, true]).into();
        let construct1 = base1.doc.grid.insert_idx(0, row0).sign(&kp1);
        let construct2 = base1.doc.grid.insert_idx(1, row1).sign(&kp1);

        assert_eq!(base2.apply(construct1), OpState::Ok);
        assert_eq!(base2.apply(construct2.clone()), OpState::Ok);

        assert_eq!(base1.doc.view().into_json(), base2.doc.view().into_json());
        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "grid": [[true, false], [false, true]]
            })
        );

        let set1 = base1.doc.grid[0][0].set(false).sign(&kp1);
        let set2 = base2.doc.grid[1][1].set(false).sign(&kp2);
        assert_eq!(base1.apply(set2), OpState::Ok);
        assert_eq!(base2.apply(set1), OpState::Ok);

        assert_eq!(base1.doc.view().into_json(), base2.doc.view().into_json());
        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "grid": [[false, false], [false, false]]
            })
        );

        let topright = base1.doc.grid[0].id_at(1).unwrap();
        base1.doc.grid[0].delete(topright);
        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "grid": [[false], [false, false]]
            })
        );

        base1.doc.grid.delete(construct2.id());
        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "grid": [[false]]
            })
        );
    }

    #[test]
    fn test_arb_json() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Test {
            reg: LwwRegisterCrdt<Value>,
        }

        let kp1 = make_keypair();
        let mut base1 = BaseCrdt::<Test>::new(&kp1);

        let base_val: Value = json!({
            "a": true,
            "b": "asdf",
            "c": {
                "d": [],
                "e": [ false ]
            }
        })
        .into();
        base1.doc.reg.set(base_val).sign(&kp1);
        assert_eq!(
            base1.doc.view().into_json(),
            json!({
                "reg": {
                    "a": true,
                    "b": "asdf",
                    "c": {
                        "d": [],
                        "e": [ false ]
                    }
                }
            })
        );
    }

    #[test]
    fn test_wrong_json_types() {
        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Nested {
            list: ListCrdt<f64>,
        }

        #[add_crdt_fields]
        #[derive(Clone, CrdtNode)]
        struct Test {
            reg: LwwRegisterCrdt<bool>,
            strct: ListCrdt<Nested>,
        }

        let key = make_keypair();
        let mut crdt = BaseCrdt::<Test>::new(&key);

        // wrong type should not go through
        crdt.doc.reg.set(32);
        assert_eq!(crdt.doc.reg.view(), json!(null).into());
        crdt.doc.reg.set(true);
        assert_eq!(crdt.doc.reg.view(), json!(true).into());

        // set nested
        let mut list_view: Value = crdt.doc.strct.view().into();
        assert_eq!(list_view, json!([]).into());

        // only keeps actual numbers
        let list: Value = json!({"list": [0, 123, -0.45, "char", []]}).into();
        crdt.doc.strct.insert_idx(0, list);
        list_view = crdt.doc.strct.view().into();
        assert_eq!(list_view, json!([{ "list": [0, 123, -0.45]}]).into());
    }
}
crates/bft-json-crdt/src/keypair.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
use fastcrypto::traits::VerifyingKey;
pub use fastcrypto::{
    ed25519::{
        Ed25519KeyPair, Ed25519PublicKey, Ed25519Signature, ED25519_PUBLIC_KEY_LENGTH,
        ED25519_SIGNATURE_LENGTH,
    },
    traits::{KeyPair, Signer},
    // Verifier,
};
use sha2::{Digest, Sha256};

/// Represents the ID of a unique node. An Ed25519 public key
pub type AuthorId = [u8; ED25519_PUBLIC_KEY_LENGTH];

/// A signed message
pub type SignedDigest = [u8; ED25519_SIGNATURE_LENGTH];

/// Create a fake public key from a u8
pub fn make_author(n: u8) -> AuthorId {
    let mut id = [0u8; ED25519_PUBLIC_KEY_LENGTH];
    id[0] = n;
    id
}

/// Get the least significant 32 bits of a public key
pub fn lsb_32(pubkey: AuthorId) -> u32 {
    ((pubkey[0] as u32) << 24)
        + ((pubkey[1] as u32) << 16)
        + ((pubkey[2] as u32) << 8)
        + (pubkey[3] as u32)
}

/// SHA256 hash of a string
pub fn sha256(input: String) -> [u8; 32] {
    let mut hasher = Sha256::new();
    hasher.update(input.as_bytes());
    let result = hasher.finalize();
    let mut bytes = [0u8; 32];
    bytes.copy_from_slice(&result[..]);
    bytes
}

/// Generate a random Ed25519 keypair from OS rng
pub fn make_keypair() -> Ed25519KeyPair {
    let mut csprng = rand::thread_rng();
    Ed25519KeyPair::generate(&mut csprng)
}

/// Sign a byte array
pub fn sign(keypair: &Ed25519KeyPair, message: &[u8]) -> Ed25519Signature {
    keypair.sign(message)
}

/// Verify a byte array was signed by the given pubkey
pub fn verify(pubkey: Ed25519PublicKey, message: &[u8], signature: Ed25519Signature) -> bool {
    pubkey.verify(message, &signature).is_ok()
}
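A quick round trip through these helpers (a sketch; it assumes `Ed25519PublicKey` is cloneable, which fastcrypto's key types are):

```rust
let keypair = make_keypair();
let message = sha256("hello".to_string());

// sign with the private half, verify with the public half
let signature = sign(&keypair, &message);
assert!(verify(keypair.public().clone(), &message, signature));
```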
crates/bft-json-crdt/src/lib.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
pub mod debug;
pub mod json_crdt;
pub mod keypair;
pub mod list_crdt;
pub mod lww_crdt;
pub mod op;

extern crate self as bft_json_crdt;
crates/bft-json-crdt/src/list_crdt.rs (new file, 441 lines)
@@ -0,0 +1,441 @@
use crate::{
    debug::debug_path_mismatch,
    json_crdt::{CrdtNode, OpState, Value},
    keypair::AuthorId,
    op::*,
};
use serde::{Deserialize, Serialize};
use std::{
    cmp::{max, Ordering},
    collections::HashMap,
    fmt::Debug,
    ops::{Index, IndexMut},
};

/// An RGA-like list CRDT that can store a CRDT-like datatype
#[derive(Clone, Serialize, Deserialize)]
pub struct ListCrdt<T>
where
    T: CrdtNode,
{
    /// Public key for this node
    pub our_id: AuthorId,
    /// Path to this CRDT
    pub path: Vec<PathSegment>,
    /// List of all the operations we know of
    pub ops: Vec<Op<T>>,
    /// Queue of messages where K is the ID of the message yet to arrive
    /// and V is the list of operations depending on it
    message_q: HashMap<OpId, Vec<Op<T>>>,
    /// The sequence number of this node
    our_seq: SequenceNumber,
}

impl<T> ListCrdt<T>
where
    T: CrdtNode,
{
    /// Create a new List CRDT with the given [`AuthorId`] (it should be unique)
    pub fn new(id: AuthorId, path: Vec<PathSegment>) -> ListCrdt<T> {
        let ops = vec![Op::make_root()];
        ListCrdt {
            our_id: id,
            path,
            ops,
            message_q: HashMap::new(),
            our_seq: 0,
        }
    }

    /// Locally insert some content causally after the given operation
    pub fn insert<U: Into<Value>>(&mut self, after: OpId, content: U) -> Op<Value> {
        let mut op = Op::new(
            after,
            self.our_id,
            self.our_seq + 1,
            false,
            Some(content.into()),
            self.path.to_owned(),
        );

        // we need to know the op ID before setting the path as [`PathSegment::Index`]
        // requires an [`OpId`]
        let new_path = join_path(self.path.to_owned(), PathSegment::Index(op.id));
        op.path = new_path;
        self.apply(op.clone());
        op
    }

    /// Shorthand function to insert at index locally. Indexing ignores deleted items
    pub fn insert_idx<U: Into<Value> + Clone>(&mut self, idx: usize, content: U) -> Op<Value> {
        let mut i = 0;
        for op in &self.ops {
            if !op.is_deleted {
                if idx == i {
                    return self.insert(op.id, content);
                }
                i += 1;
            }
        }
        panic!("index {idx} out of range (length of {i})")
    }

    /// Shorthand to figure out the [`OpId`] of something with a given index.
    /// Useful for declaring a causal dependency if you didn't create the original
    pub fn id_at(&self, idx: usize) -> Option<OpId> {
        let mut i = 0;
        for op in &self.ops {
            if !op.is_deleted {
                if idx == i {
                    return Some(op.id);
                }
                i += 1;
            }
        }
        None
    }

    /// Mark a node as deleted. If the node doesn't exist, it will be stuck
    /// waiting for that node to be created.
    pub fn delete(&mut self, id: OpId) -> Op<Value> {
        let op = Op::new(
            id,
            self.our_id,
            self.our_seq + 1,
            true,
            None,
            join_path(self.path.to_owned(), PathSegment::Index(id)),
        );
        self.apply(op.clone());
        op
    }

    /// Find the idx of an operation with the given [`OpId`]
    pub fn find_idx(&self, id: OpId) -> Option<usize> {
        self.ops.iter().position(|op| op.id == id)
    }

    /// Apply an operation (both local and remote) to this local list CRDT.
    /// Forwards it to a nested CRDT if necessary.
    pub fn apply(&mut self, op: Op<Value>) -> OpState {
        if !op.is_valid_hash() {
            return OpState::ErrHashMismatch;
        }

        if !ensure_subpath(&self.path, &op.path) {
            return OpState::ErrPathMismatch;
        }

        // haven't reached end yet, navigate to inner CRDT
        if op.path.len() - 1 > self.path.len() {
            if let Some(PathSegment::Index(op_id)) = op.path.get(self.path.len()) {
                let op_id = op_id.to_owned();
                if let Some(idx) = self.find_idx(op_id) {
                    if self.ops[idx].content.is_none() {
                        return OpState::ErrListApplyToEmpty;
                    } else {
                        return self.ops[idx].content.as_mut().unwrap().apply(op);
                    }
                } else {
                    debug_path_mismatch(
                        join_path(self.path.to_owned(), PathSegment::Index(op_id)),
                        op.path,
                    );
                    return OpState::ErrPathMismatch;
                };
            } else {
                debug_path_mismatch(self.path.to_owned(), op.path);
                return OpState::ErrPathMismatch;
            }
        }

        // otherwise, this is just a direct replacement
        self.integrate(op.into())
    }

    /// Main CRDT logic of integrating an op properly into our local log
    /// without causing conflicts. This is basically a really fancy
    /// insertion sort.
    ///
    /// Effectively, we
    /// 1) find the parent item
    /// 2) find the right spot to insert before the next node
    fn integrate(&mut self, new_op: Op<T>) -> OpState {
        let op_id = new_op.id;
        let seq = new_op.sequence_num();
        let origin_id = self.find_idx(new_op.origin);

        if origin_id.is_none() {
            self.message_q
                .entry(new_op.origin)
                .or_default()
                .push(new_op);
            return OpState::MissingCausalDependencies;
        }

        let new_op_parent_idx = origin_id.unwrap();

        // if it's a delete operation, we don't need to do much
        self.log_apply(&new_op);
        if new_op.is_deleted {
            let op = &mut self.ops[new_op_parent_idx];
            op.is_deleted = true;
            return OpState::Ok;
        }

        // otherwise, we are in an insert case
        // start looking from right after parent
        // stop when we reach end of document
        let mut i = new_op_parent_idx + 1;
        while i < self.ops.len() {
            let op = &self.ops[i];
            let op_parent_idx = self.find_idx(op.origin).unwrap();

            // idempotency
            if op.id == new_op.id {
                return OpState::Ok;
            }

            // first, let's compare causal origins
            match new_op_parent_idx.cmp(&op_parent_idx) {
                Ordering::Greater => break,
                Ordering::Equal => {
                    // our parents are equal, we are siblings
                    // siblings are sorted first by sequence number then by author id
                    match new_op.sequence_num().cmp(&op.sequence_num()) {
                        Ordering::Greater => break,
                        Ordering::Equal => {
                            // conflict, resolve arbitrarily but deterministically
                            // tie-break on author id as that is unique
                            if new_op.author() > op.author() {
                                break;
                            }
                        }
                        Ordering::Less => (),
                    }
                }
                Ordering::Less => (),
            }
            i += 1;
        }

        // insert at i
        self.ops.insert(i, new_op);
        self.our_seq = max(self.our_seq, seq);
        self.log_ops(Some(op_id));

        // apply all of its causal dependents if there are any
        let dependent_queue = self.message_q.remove(&op_id);
        if let Some(mut q) = dependent_queue {
            for dependent in q.drain(..) {
                self.integrate(dependent);
            }
        }
        OpState::Ok
    }

    /// Make an iterator out of list CRDT contents, ignoring deleted items and empty content
    pub fn iter(&self) -> impl Iterator<Item = &T> {
        self.ops
            .iter()
            .filter(|op| !op.is_deleted && op.content.is_some())
            .map(|op| op.content.as_ref().unwrap())
    }

    /// Convenience function to get a vector of visible list elements
    pub fn view(&self) -> Vec<T> {
        self.iter().map(|i| i.to_owned()).collect()
    }
}
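Concretely, when two inserts share the same origin, the one with the higher sequence number sorts earlier, and equal sequence numbers fall back to the author id, so every replica converges on the same order. A small sketch in the spirit of the unit tests below:

```rust
let mut list1 = ListCrdt::<char>::new(make_author(1), vec![]);
let mut list2 = ListCrdt::<char>::new(make_author(2), vec![]);

// both replicas concurrently insert after the root with the same sequence number
let a = list1.insert(ROOT_ID, 'a'); // author 1, seq 1
let b = list2.insert(ROOT_ID, 'b'); // author 2, seq 1

assert_eq!(list1.apply(b), OpState::Ok);
assert_eq!(list2.apply(a), OpState::Ok);

// equal sequence numbers tie-break on author id, so both replicas agree on one order
assert_eq!(list1.view(), list2.view());
```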
impl<T> Debug for ListCrdt<T>
where
    T: CrdtNode,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "[{}]",
            self.ops
                .iter()
                .map(|op| format!("{:?}", op.id))
                .collect::<Vec<_>>()
                .join(", ")
        )
    }
}

/// Allows us to index into a List CRDT like we would with an array
impl<T> Index<usize> for ListCrdt<T>
where
    T: CrdtNode,
{
    type Output = T;
    fn index(&self, idx: usize) -> &Self::Output {
        let mut i = 0;
        for op in &self.ops {
            if !op.is_deleted && op.content.is_some() {
                if idx == i {
                    return op.content.as_ref().unwrap();
                }
                i += 1;
            }
        }
        panic!("index {idx} out of range (length of {i})")
    }
}

/// Allows us to mutably index into a List CRDT like we would with an array
impl<T> IndexMut<usize> for ListCrdt<T>
where
    T: CrdtNode,
{
    fn index_mut(&mut self, idx: usize) -> &mut Self::Output {
        let mut i = 0;
        for op in &mut self.ops {
            if !op.is_deleted && op.content.is_some() {
                if idx == i {
                    return op.content.as_mut().unwrap();
                }
                i += 1;
            }
        }
        panic!("index {idx} out of range (length of {i})")
    }
}

impl<T> CrdtNode for ListCrdt<T>
where
    T: CrdtNode,
{
    fn apply(&mut self, op: Op<Value>) -> OpState {
        self.apply(op.into())
    }

    fn view(&self) -> Value {
        self.view().into()
    }

    fn new(id: AuthorId, path: Vec<PathSegment>) -> Self {
        Self::new(id, path)
    }
}

#[cfg(feature = "logging-base")]
use crate::debug::DebugView;
#[cfg(feature = "logging-base")]
impl<T> DebugView for ListCrdt<T>
where
    T: CrdtNode + DebugView,
{
    fn debug_view(&self, indent: usize) -> String {
        let spacing = " ".repeat(indent);
        let path_str = print_path(self.path.clone());
        let inner = self
            .ops
            .iter()
            .map(|op| {
                format!(
                    "{spacing}{}: {}",
                    &print_hex(&op.id)[..6],
                    op.debug_view(indent)
                )
            })
            .collect::<Vec<_>>()
            .join("\n");
        format!("List CRDT @ /{path_str}\n{inner}")
    }
}

#[cfg(test)]
mod test {
    use crate::{json_crdt::OpState, keypair::make_author, list_crdt::ListCrdt, op::ROOT_ID};

    #[test]
    fn test_list_simple() {
        let mut list = ListCrdt::<i64>::new(make_author(1), vec![]);
        let _one = list.insert(ROOT_ID, 1);
        let _two = list.insert(_one.id, 2);
        let _three = list.insert(_two.id, 3);
        let _four = list.insert(_one.id, 4);
        assert_eq!(list.view(), vec![1, 4, 2, 3]);
    }

    #[test]
    fn test_list_idempotence() {
        let mut list = ListCrdt::<i64>::new(make_author(1), vec![]);
        let op = list.insert(ROOT_ID, 1);
        for _ in 1..10 {
            assert_eq!(list.apply(op.clone()), OpState::Ok);
        }
        assert_eq!(list.view(), vec![1]);
    }

    #[test]
    fn test_list_delete() {
        let mut list = ListCrdt::<char>::new(make_author(1), vec![]);
        let _one = list.insert(ROOT_ID, 'a');
        let _two = list.insert(_one.id, 'b');
        let _three = list.insert(ROOT_ID, 'c');
        list.delete(_one.id);
        list.delete(_two.id);
        assert_eq!(list.view(), vec!['c']);
    }

    #[test]
    fn test_list_interweave_chars() {
        let mut list = ListCrdt::<char>::new(make_author(1), vec![]);
        let _one = list.insert(ROOT_ID, 'a');
        let _two = list.insert(_one.id, 'b');
        let _three = list.insert(ROOT_ID, 'c');
        assert_eq!(list.view(), vec!['c', 'a', 'b']);
    }

    #[test]
    fn test_list_conflicting_agents() {
        let mut list1 = ListCrdt::<char>::new(make_author(1), vec![]);
        let mut list2 = ListCrdt::new(make_author(2), vec![]);
        let _1_a = list1.insert(ROOT_ID, 'a');
        assert_eq!(list2.apply(_1_a.clone()), OpState::Ok);
        let _2_b = list2.insert(_1_a.id, 'b');
        assert_eq!(list1.apply(_2_b.clone()), OpState::Ok);

        let _2_d = list2.insert(ROOT_ID, 'd');
        let _2_y = list2.insert(_2_b.id, 'y');
        let _1_x = list1.insert(_2_b.id, 'x');

        // create artificial delay, then apply out of order
        assert_eq!(list2.apply(_1_x), OpState::Ok);
        assert_eq!(list1.apply(_2_y), OpState::Ok);
        assert_eq!(list1.apply(_2_d), OpState::Ok);

        assert_eq!(list1.view(), vec!['d', 'a', 'b', 'y', 'x']);
        assert_eq!(list1.view(), list2.view());
    }

    #[test]
    fn test_list_delete_multiple_agent() {
        let mut list1 = ListCrdt::<char>::new(make_author(1), vec![]);
        let mut list2 = ListCrdt::new(make_author(2), vec![]);
        let _1_a = list1.insert(ROOT_ID, 'a');
        assert_eq!(list2.apply(_1_a.clone()), OpState::Ok);
        let _2_b = list2.insert(_1_a.id, 'b');
        let del_1_a = list1.delete(_1_a.id);
        assert_eq!(list1.apply(_2_b), OpState::Ok);
        assert_eq!(list2.apply(del_1_a), OpState::Ok);

        assert_eq!(list1.view(), vec!['b']);
        assert_eq!(list1.view(), list2.view());
    }

    #[test]
    fn test_list_nested() {
        let mut list1 = ListCrdt::<char>::new(make_author(1), vec![]);
        let _c = list1.insert(ROOT_ID, 'c');
        let _a = list1.insert(ROOT_ID, 'a');
        let _d = list1.insert(_c.id, 'd');
        let _b = list1.insert(_a.id, 'b');

        assert_eq!(list1.view(), vec!['a', 'b', 'c', 'd']);
    }
}
crates/bft-json-crdt/src/lww_crdt.rs (new file, 192 lines)
@@ -0,0 +1,192 @@
|
||||
use crate::debug::DebugView;
|
||||
use crate::json_crdt::{CrdtNode, OpState, Value};
|
||||
use crate::op::{join_path, print_path, Op, PathSegment, SequenceNumber};
|
||||
use std::cmp::{max, Ordering};
|
||||
use std::fmt::Debug;
|
||||
|
||||
use crate::keypair::AuthorId;
|
||||
|
||||
/// A simple delete-wins, last-writer-wins (LWW) register CRDT.
|
||||
/// Basically only for adding support for primitives within a more complex CRDT
|
||||
#[derive(Clone)]
|
||||
pub struct LwwRegisterCrdt<T>
|
||||
where
|
||||
T: CrdtNode,
|
||||
{
|
||||
/// Public key for this node
|
||||
pub our_id: AuthorId,
|
||||
/// Path to this CRDT
|
||||
pub path: Vec<PathSegment>,
|
||||
/// Internal value of this CRDT. We wrap it in an Op to retain the author/sequence metadata
|
||||
value: Op<T>,
|
||||
/// The sequence number of this node
|
||||
our_seq: SequenceNumber,
|
||||
}
|
||||
|
||||
impl<T> LwwRegisterCrdt<T>
|
||||
where
|
||||
T: CrdtNode,
|
||||
{
|
||||
/// Create a new register CRDT with the given [`AuthorID`] (it should be unique)
|
||||
pub fn new(id: AuthorId, path: Vec<PathSegment>) -> LwwRegisterCrdt<T> {
|
||||
LwwRegisterCrdt {
|
||||
our_id: id,
|
||||
path,
|
||||
value: Op::make_root(),
|
||||
our_seq: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Sets the current value of the register
|
||||
pub fn set<U: Into<Value>>(&mut self, content: U) -> Op<Value> {
|
||||
let mut op = Op::new(
|
||||
self.value.id,
|
||||
self.our_id,
|
||||
self.our_seq + 1,
|
||||
false,
|
||||
Some(content.into()),
|
||||
self.path.to_owned(),
|
||||
);
|
||||
|
||||
// we need to know the op ID before setting the path as [`PathSegment::Index`] requires an
|
||||
// [`OpID`]
|
||||
let new_path = join_path(self.path.to_owned(), PathSegment::Index(op.id));
|
||||
op.path = new_path;
|
||||
self.apply(op.clone());
|
||||
op
|
||||
}
|
||||
|
||||
/// Apply an operation (both local and remote) to this local register CRDT.
|
||||
pub fn apply(&mut self, op: Op<Value>) -> OpState {
|
||||
if !op.is_valid_hash() {
|
||||
return OpState::ErrHashMismatch;
|
||||
}
|
||||
|
||||
let op: Op<T> = op.into();
|
||||
let seq = op.sequence_num();
|
||||
|
||||
// take most recent update by sequence number
|
||||
match seq.cmp(&self.our_seq) {
|
||||
Ordering::Greater => {
|
||||
self.value = Op {
|
||||
id: self.value.id,
|
||||
..op
|
||||
};
|
||||
}
|
||||
Ordering::Equal => {
|
||||
// if we are equal, tie break on author
|
||||
if op.author() < self.value.author() {
|
||||
// we want to keep id constant so replace everything but id
|
||||
self.value = Op {
|
||||
id: self.value.id,
|
||||
..op
|
||||
};
|
||||
}
|
||||
}
|
||||
Ordering::Less => {} // LWW, ignore if its outdate
|
||||
};
|
||||
|
||||
// update bookkeeping
|
||||
self.our_seq = max(self.our_seq, seq);
|
||||
OpState::Ok
|
||||
}
|
||||
|
||||
fn view(&self) -> Option<T> {
|
||||
self.value.content.to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> CrdtNode for LwwRegisterCrdt<T>
|
||||
where
|
||||
T: CrdtNode,
|
||||
{
|
||||
fn apply(&mut self, op: Op<Value>) -> OpState {
|
||||
self.apply(op.into())
|
||||
}
|
||||
|
||||
fn view(&self) -> Value {
|
||||
self.view().into()
|
||||
}
|
||||
|
||||
fn new(id: AuthorId, path: Vec<PathSegment>) -> Self {
|
||||
Self::new(id, path)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> DebugView for LwwRegisterCrdt<T>
|
||||
where
|
||||
T: CrdtNode + DebugView,
|
||||
{
|
||||
fn debug_view(&self, indent: usize) -> String {
|
||||
let spacing = " ".repeat(indent);
|
||||
let path_str = print_path(self.path.clone());
|
||||
let inner = self.value.debug_view(indent + 2);
|
||||
format!("LWW Register CRDT @ /{path_str}\n{spacing}{inner}")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Debug for LwwRegisterCrdt<T>
|
||||
where
|
||||
T: CrdtNode,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{:?}", self.value.id)
|
||||
}
|
||||
}

#[cfg(test)]
mod test {
    use super::LwwRegisterCrdt;
    use crate::{json_crdt::OpState, keypair::make_author};

    #[test]
    fn test_lww_simple() {
        let mut register = LwwRegisterCrdt::new(make_author(1), vec![]);
        assert_eq!(register.view(), None);
        register.set(1);
        assert_eq!(register.view(), Some(1));
        register.set(99);
        assert_eq!(register.view(), Some(99));
    }

    #[test]
    fn test_lww_multiple_writer() {
        let mut register1 = LwwRegisterCrdt::new(make_author(1), vec![]);
        let mut register2 = LwwRegisterCrdt::new(make_author(2), vec![]);
        let _a = register1.set('a');
        let _b = register1.set('b');
        let _c = register2.set('c');
        assert_eq!(register2.view(), Some('c'));
        assert_eq!(register1.apply(_c), OpState::Ok);
        assert_eq!(register2.apply(_b), OpState::Ok);
        assert_eq!(register2.apply(_a), OpState::Ok);
        assert_eq!(register1.view(), Some('b'));
        assert_eq!(register2.view(), Some('b'));
    }

    #[test]
    fn test_lww_idempotence() {
        let mut register = LwwRegisterCrdt::new(make_author(1), vec![]);
        let op = register.set(1);
        for _ in 1..10 {
            assert_eq!(register.apply(op.clone()), OpState::Ok);
        }
        assert_eq!(register.view(), Some(1));
    }

    #[test]
    fn test_lww_consistent_tiebreak() {
        let mut register1 = LwwRegisterCrdt::new(make_author(1), vec![]);
        let mut register2 = LwwRegisterCrdt::new(make_author(2), vec![]);
        let _a = register1.set('a');
        let _b = register2.set('b');
        assert_eq!(register1.apply(_b), OpState::Ok);
        assert_eq!(register2.apply(_a), OpState::Ok);
        let _c = register1.set('c');
        let _d = register2.set('d');
        assert_eq!(register2.apply(_c), OpState::Ok);
        assert_eq!(register1.apply(_d), OpState::Ok);
        assert_eq!(register1.view(), register2.view());
        assert_eq!(register1.view(), Some('c'));
    }
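
    // Illustrative sketch, not part of the original commit: `apply` should reject an op whose
    // fields no longer match its hash-derived id.
    #[test]
    fn test_lww_rejects_tampered_op() {
        let mut register = LwwRegisterCrdt::new(make_author(1), vec![]);
        let op = register.set(1);
        assert_eq!(register.view(), Some(1));

        // bump the sequence number without recomputing the id so the hash check fails
        let mut tampered = op;
        tampered.seq += 1;
        assert_eq!(register.apply(tampered), OpState::ErrHashMismatch);
        // the register keeps its previous value
        assert_eq!(register.view(), Some(1));
    }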
}
237
crates/bft-json-crdt/src/op.rs
Normal file
@@ -0,0 +1,237 @@
use crate::debug::{debug_path_mismatch, debug_type_mismatch};
use crate::json_crdt::{CrdtNode, CrdtNodeFromValue, IntoCrdtNode, SignedOp, Value};
use crate::keypair::{sha256, AuthorId};
use fastcrypto::ed25519::Ed25519KeyPair;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;

/// A Lamport clock timestamp. Used to track document versions
pub type SequenceNumber = u64;

/// A unique ID for a single [`Op<T>`]
pub type OpId = [u8; 32];

/// The root/sentinel op
pub const ROOT_ID: OpId = [0u8; 32];

/// Part of a path to get to a specific CRDT in a nested CRDT
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PathSegment {
    Field(String),
    Index(OpId),
}
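
// For example (illustrative, not from the original code): a register stored under a "title"
// field produces ops whose path looks roughly like
// `vec![PathSegment::Field("title".to_string()), PathSegment::Index(op_id)]`,
// where `op_id` is the id of the op that created the register's current value.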

/// Format a byte array as a hex string
pub fn print_hex<const N: usize>(bytes: &[u8; N]) -> String {
    bytes
        .iter()
        .map(|byte| format!("{byte:02x}"))
        .collect::<Vec<_>>()
        .join("")
}

/// Pretty print a path
pub fn print_path(path: Vec<PathSegment>) -> String {
    path.iter()
        .map(|p| match p {
            PathSegment::Field(s) => s.to_string(),
            PathSegment::Index(i) => print_hex(i)[..6].to_string(),
        })
        .collect::<Vec<_>>()
        .join(".")
}

/// Ensure `our_path` is a subpath of `op_path`. Note that two identical paths are considered
/// subpaths of each other.
pub fn ensure_subpath(our_path: &Vec<PathSegment>, op_path: &Vec<PathSegment>) -> bool {
    // if our_path is longer, it cannot be a subpath
    if our_path.len() > op_path.len() {
        debug_path_mismatch(our_path.to_owned(), op_path.to_owned());
        return false;
    }

    // iterate to the end of our_path, ensuring each element is the same
    for i in 0..our_path.len() {
        let ours = our_path.get(i);
        let theirs = op_path.get(i);
        if ours != theirs {
            debug_path_mismatch(our_path.to_owned(), op_path.to_owned());
            return false;
        }
    }
    true
}
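
// Illustrative sketch, not part of the original commit: the subpath rule above in test form.
#[cfg(test)]
mod subpath_example {
    use super::{ensure_subpath, PathSegment};

    #[test]
    fn prefixes_are_subpaths() {
        let doc = vec![PathSegment::Field("doc".to_string())];
        let title = vec![
            PathSegment::Field("doc".to_string()),
            PathSegment::Field("title".to_string()),
        ];
        // identical paths count as subpaths of each other
        assert!(ensure_subpath(&doc, &doc));
        // a proper prefix is a subpath of the longer path
        assert!(ensure_subpath(&doc, &title));
        // but a longer path is not a subpath of its prefix
        assert!(!ensure_subpath(&title, &doc));
    }
}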

/// Helper to easily append a [`PathSegment`] to a path
pub fn join_path(path: Vec<PathSegment>, segment: PathSegment) -> Vec<PathSegment> {
    let mut p = path;
    p.push(segment);
    p
}

/// Parse out the field name from the last [`PathSegment`] of a path
pub fn parse_field(path: Vec<PathSegment>) -> Option<String> {
    path.last().and_then(|segment| {
        if let PathSegment::Field(key) = segment {
            Some(key.to_string())
        } else {
            None
        }
    })
}

/// Represents a single node in a CRDT
#[derive(Clone, Serialize, Deserialize)]
pub struct Op<T>
where
    T: CrdtNode,
{
    pub origin: OpId,
    pub author: AuthorId, // pub key of author
    pub seq: SequenceNumber,
    pub content: Option<T>,
    pub path: Vec<PathSegment>, // path to get to target CRDT
    pub is_deleted: bool,
    pub id: OpId, // hash of the operation
}

/// Something that can be turned into a string. This allows us to use [`content`] as an
/// input to the SHA-256 hash
pub trait Hashable {
    fn hash(&self) -> String;
}

/// Anything that implements Debug is trivially hashable
impl<T> Hashable for T
where
    T: Debug,
{
    fn hash(&self) -> String {
        format!("{self:?}")
    }
}
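
// Illustrative sketch, not part of the original commit: with the blanket impl above, the hash
// input for any `Debug` type is just its debug formatting.
#[cfg(test)]
mod hashable_example {
    use super::Hashable;

    #[test]
    fn debug_formatting_is_the_hash_input() {
        assert_eq!(42i32.hash(), "42");
        assert_eq!(Some("title").hash(), "Some(\"title\")");
    }
}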

/// Conversion from `Op<Value>` -> `Op<T>`, given that `T` is a CRDT that can be created from a
/// JSON value
impl Op<Value> {
    pub fn into<T: CrdtNodeFromValue + CrdtNode>(self) -> Op<T> {
        let content = if let Some(inner_content) = self.content {
            match inner_content.into_node(self.id, self.path.clone()) {
                Ok(node) => Some(node),
                Err(msg) => {
                    debug_type_mismatch(msg);
                    None
                }
            }
        } else {
            None
        };
        Op {
            content,
            origin: self.origin,
            author: self.author,
            seq: self.seq,
            path: self.path,
            is_deleted: self.is_deleted,
            id: self.id,
        }
    }
}

impl<T> Op<T>
where
    T: CrdtNode,
{
    pub fn sign(self, keypair: &Ed25519KeyPair) -> SignedOp {
        SignedOp::from_op(self, keypair, vec![])
    }

    pub fn sign_with_dependencies(
        self,
        keypair: &Ed25519KeyPair,
        dependencies: Vec<&SignedOp>,
    ) -> SignedOp {
        SignedOp::from_op(
            self,
            keypair,
            dependencies
                .iter()
                .map(|dep| dep.signed_digest)
                .collect::<Vec<_>>(),
        )
    }

    pub fn author(&self) -> AuthorId {
        self.author
    }

    pub fn sequence_num(&self) -> SequenceNumber {
        self.seq
    }

    pub fn new(
        origin: OpId,
        author: AuthorId,
        seq: SequenceNumber,
        is_deleted: bool,
        content: Option<T>,
        path: Vec<PathSegment>,
    ) -> Op<T> {
        let mut op = Self {
            origin,
            id: ROOT_ID,
            author,
            seq,
            is_deleted,
            content,
            path,
        };
        op.id = op.hash_to_id();
        op
    }

    /// Generate an [`OpId`] by hashing our contents. The hash includes:
    /// - content
    /// - origin
    /// - author
    /// - seq
    /// - is_deleted
    pub fn hash_to_id(&self) -> OpId {
        let content_str = match self.content.as_ref() {
            Some(content) => content.hash(),
            None => "".to_string(),
        };
        let fmt_str = format!(
            "{:?},{:?},{:?},{:?},{content_str}",
            self.origin, self.author, self.seq, self.is_deleted,
        );
        sha256(fmt_str)
    }

    /// Rehashes the contents to make sure they match the ID
    pub fn is_valid_hash(&self) -> bool {
        // make sure content is only None for deletion events
        if self.content.is_none() && !self.is_deleted {
            return false;
        }

        // try to avoid the expensive sig check by failing early here
        let res = self.hash_to_id() == self.id;
        if !res {
            self.debug_hash_failure();
        }
        res
    }

    /// Special constructor for defining the sentinel root node
    pub fn make_root() -> Op<T> {
        Self {
            origin: ROOT_ID,
            id: ROOT_ID,
            author: [0u8; 32],
            seq: 0,
            is_deleted: false,
            content: None,
            path: vec![],
        }
    }
}
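
// Illustrative sketch, not part of the original commit: `Op::new` derives `id` from the other
// fields via `hash_to_id`, so mutating a hashed field afterwards should fail `is_valid_hash`.
// A deletion op is used here so no concrete content value is needed.
#[cfg(test)]
mod op_hash_example {
    use super::{Op, ROOT_ID};
    use crate::json_crdt::Value;

    #[test]
    fn tampering_breaks_the_hash() {
        let op: Op<Value> = Op::new(ROOT_ID, [0u8; 32], 1, true, None, vec![]);
        assert!(op.is_valid_hash());

        // change a hashed field without recomputing the id
        let mut tampered = op;
        tampered.seq += 1;
        assert!(!tampered.is_valid_hash());
    }
}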