huskies: merge 764

This commit is contained in:
dave
2026-04-28 09:49:57 +00:00
parent 3d986a733b
commit d2d5ef8afa
5 changed files with 1147 additions and 6 deletions
+96 -1
View File
@@ -61,6 +61,16 @@ pub(super) struct CrdtState {
pub(super) index: HashMap<String, usize>,
/// Maps node_id (hex) → index in the nodes ListCrdt for O(1) lookup.
pub(super) node_index: HashMap<String, usize>,
/// Maps agent_id → index in the tokens ListCrdt for O(1) lookup.
pub(super) token_index: HashMap<String, usize>,
/// Maps story_id → index in the merge_jobs ListCrdt for O(1) lookup.
pub(super) merge_job_index: HashMap<String, usize>,
/// Maps agent_id → index in the active_agents ListCrdt for O(1) lookup.
pub(super) active_agent_index: HashMap<String, usize>,
/// Maps story_id → index in the test_jobs ListCrdt for O(1) lookup.
pub(super) test_job_index: HashMap<String, usize>,
/// Maps node_id → index in the agent_throttle ListCrdt for O(1) lookup.
pub(super) agent_throttle_index: HashMap<String, usize>,
/// Channel sender for fire-and-forget op persistence.
pub(super) persist_tx: mpsc::UnboundedSender<SignedOp>,
/// Max sequence number seen across all ops during init() replay.
@@ -146,11 +156,21 @@ pub async fn init(db_path: &Path) -> Result<(), sqlx::Error> {
// Build the indices from the reconstructed state.
let index = rebuild_index(&crdt);
let node_index = rebuild_node_index(&crdt);
let token_index = rebuild_token_index(&crdt);
let merge_job_index = rebuild_merge_job_index(&crdt);
let active_agent_index = rebuild_active_agent_index(&crdt);
let test_job_index = rebuild_test_job_index(&crdt);
let agent_throttle_index = rebuild_agent_throttle_index(&crdt);
// Advance the top-level list clocks to the Lamport floor so that
// list-level inserts (new items / new nodes) don't re-emit low seq numbers.
// list-level inserts don't re-emit low seq numbers.
crdt.doc.items.advance_seq(lamport_floor);
crdt.doc.nodes.advance_seq(lamport_floor);
crdt.doc.tokens.advance_seq(lamport_floor);
crdt.doc.merge_jobs.advance_seq(lamport_floor);
crdt.doc.active_agents.advance_seq(lamport_floor);
crdt.doc.test_jobs.advance_seq(lamport_floor);
crdt.doc.agent_throttle.advance_seq(lamport_floor);
slog!(
"[crdt] Initialised: {} ops replayed, {} items indexed, {} nodes indexed, lamport_floor={}",
@@ -199,6 +219,11 @@ pub async fn init(db_path: &Path) -> Result<(), sqlx::Error> {
keypair,
index,
node_index,
token_index,
merge_job_index,
active_agent_index,
test_job_index,
agent_throttle_index,
persist_tx,
lamport_floor,
};
@@ -237,6 +262,11 @@ pub fn init_for_test() {
keypair,
index: HashMap::new(),
node_index: HashMap::new(),
token_index: HashMap::new(),
merge_job_index: HashMap::new(),
active_agent_index: HashMap::new(),
test_job_index: HashMap::new(),
agent_throttle_index: HashMap::new(),
persist_tx,
lamport_floor: 0,
};
@@ -296,6 +326,61 @@ pub(super) fn rebuild_node_index(crdt: &BaseCrdt<PipelineDoc>) -> HashMap<String
map
}
/// Rebuild the agent_id → tokens list index.
///
/// Walks the `tokens` ListCrdt once and maps each entry whose `agent_id`
/// views as a string to its position, enabling O(1) lookup by agent_id.
/// If the same agent_id appears more than once, the last occurrence wins
/// (same semantics as repeated `HashMap::insert`).
pub(super) fn rebuild_token_index(crdt: &BaseCrdt<PipelineDoc>) -> HashMap<String, usize> {
    crdt.doc
        .tokens
        .iter()
        .enumerate()
        .filter_map(|(pos, entry)| match entry.agent_id.view() {
            JsonValue::String(ref key) => Some((key.clone(), pos)),
            // Non-string (or missing) agent_id entries are skipped, as before.
            _ => None,
        })
        .collect()
}
/// Rebuild the story_id → merge_jobs list index.
///
/// Walks the `merge_jobs` ListCrdt once and maps each entry whose `story_id`
/// views as a string to its position, enabling O(1) lookup by story_id.
/// Duplicate story_ids resolve to the last occurrence, matching
/// `HashMap::insert` semantics.
pub(super) fn rebuild_merge_job_index(crdt: &BaseCrdt<PipelineDoc>) -> HashMap<String, usize> {
    crdt.doc
        .merge_jobs
        .iter()
        .enumerate()
        .filter_map(|(pos, entry)| match entry.story_id.view() {
            JsonValue::String(ref key) => Some((key.clone(), pos)),
            // Entries without a string story_id are skipped, as before.
            _ => None,
        })
        .collect()
}
/// Rebuild the agent_id → active_agents list index.
///
/// Walks the `active_agents` ListCrdt once and maps each entry whose
/// `agent_id` views as a string to its position, enabling O(1) lookup by
/// agent_id. Duplicate agent_ids resolve to the last occurrence, matching
/// `HashMap::insert` semantics.
pub(super) fn rebuild_active_agent_index(crdt: &BaseCrdt<PipelineDoc>) -> HashMap<String, usize> {
    crdt.doc
        .active_agents
        .iter()
        .enumerate()
        .filter_map(|(pos, entry)| match entry.agent_id.view() {
            JsonValue::String(ref key) => Some((key.clone(), pos)),
            // Entries without a string agent_id are skipped, as before.
            _ => None,
        })
        .collect()
}
/// Rebuild the story_id → test_jobs list index.
///
/// Walks the `test_jobs` ListCrdt once and maps each entry whose `story_id`
/// views as a string to its position, enabling O(1) lookup by story_id.
/// Duplicate story_ids resolve to the last occurrence, matching
/// `HashMap::insert` semantics.
pub(super) fn rebuild_test_job_index(crdt: &BaseCrdt<PipelineDoc>) -> HashMap<String, usize> {
    crdt.doc
        .test_jobs
        .iter()
        .enumerate()
        .filter_map(|(pos, entry)| match entry.story_id.view() {
            JsonValue::String(ref key) => Some((key.clone(), pos)),
            // Entries without a string story_id are skipped, as before.
            _ => None,
        })
        .collect()
}
/// Rebuild the node_id → agent_throttle list index.
///
/// Walks the `agent_throttle` ListCrdt once and maps each entry whose
/// `node_id` views as a string to its position, enabling O(1) lookup by
/// node_id. Duplicate node_ids resolve to the last occurrence, matching
/// `HashMap::insert` semantics.
pub(super) fn rebuild_agent_throttle_index(crdt: &BaseCrdt<PipelineDoc>) -> HashMap<String, usize> {
    crdt.doc
        .agent_throttle
        .iter()
        .enumerate()
        .filter_map(|(pos, entry)| match entry.node_id.view() {
            JsonValue::String(ref key) => Some((key.clone(), pos)),
            // Entries without a string node_id are skipped, as before.
            _ => None,
        })
        .collect()
}
// ── Write path ───────────────────────────────────────────────────────
/// Create a CRDT op via `op_fn`, sign it, apply it, and send it to the
@@ -503,6 +588,11 @@ mod tests {
keypair: kp,
index: HashMap::new(),
node_index: HashMap::new(),
token_index: HashMap::new(),
merge_job_index: HashMap::new(),
active_agent_index: HashMap::new(),
test_job_index: HashMap::new(),
agent_throttle_index: HashMap::new(),
persist_tx,
lamport_floor: 0,
};
@@ -570,6 +660,11 @@ mod tests {
keypair: kp,
index: HashMap::new(),
node_index: HashMap::new(),
token_index: HashMap::new(),
merge_job_index: HashMap::new(),
active_agent_index: HashMap::new(),
test_job_index: HashMap::new(),
agent_throttle_index: HashMap::new(),
persist_tx,
lamport_floor: 0,
};