huskies: merge 1009

This commit is contained in:
dave
2026-05-13 22:50:13 +00:00
parent a5cd3a2152
commit 4e007bb770
56 changed files with 453 additions and 384 deletions
+30 -21
View File
@@ -11,7 +11,7 @@ use serde_json::json;
use super::super::state::{apply_and_persist, emit_event, get_crdt, rebuild_index};
use super::super::types::CrdtEvent;
use crate::io::story_metadata::QaMode;
use crate::pipeline_state::{Stage, stage_dir_name};
use crate::pipeline_state::{AgentClaim, Stage, stage_dir_name};
/// Set the typed `depends_on` CRDT register for a pipeline item.
///
@@ -221,7 +221,6 @@ pub fn set_qa_mode(story_id: &str, mode: Option<QaMode>) -> bool {
///
/// `stage` is the typed pipeline state; it is serialised to the canonical
/// clean wire form (story 934) via [`stage_dir_name`] at the CRDT boundary.
#[allow(clippy::too_many_arguments)]
pub fn write_item(
story_id: &str,
stage: &Stage,
@@ -229,11 +228,14 @@ pub fn write_item(
agent: Option<&str>,
retry_count: Option<i64>,
depends_on: Option<&str>,
claimed_by: Option<&str>,
claimed_at: Option<f64>,
merged_at: Option<f64>,
) {
let stage_str = stage_dir_name(stage);
let claim: Option<&AgentClaim> = match stage {
Stage::Coding { claim } => claim.as_ref(),
Stage::Merge { claim, .. } => claim.as_ref(),
_ => None,
};
let Some(state_mutex) = get_crdt() else {
return;
};
@@ -291,14 +293,19 @@ pub fn write_item(
s.crdt.doc.items[idx].depends_on.set(d.to_string())
});
}
if let Some(cb) = claimed_by {
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claimed_by.set(cb.to_string())
});
}
if let Some(ca) = claimed_at {
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claimed_at.set(ca));
}
let (claim_agent_str, claim_ts_val) = match claim {
Some(c) => (
c.agent.0.as_str().to_string(),
c.claimed_at.timestamp() as f64,
),
None => (String::new(), 0.0),
};
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claim_agent.set(claim_agent_str)
});
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claim_ts.set(claim_ts_val)
});
if let Some(ma) = merged_at {
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].merged_at.set(ma));
}
@@ -322,6 +329,13 @@ pub fn write_item(
}
} else {
// Insert new item.
let (insert_claim_agent, insert_claim_ts) = match claim {
Some(c) => (
c.agent.0.as_str().to_string(),
c.claimed_at.timestamp() as f64,
),
None => (String::new(), 0.0),
};
let item_json: JsonValue = json!({
"story_id": story_id,
"stage": stage_str,
@@ -329,8 +343,8 @@ pub fn write_item(
"agent": agent.unwrap_or(""),
"retry_count": retry_count.unwrap_or(0) as f64,
"depends_on": depends_on.unwrap_or(""),
"claimed_by": claimed_by.unwrap_or(""),
"claimed_at": claimed_at.unwrap_or(0.0),
"claim_agent": insert_claim_agent,
"claim_ts": insert_claim_ts,
"merged_at": merged_at.unwrap_or(0.0),
"qa_mode": "",
"item_type": "",
@@ -357,8 +371,8 @@ pub fn write_item(
item.agent.advance_seq(floor);
item.retry_count.advance_seq(floor);
item.depends_on.advance_seq(floor);
item.claimed_by.advance_seq(floor);
item.claimed_at.advance_seq(floor);
item.claim_agent.advance_seq(floor);
item.claim_ts.advance_seq(floor);
item.merged_at.advance_seq(floor);
item.qa_mode.advance_seq(floor);
item.item_type.advance_seq(floor);
@@ -384,7 +398,6 @@ pub fn write_item(
/// Stages are normalised through [`Stage::from_dir`]: unknown strings cause
/// the write to be skipped (with a log line).
#[cfg(test)]
#[allow(clippy::too_many_arguments)]
pub fn write_item_str(
story_id: &str,
stage: &str,
@@ -392,8 +405,6 @@ pub fn write_item_str(
agent: Option<&str>,
retry_count: Option<i64>,
depends_on: Option<&str>,
claimed_by: Option<&str>,
claimed_at: Option<f64>,
merged_at: Option<f64>,
) {
// Normalise pre-934 directory-style strings to clean wire form so
@@ -423,8 +434,6 @@ pub fn write_item_str(
agent,
retry_count,
depends_on,
claimed_by,
claimed_at,
merged_at,
);
}
+60 -8
View File
@@ -276,6 +276,57 @@ pub fn migrate_legacy_stage_strings() {
);
}
/// Clear legacy node-hex claims from `claim_agent` and `claim_ts` registers.
///
/// Pre-1009 nodes wrote the Ed25519 hex pubkey as `claimed_by`. That value
/// cannot be converted to an `AgentName`, so the safe migration is to wipe
/// any existing claim rather than carry over a semantically invalid string.
///
/// Only clears entries where `claim_agent` looks like a legacy node hex value
/// (64 hex chars). Entries that are already empty or contain an agent-name
/// string (shorter, mixed case) are left untouched.
pub fn migrate_node_claims_to_agent_claims() {
    let Some(state_mutex) = get_crdt() else {
        return;
    };
    // Hold the lock for the whole scan-and-clear. The previous two-phase
    // approach (collect indices under one lock, re-lock to mutate) left a
    // window in which another thread could mutate the item list, making the
    // collected indices point at the wrong items (or out of bounds).
    let Ok(mut state) = state_mutex.lock() else {
        return;
    };
    // Collect first: the filter needs a shared borrow of `state`, while
    // `apply_and_persist` below needs `&mut state`.
    let stale_indices: Vec<usize> = state
        .index
        .values()
        .copied()
        .filter(|&idx| {
            let item = &state.crdt.doc.items[idx];
            match item.claim_agent.view() {
                // Exactly 64 ASCII hex digits => legacy node pubkey, not an
                // agent name; mark for clearing.
                JsonValue::String(s) => {
                    s.len() == 64 && s.chars().all(|c| c.is_ascii_hexdigit())
                }
                _ => false,
            }
        })
        .collect();
    if stale_indices.is_empty() {
        return;
    }
    let count = stale_indices.len();
    for idx in stale_indices {
        apply_and_persist(&mut state, |s| {
            s.crdt.doc.items[idx].claim_agent.set(String::new())
        });
        apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claim_ts.set(0.0));
    }
    slog!("[crdt] Cleared {count} legacy node-hex claim(s) from claim_agent/claim_ts");
}
#[cfg(test)]
mod stage_migration_tests {
use super::super::super::state::init_for_test;
@@ -299,8 +350,6 @@ mod stage_migration_tests {
None,
None,
None,
None,
None,
);
// Then overwrite the stage register with the raw legacy string,
// bypassing `db::normalise_stage_str` / `write_item_str`'s mapping.
@@ -318,7 +367,11 @@ mod stage_migration_tests {
let cases: &[(&str, &str, Stage)] = &[
("9501_legacy_upcoming", "0_upcoming", Stage::Upcoming),
("9502_legacy_backlog", "1_backlog", Stage::Backlog),
("9503_legacy_coding", "2_current", Stage::Coding),
(
"9503_legacy_coding",
"2_current",
Stage::Coding { claim: None },
),
(
"9504_legacy_blocked",
"2_blocked",
@@ -333,6 +386,7 @@ mod stage_migration_tests {
Stage::Merge {
feature_branch: BranchName(String::new()),
commits_ahead: NonZeroU32::new(1).unwrap(),
claim: None,
},
),
(
@@ -398,14 +452,12 @@ mod stage_migration_tests {
// Seed two items: one already in clean form, one in legacy form.
write_item(
"9520_already_clean",
&Stage::Coding,
&Stage::Coding { claim: None },
Some("Already Clean"),
None,
None,
None,
None,
None,
None,
);
seed_with_raw_stage("9521_needs_migration", "2_current");
@@ -416,11 +468,11 @@ mod stage_migration_tests {
let migrated = read_item("9521_needs_migration").unwrap();
assert!(matches!(
clean.stage(),
crate::pipeline_state::Stage::Coding
crate::pipeline_state::Stage::Coding { .. }
));
assert!(matches!(
migrated.stage(),
crate::pipeline_state::Stage::Coding
crate::pipeline_state::Stage::Coding { .. }
));
}
+1 -1
View File
@@ -18,5 +18,5 @@ pub use item::{
pub use item::write_item_str;
pub use migrations::{
migrate_legacy_stage_strings, migrate_merge_job, migrate_names_from_slugs,
migrate_story_ids_to_numeric, name_from_story_id,
migrate_node_claims_to_agent_claims, migrate_story_ids_to_numeric, name_from_story_id,
};
+5 -34
View File
@@ -98,8 +98,6 @@ fn migrate_story_ids_to_numeric_rewrites_slug_ids() {
None,
None,
None,
None,
None,
);
let result = migrate_story_ids_to_numeric();
@@ -130,8 +128,6 @@ fn migrate_story_ids_to_numeric_is_idempotent() {
None,
None,
None,
None,
None,
);
// First call — nothing to migrate.
@@ -159,8 +155,6 @@ fn migrate_story_ids_to_numeric_skips_conflict() {
None,
None,
None,
None,
None,
);
write_item_str(
"44",
@@ -170,8 +164,6 @@ fn migrate_story_ids_to_numeric_skips_conflict() {
None,
None,
None,
None,
None,
);
let result = migrate_story_ids_to_numeric();
@@ -204,14 +196,15 @@ fn migrate_story_ids_to_numeric_preserves_stage_and_name() {
None,
None,
None,
None,
None,
);
migrate_story_ids_to_numeric();
let item = read_item("45").expect("item must be accessible by numeric ID");
assert!(matches!(item.stage, crate::pipeline_state::Stage::Coding));
assert!(matches!(
item.stage,
crate::pipeline_state::Stage::Coding { .. }
));
assert_eq!(item.name, "Crash Bug");
assert_eq!(item.agent.map(|a| a.as_str()), Some("coder-1"));
}
@@ -229,8 +222,6 @@ fn migrate_names_from_slugs_fills_empty_names() {
None,
None,
None,
None,
None,
);
// Before migration: nameless item is filtered by read_item (AC 5).
@@ -261,8 +252,6 @@ fn migrate_names_from_slugs_leaves_existing_names_unchanged() {
None,
None,
None,
None,
None,
);
migrate_names_from_slugs();
@@ -297,8 +286,6 @@ fn set_depends_on_round_trip_and_clear() {
None,
None,
None,
None,
None,
);
// Set depends_on to [837] and verify CRDT register holds the list.
@@ -352,8 +339,6 @@ fn set_agent_some_writes_name() {
None,
None,
None,
None,
None,
);
let found = set_agent(
@@ -382,8 +367,6 @@ fn set_agent_none_clears_register() {
None,
None,
None,
None,
None,
);
// Confirm agent is set.
@@ -430,8 +413,6 @@ fn set_qa_mode_round_trip_server_then_human() {
None,
None,
None,
None,
None,
);
// Set qa=server via typed path and assert CRDT register reflects it.
@@ -485,8 +466,6 @@ fn set_qa_mode_round_trip_all_variants() {
None,
None,
None,
None,
None,
);
for mode in [QaMode::Server, QaMode::Agent, QaMode::Human] {
@@ -523,8 +502,6 @@ fn bump_retry_count_increments_by_one() {
None,
None,
None,
None,
None,
);
let v1 = bump_retry_count("9001_story_bump_test");
@@ -548,8 +525,6 @@ fn set_retry_count_resets_to_zero() {
Some(5),
None,
None,
None,
None,
);
set_retry_count("9002_story_set_test", 0);
@@ -666,7 +641,7 @@ async fn bug_511_rowid_replay_preserves_field_update_after_list_insert() {
let idx2 = index2["511_story_target"];
let view = extract_item_view(&crdt2.doc.items[idx2]).unwrap();
assert!(
matches!(view.stage, crate::pipeline_state::Stage::Coding),
matches!(view.stage, crate::pipeline_state::Stage::Coding { .. }),
"stage field update lost during replay (bug 511 regression)"
);
@@ -727,8 +702,6 @@ async fn tombstone_survives_concurrent_writes() {
None,
None,
None,
None,
None,
);
assert!(
read_item(story_id).is_some(),
@@ -748,8 +721,6 @@ async fn tombstone_survives_concurrent_writes() {
None,
None,
None,
None,
None,
);
tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;
}