huskies: merge 1009

This commit is contained in:
dave
2026-05-13 22:50:13 +00:00
parent a5cd3a2152
commit 4e007bb770
56 changed files with 453 additions and 384 deletions
+5 -5
View File
@@ -47,16 +47,16 @@ pub use read::{
};
pub use state::{init, subscribe};
pub use types::{
ActiveAgentCrdt, ActiveAgentView, AgentThrottleCrdt, AgentThrottleView, Claim, CrdtEvent,
EpicId, GatewayConfigCrdt, GatewayProjectCrdt, GatewayProjectView, MergeJobCrdt, MergeJobView,
ActiveAgentCrdt, ActiveAgentView, AgentThrottleCrdt, AgentThrottleView, CrdtEvent, EpicId,
GatewayConfigCrdt, GatewayProjectCrdt, GatewayProjectView, MergeJobCrdt, MergeJobView,
NodePresenceCrdt, NodePresenceView, PipelineDoc, PipelineItemCrdt, PipelineItemView,
TestJobCrdt, TestJobView, TokenUsageCrdt, TokenUsageView, WorkItem,
};
pub use write::{
bump_retry_count, migrate_legacy_stage_strings, migrate_merge_job, migrate_names_from_slugs,
migrate_story_ids_to_numeric, name_from_story_id, set_agent, set_depends_on, set_epic,
set_item_type, set_name, set_qa_mode, set_resume_to, set_resume_to_raw, set_retry_count,
write_item,
migrate_node_claims_to_agent_claims, migrate_story_ids_to_numeric, name_from_story_id,
set_agent, set_depends_on, set_epic, set_item_type, set_name, set_qa_mode, set_resume_to,
set_resume_to_raw, set_retry_count, write_item,
};
#[cfg(test)]
-2
View File
@@ -550,8 +550,6 @@ mod tests {
None,
None,
None,
None,
None,
);
assert!(
read_item(story_id).is_none(),
+12 -7
View File
@@ -51,7 +51,7 @@ pub fn sign_versioned_challenge(nonce: &str) -> Option<(String, String)> {
/// Write a claim on a pipeline item via CRDT.
///
/// Sets `claimed_by` to this node's ID and `claimed_at` to the current time.
/// Sets `claim_agent` to this node's ID and `claim_ts` to the current time.
/// The LWW register ensures deterministic conflict resolution — if two nodes
/// claim the same item simultaneously, both will converge to the same winner
/// after CRDT sync.
@@ -76,14 +76,14 @@ pub fn write_claim(story_id: &str) -> bool {
};
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claimed_by.set(node_id.clone())
s.crdt.doc.items[idx].claim_agent.set(node_id.clone())
});
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claimed_at.set(now));
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claim_ts.set(now));
true
}
/// Release a claim on a pipeline item (clear claimed_by and claimed_at).
/// Release a claim on a pipeline item (clear claim_agent and claim_ts).
pub fn release_claim(story_id: &str) {
let Some(state_mutex) = get_crdt() else {
return;
@@ -96,9 +96,9 @@ pub fn release_claim(story_id: &str) {
};
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claimed_by.set(String::new())
s.crdt.doc.items[idx].claim_agent.set(String::new())
});
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claimed_at.set(0.0));
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claim_ts.set(0.0));
}
/// Check if this node currently holds the claim on a pipeline item.
@@ -109,7 +109,12 @@ pub fn is_claimed_by_us(story_id: &str) -> bool {
let Some(item) = read_item(story_id) else {
return false;
};
item.claim().is_some_and(|c| c.node == node_id)
let claim = match item.stage() {
crate::pipeline_state::Stage::Coding { claim } => claim.as_ref(),
crate::pipeline_state::Stage::Merge { claim, .. } => claim.as_ref(),
_ => None,
};
claim.is_some_and(|c| c.agent.0 == node_id)
}
/// Write or update a node presence entry in the CRDT.
+47 -25
View File
@@ -22,8 +22,10 @@ pub struct CrdtItemDump {
pub agent: Option<String>,
pub retry_count: Option<i64>,
pub depends_on: Option<Vec<u32>>,
pub claimed_by: Option<String>,
pub claimed_at: Option<f64>,
/// Agent name holding the claim, or `None` when unclaimed.
pub claim_agent: Option<String>,
/// Unix timestamp (seconds) when the claim was written.
pub claim_ts: Option<f64>,
/// Hex-encoded OpId of the list insert op — cross-reference with `crdt_ops`.
pub content_index: String,
pub is_deleted: bool,
@@ -139,11 +141,11 @@ pub fn dump_crdt_state(story_id_filter: Option<&str>) -> CrdtStateDump {
_ => None,
};
let claimed_by = match item_crdt.claimed_by.view() {
let claim_agent = match item_crdt.claim_agent.view() {
JsonValue::String(s) if !s.is_empty() => Some(s),
_ => None,
};
let claimed_at = match item_crdt.claimed_at.view() {
let claim_ts = match item_crdt.claim_ts.view() {
JsonValue::Number(n) if n > 0.0 => Some(n),
_ => None,
};
@@ -157,8 +159,8 @@ pub fn dump_crdt_state(story_id_filter: Option<&str>) -> CrdtStateDump {
agent,
retry_count,
depends_on,
claimed_by,
claimed_at,
claim_agent,
claim_ts,
content_index,
is_deleted: op.is_deleted,
});
@@ -326,7 +328,7 @@ pub fn evict_item(story_id: &str) -> Result<(), String> {
/// string, or with no name set, are filtered out (`None`) — a nameless item
/// is treated as malformed and never surfaces to callers.
pub(super) fn extract_item_view(item: &PipelineItemCrdt) -> Option<PipelineItemView> {
use super::types::{Claim, EpicId};
use super::types::EpicId;
use crate::io::story_metadata::{ItemType, QaMode};
let story_id = match item.story_id.view() {
@@ -357,18 +359,17 @@ pub(super) fn extract_item_view(item: &PipelineItemCrdt) -> Option<PipelineItemV
_ => Vec::new(),
};
let claimed_by = match item.claimed_by.view() {
// `claim_agent`/`claim_ts` are read only to embed in Stage::Coding /
// Stage::Merge via `project_stage_for_view`; they are not stored on
// `WorkItem` directly (story 1009: readers project from the Stage variant).
let claim_agent = match item.claim_agent.view() {
JsonValue::String(s) if !s.is_empty() => Some(s),
_ => None,
};
let claimed_at_secs = match item.claimed_at.view() {
let claim_ts_secs = match item.claim_ts.view() {
JsonValue::Number(n) if n > 0.0 => Some(n as u64),
_ => None,
};
let claim = match (claimed_by, claimed_at_secs) {
(Some(node), Some(at)) => Some(Claim { node, at }),
_ => None,
};
// `merged_at` is read only to project into `Stage::Done`; it is not
// stored on `WorkItem` (callers access it via `Stage::Done { merged_at }`).
@@ -397,8 +398,14 @@ pub(super) fn extract_item_view(item: &PipelineItemCrdt) -> Option<PipelineItemV
_ => None,
};
let stage =
project_stage_for_view(&stage_str, &story_id, merged_at_float, resume_to.as_deref())?;
let stage = project_stage_for_view(
&stage_str,
&story_id,
merged_at_float,
resume_to.as_deref(),
claim_agent.as_deref(),
claim_ts_secs,
)?;
Some(PipelineItemView {
story_id,
@@ -407,7 +414,6 @@ pub(super) fn extract_item_view(item: &PipelineItemCrdt) -> Option<PipelineItemV
agent,
retry_count,
depends_on,
claim,
qa_mode,
item_type,
epic,
@@ -432,9 +438,11 @@ fn project_stage_for_view(
story_id: &str,
merged_at: Option<f64>,
resume_to: Option<&str>,
claim_agent: Option<&str>,
claim_ts_secs: Option<u64>,
) -> Option<crate::pipeline_state::Stage> {
use crate::pipeline_state::{ArchiveReason, BranchName, GitSha, Stage};
use chrono::{DateTime, Utc};
use crate::pipeline_state::{AgentClaim, AgentName, ArchiveReason, BranchName, GitSha, Stage};
use chrono::{DateTime, TimeZone, Utc};
use std::num::NonZeroU32;
// Normalise legacy directory-style strings to their clean wire form so
@@ -458,13 +466,30 @@ fn project_stage_for_view(
// Story 945: resume target for `Frozen` / `ReviewHold` variants is stored
// in the sibling `resume_to` register. Fall back to `Coding` when the
// register is empty or holds an unrecognised value.
let resume_target =
|| -> Box<Stage> { Box::new(resume_to.and_then(Stage::from_dir).unwrap_or(Stage::Coding)) };
let resume_target = || -> Box<Stage> {
Box::new(
resume_to
.and_then(Stage::from_dir)
.unwrap_or(Stage::Coding { claim: None }),
)
};
// Story 1009: reconstruct AgentClaim from `claim_agent`/`claim_ts` registers.
let claim = match (claim_agent, claim_ts_secs) {
(Some(agent_str), Some(ts)) => Some(AgentClaim {
agent: AgentName(agent_str.to_string()),
claimed_at: Utc
.timestamp_opt(ts as i64, 0)
.single()
.unwrap_or(DateTime::<Utc>::UNIX_EPOCH),
}),
_ => None,
};
match clean {
"upcoming" => Some(Stage::Upcoming),
"backlog" => Some(Stage::Backlog),
"coding" => Some(Stage::Coding),
"coding" => Some(Stage::Coding { claim }),
"qa" => Some(Stage::Qa),
"blocked" => Some(Stage::Blocked {
reason: String::new(),
@@ -472,6 +497,7 @@ fn project_stage_for_view(
"merge" => Some(Stage::Merge {
feature_branch: BranchName(format!("feature/story-{story_id}")),
commits_ahead: NonZeroU32::new(1).expect("1 is non-zero"),
claim,
}),
"merge_failure" => {
// Story 986: read the typed kind directly from ContentKey::MergeFailureKind
@@ -709,8 +735,6 @@ mod tests {
None,
None,
None,
None,
None,
);
// The story is live on this node.
@@ -779,8 +803,6 @@ mod tests {
None,
None,
None,
None,
None,
);
assert!(
read_item(story_id).is_none(),
+4 -5
View File
@@ -117,8 +117,6 @@ async fn subscribe_receives_stage_transition_events() {
None,
None,
None,
None,
None,
);
let evt: CrdtEvent = rx.try_recv().expect("expected CrdtEvent on insert");
@@ -138,8 +136,6 @@ async fn subscribe_receives_stage_transition_events() {
None,
None,
None,
None,
None,
);
let evt: CrdtEvent = rx.try_recv().expect("expected CrdtEvent on stage change");
@@ -148,7 +144,10 @@ async fn subscribe_receives_stage_transition_events() {
evt.from_stage,
Some(crate::pipeline_state::Stage::Backlog)
));
assert!(matches!(evt.to_stage, crate::pipeline_state::Stage::Coding));
assert!(matches!(
evt.to_stage,
crate::pipeline_state::Stage::Coding { .. }
));
}
#[tokio::test]
+17 -37
View File
@@ -65,14 +65,13 @@ pub struct PipelineItemCrdt {
pub agent: LwwRegisterCrdt<String>,
pub retry_count: LwwRegisterCrdt<f64>,
pub depends_on: LwwRegisterCrdt<String>,
/// Node ID (hex-encoded Ed25519 pubkey) of the node that claimed this item.
/// Used for distributed work claiming — the LWW register resolves conflicts
/// deterministically so all nodes converge on the same claimer.
pub claimed_by: LwwRegisterCrdt<String>,
/// Name of the agent (e.g. `"coder-1"`) that has claimed this item.
/// Empty string means the item is unclaimed. Replaces the legacy
/// `claimed_by` node-hex register (story 1009).
pub claim_agent: LwwRegisterCrdt<String>,
/// Unix timestamp (seconds) when the claim was written.
/// Used for timeout-based reclaim: if a node crashes, other nodes can
/// reclaim the item after the timeout expires.
pub claimed_at: LwwRegisterCrdt<f64>,
/// Zero means no active claim. Previously named `claimed_at`.
pub claim_ts: LwwRegisterCrdt<f64>,
/// Unix timestamp (seconds) when the item was merged to master.
/// Written once when the item transitions to `5_done`. Used by the
/// sweep loop to determine when to promote to `6_archived`.
@@ -121,18 +120,6 @@ pub struct NodePresenceCrdt {
// ── Read-side view types ─────────────────────────────────────────────
/// A live claim on a pipeline item: which node holds it and when it was taken.
///
/// `extract_item_view` only materialises a `Claim` when both companion
/// registers are populated; a node with no timestamp (or the reverse) reads
/// back as "no claim".
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Claim {
    /// Hex-encoded Ed25519 public key of the claiming node.
    pub node: String,
    /// Unix timestamp (whole seconds) at which the claim was recorded.
    pub at: u64,
}
/// Numeric identifier for an epic work item.
///
/// The numeric prefix of the epic's story_id (e.g. `EpicId(9990)` for the
@@ -203,9 +190,6 @@ pub struct WorkItem {
pub(super) retry_count: u32,
/// Dependency story numbers — empty `Vec` when the register is unset.
pub(super) depends_on: Vec<u32>,
/// Active claim (node + timestamp). `None` when the item is unclaimed or
/// when only one of the two companion registers is set.
pub(super) claim: Option<Claim>,
/// QA mode override. `None` means "use the project default".
pub(super) qa_mode: Option<crate::io::story_metadata::QaMode>,
/// Item type. `None` means "infer from the story_id slug prefix".
@@ -248,11 +232,6 @@ impl WorkItem {
&self.depends_on
}
/// Active claim on this item, or `None` when unclaimed.
pub fn claim(&self) -> Option<&Claim> {
self.claim.as_ref()
}
/// QA mode override, or `None` when the register is unset (use project default).
pub fn qa_mode(&self) -> Option<crate::io::story_metadata::QaMode> {
self.qa_mode
@@ -281,7 +260,6 @@ impl WorkItem {
agent: Option<crate::config::AgentName>,
retry_count: u32,
depends_on: Vec<u32>,
claim: Option<Claim>,
qa_mode: Option<crate::io::story_metadata::QaMode>,
item_type: Option<crate::io::story_metadata::ItemType>,
epic: Option<EpicId>,
@@ -293,7 +271,6 @@ impl WorkItem {
agent,
retry_count,
depends_on,
claim,
qa_mode,
item_type,
epic,
@@ -480,8 +457,8 @@ mod tests {
"retry_count": 0.0,
"blocked": false,
"depends_on": "",
"claimed_by": "",
"claimed_at": 0.0,
"claim_agent": "",
"claim_ts": 0.0,
})
.into();
@@ -515,8 +492,8 @@ mod tests {
"retry_count": 0.0,
"blocked": false,
"depends_on": "",
"claimed_by": "",
"claimed_at": 0.0,
"claim_agent": "",
"claim_ts": 0.0,
})
.into();
@@ -541,7 +518,7 @@ mod tests {
let evt = CrdtEvent {
story_id: "42_story_foo".to_string(),
from_stage: Some(crate::pipeline_state::Stage::Backlog),
to_stage: crate::pipeline_state::Stage::Coding,
to_stage: crate::pipeline_state::Stage::Coding { claim: None },
name: "Foo Feature".to_string(),
};
assert_eq!(evt.story_id, "42_story_foo");
@@ -549,7 +526,10 @@ mod tests {
evt.from_stage,
Some(crate::pipeline_state::Stage::Backlog)
));
assert!(matches!(evt.to_stage, crate::pipeline_state::Stage::Coding));
assert!(matches!(
evt.to_stage,
crate::pipeline_state::Stage::Coding { .. }
));
assert_eq!(evt.name, "Foo Feature");
}
@@ -698,7 +678,7 @@ mod tests {
let evt = CrdtEvent {
story_id: "70_story_broadcast".to_string(),
from_stage: Some(Stage::Backlog),
to_stage: Stage::Coding,
to_stage: Stage::Coding { claim: None },
name: "Broadcast Test".to_string(),
};
tx.send(evt).unwrap();
@@ -706,7 +686,7 @@ mod tests {
let received = rx.try_recv().unwrap();
assert_eq!(received.story_id, "70_story_broadcast");
assert!(matches!(received.from_stage, Some(Stage::Backlog)));
assert!(matches!(received.to_stage, Stage::Coding));
assert!(matches!(received.to_stage, Stage::Coding { .. }));
assert_eq!(received.name, "Broadcast Test");
}
}
+30 -21
View File
@@ -11,7 +11,7 @@ use serde_json::json;
use super::super::state::{apply_and_persist, emit_event, get_crdt, rebuild_index};
use super::super::types::CrdtEvent;
use crate::io::story_metadata::QaMode;
use crate::pipeline_state::{Stage, stage_dir_name};
use crate::pipeline_state::{AgentClaim, Stage, stage_dir_name};
/// Set the typed `depends_on` CRDT register for a pipeline item.
///
@@ -221,7 +221,6 @@ pub fn set_qa_mode(story_id: &str, mode: Option<QaMode>) -> bool {
///
/// `stage` is the typed pipeline state; it is serialised to the canonical
/// clean wire form (story 934) via [`stage_dir_name`] at the CRDT boundary.
#[allow(clippy::too_many_arguments)]
pub fn write_item(
story_id: &str,
stage: &Stage,
@@ -229,11 +228,14 @@ pub fn write_item(
agent: Option<&str>,
retry_count: Option<i64>,
depends_on: Option<&str>,
claimed_by: Option<&str>,
claimed_at: Option<f64>,
merged_at: Option<f64>,
) {
let stage_str = stage_dir_name(stage);
let claim: Option<&AgentClaim> = match stage {
Stage::Coding { claim } => claim.as_ref(),
Stage::Merge { claim, .. } => claim.as_ref(),
_ => None,
};
let Some(state_mutex) = get_crdt() else {
return;
};
@@ -291,14 +293,19 @@ pub fn write_item(
s.crdt.doc.items[idx].depends_on.set(d.to_string())
});
}
if let Some(cb) = claimed_by {
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claimed_by.set(cb.to_string())
});
}
if let Some(ca) = claimed_at {
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claimed_at.set(ca));
}
let (claim_agent_str, claim_ts_val) = match claim {
Some(c) => (
c.agent.0.as_str().to_string(),
c.claimed_at.timestamp() as f64,
),
None => (String::new(), 0.0),
};
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claim_agent.set(claim_agent_str)
});
apply_and_persist(&mut state, |s| {
s.crdt.doc.items[idx].claim_ts.set(claim_ts_val)
});
if let Some(ma) = merged_at {
apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].merged_at.set(ma));
}
@@ -322,6 +329,13 @@ pub fn write_item(
}
} else {
// Insert new item.
let (insert_claim_agent, insert_claim_ts) = match claim {
Some(c) => (
c.agent.0.as_str().to_string(),
c.claimed_at.timestamp() as f64,
),
None => (String::new(), 0.0),
};
let item_json: JsonValue = json!({
"story_id": story_id,
"stage": stage_str,
@@ -329,8 +343,8 @@ pub fn write_item(
"agent": agent.unwrap_or(""),
"retry_count": retry_count.unwrap_or(0) as f64,
"depends_on": depends_on.unwrap_or(""),
"claimed_by": claimed_by.unwrap_or(""),
"claimed_at": claimed_at.unwrap_or(0.0),
"claim_agent": insert_claim_agent,
"claim_ts": insert_claim_ts,
"merged_at": merged_at.unwrap_or(0.0),
"qa_mode": "",
"item_type": "",
@@ -357,8 +371,8 @@ pub fn write_item(
item.agent.advance_seq(floor);
item.retry_count.advance_seq(floor);
item.depends_on.advance_seq(floor);
item.claimed_by.advance_seq(floor);
item.claimed_at.advance_seq(floor);
item.claim_agent.advance_seq(floor);
item.claim_ts.advance_seq(floor);
item.merged_at.advance_seq(floor);
item.qa_mode.advance_seq(floor);
item.item_type.advance_seq(floor);
@@ -384,7 +398,6 @@ pub fn write_item(
/// Stages are normalised through [`Stage::from_dir`]: unknown strings cause
/// the write to be skipped (with a log line).
#[cfg(test)]
#[allow(clippy::too_many_arguments)]
pub fn write_item_str(
story_id: &str,
stage: &str,
@@ -392,8 +405,6 @@ pub fn write_item_str(
agent: Option<&str>,
retry_count: Option<i64>,
depends_on: Option<&str>,
claimed_by: Option<&str>,
claimed_at: Option<f64>,
merged_at: Option<f64>,
) {
// Normalise pre-934 directory-style strings to clean wire form so
@@ -423,8 +434,6 @@ pub fn write_item_str(
agent,
retry_count,
depends_on,
claimed_by,
claimed_at,
merged_at,
);
}
+60 -8
View File
@@ -276,6 +276,57 @@ pub fn migrate_legacy_stage_strings() {
);
}
/// Clear legacy node-hex claims from `claim_agent` and `claim_ts` registers.
///
/// Pre-1009 nodes wrote the Ed25519 hex pubkey as `claimed_by`. That value
/// cannot be converted to an `AgentName`, so the safe migration is to wipe
/// any existing claim rather than carry over a semantically invalid string.
///
/// Only clears entries where `claim_agent` looks like a legacy node hex value
/// (64 hex chars). Entries that are already empty or contain an agent-name
/// string (shorter, mixed case) are left untouched.
///
/// NOTE(review): an agent deliberately named as exactly 64 hex characters
/// would also be wiped by this heuristic — confirm no such agent names exist.
pub fn migrate_node_claims_to_agent_claims() {
    let Some(state_mutex) = get_crdt() else {
        return;
    };
    // Hold the lock across the whole scan-then-clear sequence. The previous
    // two-phase version (scan under one lock acquisition, mutate under a
    // second) left a window in which concurrent writers could evict or
    // reorder items, turning the collected indices stale and risking either
    // clearing the wrong item or an out-of-bounds panic on `items[idx]`.
    let Ok(mut state) = state_mutex.lock() else {
        return;
    };
    let stale_indices: Vec<usize> = state
        .index
        .values()
        .copied()
        .filter(|&idx| {
            // A legacy claim is exactly 64 hex chars (an Ed25519 pubkey);
            // agent names never match that shape per the doc comment above.
            match state.crdt.doc.items[idx].claim_agent.view() {
                JsonValue::String(s) => {
                    s.len() == 64 && s.chars().all(|c| c.is_ascii_hexdigit())
                }
                _ => false,
            }
        })
        .collect();
    if stale_indices.is_empty() {
        return;
    }
    let count = stale_indices.len();
    for idx in stale_indices {
        apply_and_persist(&mut state, |s| {
            s.crdt.doc.items[idx].claim_agent.set(String::new())
        });
        apply_and_persist(&mut state, |s| s.crdt.doc.items[idx].claim_ts.set(0.0));
    }
    slog!("[crdt] Cleared {count} legacy node-hex claim(s) from claim_agent/claim_ts");
}
#[cfg(test)]
mod stage_migration_tests {
use super::super::super::state::init_for_test;
@@ -299,8 +350,6 @@ mod stage_migration_tests {
None,
None,
None,
None,
None,
);
// Then overwrite the stage register with the raw legacy string,
// bypassing `db::normalise_stage_str` / `write_item_str`'s mapping.
@@ -318,7 +367,11 @@ mod stage_migration_tests {
let cases: &[(&str, &str, Stage)] = &[
("9501_legacy_upcoming", "0_upcoming", Stage::Upcoming),
("9502_legacy_backlog", "1_backlog", Stage::Backlog),
("9503_legacy_coding", "2_current", Stage::Coding),
(
"9503_legacy_coding",
"2_current",
Stage::Coding { claim: None },
),
(
"9504_legacy_blocked",
"2_blocked",
@@ -333,6 +386,7 @@ mod stage_migration_tests {
Stage::Merge {
feature_branch: BranchName(String::new()),
commits_ahead: NonZeroU32::new(1).unwrap(),
claim: None,
},
),
(
@@ -398,14 +452,12 @@ mod stage_migration_tests {
// Seed two items: one already in clean form, one in legacy form.
write_item(
"9520_already_clean",
&Stage::Coding,
&Stage::Coding { claim: None },
Some("Already Clean"),
None,
None,
None,
None,
None,
None,
);
seed_with_raw_stage("9521_needs_migration", "2_current");
@@ -416,11 +468,11 @@ mod stage_migration_tests {
let migrated = read_item("9521_needs_migration").unwrap();
assert!(matches!(
clean.stage(),
crate::pipeline_state::Stage::Coding
crate::pipeline_state::Stage::Coding { .. }
));
assert!(matches!(
migrated.stage(),
crate::pipeline_state::Stage::Coding
crate::pipeline_state::Stage::Coding { .. }
));
}
+1 -1
View File
@@ -18,5 +18,5 @@ pub use item::{
pub use item::write_item_str;
pub use migrations::{
migrate_legacy_stage_strings, migrate_merge_job, migrate_names_from_slugs,
migrate_story_ids_to_numeric, name_from_story_id,
migrate_node_claims_to_agent_claims, migrate_story_ids_to_numeric, name_from_story_id,
};
+5 -34
View File
@@ -98,8 +98,6 @@ fn migrate_story_ids_to_numeric_rewrites_slug_ids() {
None,
None,
None,
None,
None,
);
let result = migrate_story_ids_to_numeric();
@@ -130,8 +128,6 @@ fn migrate_story_ids_to_numeric_is_idempotent() {
None,
None,
None,
None,
None,
);
// First call — nothing to migrate.
@@ -159,8 +155,6 @@ fn migrate_story_ids_to_numeric_skips_conflict() {
None,
None,
None,
None,
None,
);
write_item_str(
"44",
@@ -170,8 +164,6 @@ fn migrate_story_ids_to_numeric_skips_conflict() {
None,
None,
None,
None,
None,
);
let result = migrate_story_ids_to_numeric();
@@ -204,14 +196,15 @@ fn migrate_story_ids_to_numeric_preserves_stage_and_name() {
None,
None,
None,
None,
None,
);
migrate_story_ids_to_numeric();
let item = read_item("45").expect("item must be accessible by numeric ID");
assert!(matches!(item.stage, crate::pipeline_state::Stage::Coding));
assert!(matches!(
item.stage,
crate::pipeline_state::Stage::Coding { .. }
));
assert_eq!(item.name, "Crash Bug");
assert_eq!(item.agent.map(|a| a.as_str()), Some("coder-1"));
}
@@ -229,8 +222,6 @@ fn migrate_names_from_slugs_fills_empty_names() {
None,
None,
None,
None,
None,
);
// Before migration: nameless item is filtered by read_item (AC 5).
@@ -261,8 +252,6 @@ fn migrate_names_from_slugs_leaves_existing_names_unchanged() {
None,
None,
None,
None,
None,
);
migrate_names_from_slugs();
@@ -297,8 +286,6 @@ fn set_depends_on_round_trip_and_clear() {
None,
None,
None,
None,
None,
);
// Set depends_on to [837] and verify CRDT register holds the list.
@@ -352,8 +339,6 @@ fn set_agent_some_writes_name() {
None,
None,
None,
None,
None,
);
let found = set_agent(
@@ -382,8 +367,6 @@ fn set_agent_none_clears_register() {
None,
None,
None,
None,
None,
);
// Confirm agent is set.
@@ -430,8 +413,6 @@ fn set_qa_mode_round_trip_server_then_human() {
None,
None,
None,
None,
None,
);
// Set qa=server via typed path and assert CRDT register reflects it.
@@ -485,8 +466,6 @@ fn set_qa_mode_round_trip_all_variants() {
None,
None,
None,
None,
None,
);
for mode in [QaMode::Server, QaMode::Agent, QaMode::Human] {
@@ -523,8 +502,6 @@ fn bump_retry_count_increments_by_one() {
None,
None,
None,
None,
None,
);
let v1 = bump_retry_count("9001_story_bump_test");
@@ -548,8 +525,6 @@ fn set_retry_count_resets_to_zero() {
Some(5),
None,
None,
None,
None,
);
set_retry_count("9002_story_set_test", 0);
@@ -666,7 +641,7 @@ async fn bug_511_rowid_replay_preserves_field_update_after_list_insert() {
let idx2 = index2["511_story_target"];
let view = extract_item_view(&crdt2.doc.items[idx2]).unwrap();
assert!(
matches!(view.stage, crate::pipeline_state::Stage::Coding),
matches!(view.stage, crate::pipeline_state::Stage::Coding { .. }),
"stage field update lost during replay (bug 511 regression)"
);
@@ -727,8 +702,6 @@ async fn tombstone_survives_concurrent_writes() {
None,
None,
None,
None,
None,
);
assert!(
read_item(story_id).is_some(),
@@ -748,8 +721,6 @@ async fn tombstone_survives_concurrent_writes() {
None,
None,
None,
None,
None,
);
tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;
}