2026-04-04 15:20:35 +00:00
|
|
|
use std::path::Path;
|
2026-03-22 19:07:07 +00:00
|
|
|
use std::process::Command;
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
use crate::io::story_metadata::clear_front_matter_field_in_content;
|
2026-03-22 19:07:07 +00:00
|
|
|
use crate::slog;
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Optional in-place transformation applied to an item's stored content
/// before it is written back (e.g. stripping front-matter fields).
type ContentTransform = Option<Box<dyn Fn(&str) -> String>>;
|
|
|
|
|
|
2026-03-22 19:07:07 +00:00
|
|
|
pub(super) fn item_type_from_id(item_id: &str) -> &'static str {
|
|
|
|
|
// New format: {digits}_{type}_{slug}
|
|
|
|
|
let after_num = item_id.trim_start_matches(|c: char| c.is_ascii_digit());
|
|
|
|
|
if after_num.starts_with("_bug_") {
|
|
|
|
|
"bug"
|
|
|
|
|
} else if after_num.starts_with("_spike_") {
|
|
|
|
|
"spike"
|
|
|
|
|
} else {
|
|
|
|
|
"story"
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
/// Move a work item to a new pipeline stage via the database.
|
|
|
|
|
///
|
|
|
|
|
/// Looks up the item in the CRDT to verify it exists in one of the expected
|
|
|
|
|
/// `sources` stages, then updates the stage. Optionally clears front-matter
|
|
|
|
|
/// fields from the stored content. Returns the source stage on success.
|
2026-04-04 15:20:35 +00:00
|
|
|
fn move_item<'a>(
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
sources: &'a [&'a str],
|
|
|
|
|
target_dir: &str,
|
|
|
|
|
extra_done_dirs: &[&str],
|
|
|
|
|
missing_ok: bool,
|
|
|
|
|
fields_to_clear: &[&str],
|
|
|
|
|
) -> Result<Option<&'a str>, String> {
|
2026-04-08 03:03:59 +00:00
|
|
|
// Check if the item is already in the target stage or a done stage.
|
|
|
|
|
if let Some(item) = crate::crdt_state::read_item(story_id) {
|
|
|
|
|
if item.stage == target_dir
|
|
|
|
|
|| extra_done_dirs.iter().any(|d| item.stage == *d)
|
|
|
|
|
{
|
|
|
|
|
return Ok(None); // Idempotent: already there.
|
2026-04-04 15:20:35 +00:00
|
|
|
}
|
2026-04-08 03:03:59 +00:00
|
|
|
|
|
|
|
|
// Verify it's in one of the expected source stages.
|
|
|
|
|
let src_dir = sources.iter().find(|&&s| item.stage == s).copied();
|
|
|
|
|
if src_dir.is_none() && !missing_ok {
|
|
|
|
|
let locs = sources
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|s| format!("work/{s}/"))
|
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
|
.join(" or ");
|
2026-04-04 15:20:35 +00:00
|
|
|
return Err(format!("Work item '{story_id}' not found in {locs}."));
|
|
|
|
|
}
|
2026-04-08 03:03:59 +00:00
|
|
|
let src_dir = src_dir.unwrap_or(sources[0]);
|
|
|
|
|
|
|
|
|
|
// Optionally clear front-matter fields from the stored content.
|
|
|
|
|
let transform: ContentTransform = if fields_to_clear.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
let fields: Vec<String> = fields_to_clear.iter().map(|s| s.to_string()).collect();
|
|
|
|
|
Some(Box::new(move |content: &str| {
|
|
|
|
|
let mut result = content.to_string();
|
|
|
|
|
for field in &fields {
|
|
|
|
|
result = clear_front_matter_field_in_content(&result, field);
|
|
|
|
|
}
|
|
|
|
|
result
|
|
|
|
|
}))
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
crate::db::move_item_stage(
|
|
|
|
|
story_id,
|
|
|
|
|
target_dir,
|
|
|
|
|
transform.as_ref().map(|f| f.as_ref()),
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
slog!("[lifecycle] Moved '{story_id}' from work/{src_dir}/ to work/{target_dir}/");
|
|
|
|
|
return Ok(Some(src_dir));
|
|
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
// Item not found in CRDT — check the content store as fallback.
|
|
|
|
|
if crate::db::read_content(story_id).is_some() {
|
|
|
|
|
// Content exists but not in CRDT yet — write it through.
|
|
|
|
|
let content = crate::db::read_content(story_id).unwrap();
|
|
|
|
|
crate::db::write_item_with_content(story_id, target_dir, &content);
|
|
|
|
|
slog!("[lifecycle] Moved '{story_id}' to work/{target_dir}/ (content store fallback)");
|
|
|
|
|
return Ok(Some(sources[0]));
|
|
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
// Try filesystem fallback for backwards compatibility during migration.
|
|
|
|
|
{
|
|
|
|
|
let sk = project_root.join(".huskies").join("work");
|
|
|
|
|
if let Some((src_dir, src_path)) = sources.iter().find_map(|&s| {
|
|
|
|
|
let p = sk.join(s).join(format!("{story_id}.md"));
|
|
|
|
|
p.exists().then_some((s, p))
|
|
|
|
|
}) && let Ok(mut content) = std::fs::read_to_string(&src_path) {
|
|
|
|
|
// Optionally clear front-matter fields.
|
|
|
|
|
for field in fields_to_clear {
|
|
|
|
|
content = clear_front_matter_field_in_content(&content, field);
|
|
|
|
|
}
|
|
|
|
|
// Import to DB.
|
|
|
|
|
crate::db::write_item_with_content(story_id, target_dir, &content);
|
|
|
|
|
// Also move on filesystem for backwards compat.
|
|
|
|
|
let target_path = sk.join(target_dir).join(format!("{story_id}.md"));
|
|
|
|
|
let _ = std::fs::create_dir_all(sk.join(target_dir));
|
|
|
|
|
let _ = std::fs::write(&target_path, &content);
|
|
|
|
|
// Only remove the source if it differs from the target (avoid
|
|
|
|
|
// deleting the file when src and target are the same directory).
|
|
|
|
|
if src_dir != target_dir {
|
|
|
|
|
let _ = std::fs::remove_file(&src_path);
|
|
|
|
|
}
|
|
|
|
|
slog!("[lifecycle] Moved '{story_id}' from work/{src_dir}/ to work/{target_dir}/");
|
|
|
|
|
return Ok(Some(src_dir));
|
2026-04-04 15:20:35 +00:00
|
|
|
}
|
|
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
if missing_ok {
|
|
|
|
|
slog!("[lifecycle] Work item '{story_id}' not found; skipping move to work/{target_dir}/");
|
|
|
|
|
return Ok(None);
|
|
|
|
|
}
|
2026-04-07 13:09:48 +00:00
|
|
|
|
2026-04-08 03:03:59 +00:00
|
|
|
let locs = sources
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|s| format!("work/{s}/"))
|
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
|
.join(" or ");
|
|
|
|
|
Err(format!("Work item '{story_id}' not found in {locs}."))
|
2026-04-04 15:20:35 +00:00
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Move a work item (story, bug, or spike) from `work/1_backlog/` to `work/2_current/`.
|
|
|
|
|
///
|
|
|
|
|
/// Idempotent: if already in `2_current/`, returns Ok. If not found in `1_backlog/`, logs and returns Ok.
|
|
|
|
|
pub fn move_story_to_current(project_root: &Path, story_id: &str) -> Result<(), String> {
|
|
|
|
|
move_item(project_root, story_id, &["1_backlog"], "2_current", &[], true, &[]).map(|_| ())
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Check whether a feature branch `feature/story-{story_id}` exists and has
/// commits that are not yet on master. Returns `true` when there is unmerged
/// work, `false` when there is no branch or all its commits are already
/// reachable from master.
pub fn feature_branch_has_unmerged_changes(project_root: &Path, story_id: &str) -> bool {
    let branch = format!("feature/story-{story_id}");

    // Does the branch exist at all? Any git failure counts as "no branch".
    let branch_exists = Command::new("git")
        .args(["rev-parse", "--verify", &branch])
        .current_dir(project_root)
        .output()
        .map(|out| out.status.success())
        .unwrap_or(false);
    if !branch_exists {
        return false; // No feature branch → nothing to merge.
    }

    // Any output from `git log master..branch` means unmerged commits exist.
    Command::new("git")
        .args(["log", &format!("master..{branch}"), "--oneline"])
        .current_dir(project_root)
        .output()
        .map(|out| !String::from_utf8_lossy(&out.stdout).trim().is_empty())
        .unwrap_or(false)
}
|
|
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Move a story from `work/2_current/` or `work/4_merge/` to `work/5_done/`.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Idempotent if already in `5_done/` or `6_archived/`. Errors if not found in `2_current/` or `4_merge/`.
|
2026-03-28 12:37:03 +00:00
|
|
|
pub fn move_story_to_done(project_root: &Path, story_id: &str) -> Result<(), String> {
|
2026-04-04 15:20:35 +00:00
|
|
|
move_item(
|
|
|
|
|
project_root,
|
|
|
|
|
story_id,
|
|
|
|
|
&["2_current", "4_merge"],
|
|
|
|
|
"5_done",
|
|
|
|
|
&["6_archived"],
|
|
|
|
|
false,
|
|
|
|
|
&["merge_failure", "retry_count", "blocked"],
|
|
|
|
|
)
|
|
|
|
|
.map(|_| ())
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Move a story/bug from `work/2_current/` or `work/3_qa/` to `work/4_merge/`.
|
|
|
|
|
///
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Idempotent if already in `4_merge/`. Errors if not found in `2_current/` or `3_qa/`.
|
2026-03-22 19:07:07 +00:00
|
|
|
pub fn move_story_to_merge(project_root: &Path, story_id: &str) -> Result<(), String> {
|
2026-04-04 15:20:35 +00:00
|
|
|
move_item(
|
|
|
|
|
project_root,
|
|
|
|
|
story_id,
|
|
|
|
|
&["2_current", "3_qa"],
|
|
|
|
|
"4_merge",
|
|
|
|
|
&[],
|
|
|
|
|
false,
|
|
|
|
|
&["retry_count", "blocked"],
|
|
|
|
|
)
|
|
|
|
|
.map(|_| ())
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Move a story/bug from `work/2_current/` to `work/3_qa/`.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Idempotent if already in `3_qa/`. Errors if not found in `2_current/`.
|
2026-03-22 19:07:07 +00:00
|
|
|
pub fn move_story_to_qa(project_root: &Path, story_id: &str) -> Result<(), String> {
|
2026-04-04 15:20:35 +00:00
|
|
|
move_item(
|
|
|
|
|
project_root,
|
|
|
|
|
story_id,
|
|
|
|
|
&["2_current"],
|
|
|
|
|
"3_qa",
|
|
|
|
|
&[],
|
|
|
|
|
false,
|
|
|
|
|
&["retry_count", "blocked"],
|
|
|
|
|
)
|
|
|
|
|
.map(|_| ())
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Move a story from `work/3_qa/` back to `work/2_current/`, clearing `review_hold` and writing notes.
|
|
|
|
|
pub fn reject_story_from_qa(project_root: &Path, story_id: &str, notes: &str) -> Result<(), String> {
|
|
|
|
|
let moved = move_item(project_root, story_id, &["3_qa"], "2_current", &[], false, &["review_hold"])?;
|
|
|
|
|
if moved.is_some() && !notes.is_empty() {
|
2026-04-08 03:03:59 +00:00
|
|
|
// Append rejection notes to the stored content.
|
|
|
|
|
if let Some(content) = crate::db::read_content(story_id) {
|
|
|
|
|
let updated = crate::io::story_metadata::write_rejection_notes_to_content(&content, notes);
|
|
|
|
|
crate::db::write_content(story_id, &updated);
|
|
|
|
|
// Re-sync to DB.
|
|
|
|
|
crate::db::write_item_with_content(story_id, "2_current", &updated);
|
2026-04-04 15:20:35 +00:00
|
|
|
}
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Move any work item to an arbitrary pipeline stage by searching all stages.
|
|
|
|
|
///
|
|
|
|
|
/// Accepts `target_stage` as one of: `backlog`, `current`, `qa`, `merge`, `done`.
|
|
|
|
|
/// Idempotent: if the item is already in the target stage, returns Ok.
|
|
|
|
|
/// Returns `(from_stage, to_stage)` on success.
|
|
|
|
|
pub fn move_story_to_stage(
|
|
|
|
|
project_root: &Path,
|
|
|
|
|
story_id: &str,
|
|
|
|
|
target_stage: &str,
|
|
|
|
|
) -> Result<(String, String), String> {
|
2026-04-04 15:20:35 +00:00
|
|
|
const STAGES: &[(&str, &str)] = &[
|
2026-03-22 19:07:07 +00:00
|
|
|
("backlog", "1_backlog"),
|
|
|
|
|
("current", "2_current"),
|
|
|
|
|
("qa", "3_qa"),
|
|
|
|
|
("merge", "4_merge"),
|
|
|
|
|
("done", "5_done"),
|
2026-04-04 15:20:35 +00:00
|
|
|
("archived", "6_archived"),
|
2026-03-22 19:07:07 +00:00
|
|
|
];
|
|
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
let target_dir = STAGES
|
2026-03-22 19:07:07 +00:00
|
|
|
.iter()
|
2026-04-04 15:20:35 +00:00
|
|
|
.filter(|(name, _)| *name != "archived")
|
2026-03-22 19:07:07 +00:00
|
|
|
.find(|(name, _)| *name == target_stage)
|
|
|
|
|
.map(|(_, dir)| *dir)
|
|
|
|
|
.ok_or_else(|| {
|
|
|
|
|
format!(
|
|
|
|
|
"Invalid target_stage '{target_stage}'. Must be one of: backlog, current, qa, merge, done"
|
|
|
|
|
)
|
|
|
|
|
})?;
|
|
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
let all_dirs: Vec<&str> = STAGES.iter().map(|(_, dir)| *dir).collect();
|
2026-03-22 19:07:07 +00:00
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
match move_item(project_root, story_id, &all_dirs, target_dir, &[], false, &[])
|
|
|
|
|
.map_err(|_| format!("Work item '{story_id}' not found in any pipeline stage."))?
|
|
|
|
|
{
|
|
|
|
|
Some(src_dir) => {
|
|
|
|
|
let from_stage = STAGES
|
|
|
|
|
.iter()
|
|
|
|
|
.find(|(_, dir)| *dir == src_dir)
|
|
|
|
|
.map(|(name, _)| *name)
|
|
|
|
|
.unwrap_or(src_dir);
|
|
|
|
|
Ok((from_stage.to_string(), target_stage.to_string()))
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
2026-04-04 15:20:35 +00:00
|
|
|
None => Ok((target_stage.to_string(), target_stage.to_string())),
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Move a bug from `work/2_current/` or `work/1_backlog/` to `work/5_done/`.
|
2026-03-22 19:07:07 +00:00
|
|
|
///
|
2026-04-04 15:20:35 +00:00
|
|
|
/// Idempotent if already in `5_done/`. Errors if not found in `2_current/` or `1_backlog/`.
|
2026-03-22 19:07:07 +00:00
|
|
|
pub fn close_bug_to_archive(project_root: &Path, bug_id: &str) -> Result<(), String> {
|
2026-04-04 15:20:35 +00:00
|
|
|
move_item(
|
|
|
|
|
project_root,
|
|
|
|
|
bug_id,
|
|
|
|
|
&["2_current", "1_backlog"],
|
|
|
|
|
"5_done",
|
|
|
|
|
&[],
|
|
|
|
|
false,
|
|
|
|
|
&[],
|
|
|
|
|
)
|
|
|
|
|
.map(|_| ())
|
2026-03-22 19:07:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;

    // ── move_story_to_current tests ────────────────────────────────────────────

    // Exercises the legacy filesystem fallback path of move_item: the item
    // exists only as a .md file under .huskies/work/, not in the DB/CRDT.
    #[test]
    fn move_story_to_current_from_filesystem() {
        let tmp = tempfile::tempdir().unwrap();
        let backlog = tmp.path().join(".huskies/work/1_backlog");
        let current = tmp.path().join(".huskies/work/2_current");
        std::fs::create_dir_all(&backlog).unwrap();
        std::fs::create_dir_all(&current).unwrap();
        std::fs::write(
            backlog.join("10_story_foo.md"),
            "---\nname: Test\n---\n# Story\n",
        )
        .unwrap();

        move_story_to_current(tmp.path(), "10_story_foo").unwrap();

        // Verify the story was moved to current.
        assert!(
            current.join("10_story_foo.md").exists(),
            "story should be in 2_current/"
        );
        assert!(
            !backlog.join("10_story_foo.md").exists(),
            "story should not still be in 1_backlog/"
        );
    }

    // move_story_to_current passes missing_ok = true, so a missing item is a
    // logged no-op rather than an error.
    #[test]
    fn move_story_to_current_noop_when_not_found() {
        let tmp = tempfile::tempdir().unwrap();
        assert!(move_story_to_current(tmp.path(), "99_missing").is_ok());
    }

    // ── item_type_from_id tests ────────────────────────────────────────────────

    #[test]
    fn item_type_from_id_detects_types() {
        assert_eq!(item_type_from_id("1_bug_test"), "bug");
        assert_eq!(item_type_from_id("1_spike_research"), "spike");
        assert_eq!(item_type_from_id("50_story_my_story"), "story");
        assert_eq!(item_type_from_id("1_story_simple"), "story");
    }

    // ── feature_branch_has_unmerged_changes tests ────────────────────────────

    // Initialize a git repo at `repo` with identity config and one empty
    // commit so HEAD exists and branch/log operations have a base.
    fn init_git_repo(repo: &std::path::Path) {
        Command::new("git")
            .args(["init"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["config", "user.email", "test@test.com"])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["config", "user.name", "Test"])
            .current_dir(repo)
            .output()
            .unwrap();
        // --allow-empty: no tracked files yet, but a commit is needed for HEAD.
        Command::new("git")
            .args(["commit", "--allow-empty", "-m", "init"])
            .current_dir(repo)
            .output()
            .unwrap();
    }

    /// Bug 226: feature_branch_has_unmerged_changes returns true when the
    /// feature branch has commits not on master.
    #[test]
    fn feature_branch_has_unmerged_changes_detects_unmerged_code() {
        use std::fs;
        use tempfile::tempdir;

        let tmp = tempdir().unwrap();
        let repo = tmp.path();
        init_git_repo(repo);

        // Create a feature branch with a code commit.
        Command::new("git")
            .args(["checkout", "-b", "feature/story-50_story_test"])
            .current_dir(repo)
            .output()
            .unwrap();
        fs::write(repo.join("feature.rs"), "fn main() {}").unwrap();
        Command::new("git")
            .args(["add", "."])
            .current_dir(repo)
            .output()
            .unwrap();
        Command::new("git")
            .args(["commit", "-m", "add feature"])
            .current_dir(repo)
            .output()
            .unwrap();
        // Return to master so the feature commit is unmerged relative to HEAD.
        Command::new("git")
            .args(["checkout", "master"])
            .current_dir(repo)
            .output()
            .unwrap();

        assert!(
            feature_branch_has_unmerged_changes(repo, "50_story_test"),
            "should detect unmerged changes on feature branch"
        );
    }

    /// Bug 226: feature_branch_has_unmerged_changes returns false when no
    /// feature branch exists.
    #[test]
    fn feature_branch_has_unmerged_changes_false_when_no_branch() {
        use tempfile::tempdir;

        let tmp = tempdir().unwrap();
        let repo = tmp.path();
        init_git_repo(repo);

        assert!(
            !feature_branch_has_unmerged_changes(repo, "99_nonexistent"),
            "should return false when no feature branch"
        );
    }
}
|