Allow simultaneous rebasing of duplicate commits
Fixes #694
ilyagr committed Jan 15, 2023
1 parent f3955af commit 9bec9ec
Showing 3 changed files with 36 additions and 10 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -67,6 +67,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   the jj repo from a Git repo. They will now be considered as non-existent if
   referenced explicitly instead of crashing.
 
+* `jj duplicate` followed by `jj rebase` of a tree containing both the original
+  and duplicate commit no longer crashes.
+
 ### Contributors
 
 Thanks to the people who made this release happen!
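Before the real diff below, a toy model of the fix may help. The crash happened because duplicated commits could serialize to the identical Git commit, and the fix retries the write with the committer timestamp nudged back until the collision clears. The following is an editor's self-contained sketch, not jj code: none of these types are jj's, and the one-second id granularity stands in for Git hashing timestamps at whole-second resolution.

use std::collections::HashSet;

#[derive(Debug)]
enum BackendError {
    AlreadyHaveCommitWithId,
}

struct Commit {
    timestamp_ms: i64, // committer timestamp, milliseconds since the epoch
}

#[derive(Default)]
struct Backend {
    ids: HashSet<i64>,
}

impl Backend {
    fn write_commit(&mut self, commit: &Commit) -> Result<i64, BackendError> {
        // Toy "content hash": Git stores timestamps in whole seconds, so
        // commits that differ only in sub-second time collide.
        let id = commit.timestamp_ms.div_euclid(1000);
        if self.ids.insert(id) {
            Ok(id)
        } else {
            Err(BackendError::AlreadyHaveCommitWithId)
        }
    }
}

fn write_commit_fudging(backend: &mut Backend, mut commit: Commit) -> i64 {
    loop {
        match backend.write_commit(&commit) {
            // On a collision, step back one full second and try again.
            Err(BackendError::AlreadyHaveCommitWithId) => commit.timestamp_ms -= 1000,
            Ok(id) => break id,
        }
    }
}

fn main() {
    let mut backend = Backend::default();
    let at_16s = || Commit { timestamp_ms: 16_000 };
    // Three identical commits, as when a rebase rewrites a commit and its
    // two duplicates onto the same parent: they come out one second apart.
    assert_eq!(write_commit_fudging(&mut backend, at_16s()), 16);
    assert_eq!(write_commit_fudging(&mut backend, at_16s()), 15);
    assert_eq!(write_commit_fudging(&mut backend, at_16s()), 14);
}

Run as-is, the three writes land one second apart, which is the same staggered pattern (04:05:16, 04:05:15, 04:05:14) that the updated test below asserts.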
23 changes: 18 additions & 5 deletions lib/src/store.rs
@@ -16,9 +16,9 @@ use std::collections::HashMap;
 use std::io::Read;
 use std::sync::{Arc, RwLock};
 
-use crate::backend;
 use crate::backend::{
-    Backend, BackendResult, CommitId, Conflict, ConflictId, FileId, SymlinkId, TreeId,
+    self, Backend, BackendError, BackendResult, CommitId, Conflict, ConflictId, FileId, SymlinkId,
+    TreeId,
 };
 use crate::commit::Commit;
 use crate::repo_path::RepoPath;
@@ -81,11 +81,24 @@ impl Store {
         write_locked_cache.insert(id.clone(), data.clone());
         Ok(data)
     }
 
     pub fn write_commit(self: &Arc<Self>, commit: backend::Commit) -> BackendResult<Commit> {
         assert!(!commit.parents.is_empty());
-        let commit_id = self.backend.write_commit(&commit)?;
-        let data = Arc::new(commit);
+        let mut mut_commit = commit;
+        let commit_id = loop {
+            // It's possible that another commit with the same commit id (but different
+            // change id) already exists. Fudge the timestamp until this is no
+            // longer the case.
+            let maybe_commit_id = self.backend.write_commit(&mut_commit);
+            match maybe_commit_id {
+                Err(BackendError::AlreadyHaveCommitWithId { .. }) => {
+                    // This is measured in milliseconds, and Git can't handle durations
+                    // less than 1 second.
+                    mut_commit.committer.timestamp.timestamp.0 -= 1000
+                }
+                _ => break maybe_commit_id?,
+            }
+        };
+        let data = Arc::new(mut_commit);
         {
             let mut write_locked_cache = self.commit_cache.write().unwrap();
             write_locked_cache.insert(commit_id.clone(), data.clone());
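Why does stepping the committer timestamp back by 1000 milliseconds resolve the collision? Git identifies a commit by a hash of its content, including the committer timestamp, which it stores at whole-second resolution; jj additionally keeps per-commit metadata (the change id) keyed by that commit id, which is what produced the "already exists with different associated non-Git meta-data" error asserted in the old test below. The following standalone sketch, an editor's illustration using the git2 and tempfile crates rather than code from this commit (names and timestamps are arbitrary), demonstrates both halves of that reasoning.

// Editor's sketch, not part of this commit. Assumes the `git2` and
// `tempfile` crates as dependencies.
use git2::{Repository, Signature, Time};

fn main() -> Result<(), git2::Error> {
    let dir = tempfile::tempdir().expect("failed to create temp dir");
    let repo = Repository::init(dir.path())?;

    // An empty tree is enough for the demonstration.
    let tree = repo.find_tree(repo.treebuilder(None)?.write()?)?;
    let when = Time::new(981_173_106, 0); // arbitrary whole-second timestamp
    let sig = Signature::new("someone", "someone@example.com", &when)?;

    // Byte-identical commits are content-addressed to the same id, so a
    // duplicate produces no second Git object to hang jj's metadata on.
    let c1 = repo.commit(None, &sig, &sig, "duplicate me", &tree, &[])?;
    let c2 = repo.commit(None, &sig, &sig, "duplicate me", &tree, &[])?;
    assert_eq!(c1, c2);

    // Moving the committer timestamp back one second (Git's smallest
    // representable step) yields a distinct commit id, which is what the
    // retry loop in write_commit above relies on.
    let earlier = Signature::new(
        "someone",
        "someone@example.com",
        &Time::new(981_173_105, 0),
    )?;
    let c3 = repo.commit(None, &earlier, &earlier, "duplicate me", &tree, &[])?;
    assert_ne!(c1, c3);
    Ok(())
}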
20 changes: 15 additions & 5 deletions tests/test_duplicate_command.rs
@@ -127,10 +127,21 @@ fn test_rebase_duplicates() {
     o 000000000000 (no description set) @ 1970-01-01 00:00:00.000 +00:00
     "###);
 
-    // This is the bug: this should succeed
-    let stderr = test_env.jj_cmd_failure(&repo_path, &["rebase", "-s", "a", "-d", "a-"]);
-    insta::assert_snapshot!(stderr, @r###"
-    Error: Unexpected error from backend: Git commit '29bd36b60e6002f04e03c5077f989c93e3c910e1' already exists with different associated non-Git meta-data
+    let stdout = test_env.jj_cmd_success(&repo_path, &["rebase", "-s", "a", "-d", "a-"]);
+    insta::assert_snapshot!(stdout, @r###"
+    Rebased 4 commits
+    Working copy now at: 29bd36b60e60 b
+    "###);
+    // One of the duplicate commits' timestamps was changed a little to make it
+    // have a different commit id from the other.
+    insta::assert_snapshot!(get_log_output_with_ts(&test_env, &repo_path), @r###"
+    o b43fe7354758 b @ 2001-02-03 04:05:14.000 +07:00
+    | o 08beb14c3ead b @ 2001-02-03 04:05:15.000 +07:00
+    |/
+    | @ 29bd36b60e60 b @ 2001-02-03 04:05:16.000 +07:00
+    |/
+    o 2f6dc5a1ffc2 a @ 2001-02-03 04:05:16.000 +07:00
+    o 000000000000 (no description set) @ 1970-01-01 00:00:00.000 +00:00
     "###);
 }
 fn get_log_output(test_env: &TestEnvironment, repo_path: &Path) -> String {
@@ -144,7 +155,6 @@ fn get_log_output(test_env: &TestEnvironment, repo_path: &Path) -> String {
     )
 }
 
-// The timestamp is relevant for the bugfix
 fn get_log_output_with_ts(test_env: &TestEnvironment, repo_path: &Path) -> String {
     test_env.jj_cmd_success(
         repo_path,
