diff --git a/mercurial/commands.py b/mercurial/commands.py
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_bWVyY3VyaWFsL2NvbW1hbmRzLnB5..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_bWVyY3VyaWFsL2NvbW1hbmRzLnB5 100644
--- a/mercurial/commands.py
+++ b/mercurial/commands.py
@@ -571,7 +571,7 @@
     )
 
     def bad(x, y):
-        raise error.InputError(b"%s: %s" % (x, y))
+        raise error.Abort(b"%s: %s" % (x, y))
 
     m = scmutil.match(ctx, pats, opts, badfn=bad)
 
diff --git a/rust/Cargo.lock b/rust/Cargo.lock
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9DYXJnby5sb2Nr..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9DYXJnby5sb2Nr 100644
--- a/rust/Cargo.lock
+++ b/rust/Cargo.lock
@@ -842,6 +842,12 @@
 ]
 
 [[package]]
+name = "itoa"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+
+[[package]]
 name = "jobserver"
 version = "0.1.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1359,6 +1365,7 @@
  "logging_timer",
  "rayon",
  "regex",
+ "serde_json",
  "shellexpand",
  "which",
  "whoami",
@@ -1387,6 +1394,12 @@
 ]
 
 [[package]]
+name = "ryu"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+
+[[package]]
 name = "same-file"
 version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1420,5 +1433,5 @@
 
 [[package]]
 name = "serde"
-version = "1.0.215"
+version = "1.0.218"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1424,8 +1437,8 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
+checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
@@ -1426,8 +1439,8 @@
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.215"
+version = "1.0.218"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1433,5 +1446,5 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
+checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1439,6 +1452,18 @@
 ]
 
 [[package]]
+name = "serde_json"
+version = "1.0.140"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
+dependencies = [
+ "itoa",
+ "memchr",
+ "ryu",
+ "serde",
+]
+
+[[package]]
 name = "serde_spanned"
 version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/rust/hg-core/src/config/mod.rs b/rust/hg-core/src/config/mod.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9oZy1jb3JlL3NyYy9jb25maWcvbW9kLnJz..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9oZy1jb3JlL3NyYy9jb25maWcvbW9kLnJz 100644
--- a/rust/hg-core/src/config/mod.rs
+++ b/rust/hg-core/src/config/mod.rs
@@ -23,6 +23,7 @@
 use self::layer::ConfigValue;
 use crate::errors::HgError;
 use crate::errors::{HgResultExt, IoResultExt};
+use crate::exit_codes;
 use crate::utils::files::get_bytes_from_os_str;
 use format_bytes::{write_bytes, DisplayBytes};
 use std::collections::HashSet;
@@ -841,6 +842,32 @@
             _ => None,
         }
     }
+
+    /// Returns the default username to be used in commits. Like ui.username()
+    /// in Python with acceptempty=False, but aborts rather than prompting for
+    /// input or falling back to the OS username and hostname.
+    pub fn username(&self) -> Result<Vec<u8>, HgError> {
+        if let Some(value) = env::var_os("HGUSER") {
+            if !value.is_empty() {
+                return Ok(value.into_encoded_bytes());
+            }
+        }
+        if let Some(value) = self.get_str(b"ui", b"username")? {
+            if !value.is_empty() {
+                return Ok(value.as_bytes().to_vec());
+            }
+        }
+        if let Some(value) = env::var_os("EMAIL") {
+            if !value.is_empty() {
+                return Ok(value.into_encoded_bytes());
+            }
+        }
+        Err(HgError::abort(
+            "no username supplied",
+            exit_codes::ABORT,
+            Some("use 'hg config --edit' to set your username".to_string()),
+        ))
+    }
 }
 
 /// Corresponds to `usage.resources[.<dimension>]`.
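
The new `Config::username` mirrors the precedence of Python's `ui.username(acceptempty=False)`: $HGUSER, then `[ui] username`, then $EMAIL, otherwise an abort. A minimal caller sketch (the wrapper function name is illustrative, not part of this change):

  // Minimal caller sketch; `Config` and `HgError` are the hg-core types from
  // the hunk above, and the function name is illustrative.
  fn commit_author(
      config: &hg::config::Config,
  ) -> Result<Vec<u8>, hg::errors::HgError> {
      // Precedence: $HGUSER, then [ui] username, then $EMAIL; otherwise an
      // abort error suggesting `hg config --edit`.
      config.username()
  }
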
diff --git a/rust/hg-core/src/operations/annotate.rs b/rust/hg-core/src/operations/annotate.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL2Fubm90YXRlLnJz..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL2Fubm90YXRlLnJz 100644
--- a/rust/hg-core/src/operations/annotate.rs
+++ b/rust/hg-core/src/operations/annotate.rs
@@ -1,4 +1,2 @@
-use std::borrow::Cow;
-
 use crate::{
     bdiff::{self, Lines},
@@ -3,5 +1,6 @@
 use crate::{
     bdiff::{self, Lines},
+    dirstate::{owning::OwningDirstateMap, DirstateError},
     errors::HgError,
     repo::Repo,
     revlog::{
@@ -5,9 +4,8 @@
     errors::HgError,
     repo::Repo,
     revlog::{
-        changelog::Changelog,
-        filelog::{Filelog, FilelogRevisionData},
-        manifest::Manifestlog,
+        changelog::Changelog, filelog::Filelog, manifest::Manifestlog,
+        RevisionOrWdir,
     },
     utils::{
         self,
@@ -20,6 +18,7 @@
 use itertools::Itertools as _;
 use rayon::prelude::*;
 use self_cell::self_cell;
+use std::{borrow::Cow, cell::Ref};
 
 /// Options for [`annotate`].
 #[derive(Copy, Clone)]
@@ -53,7 +52,7 @@
     /// file's current path if it was copied or renamed in the past.
     pub path: HgPathBuf,
     /// The changelog revision that introduced the line.
-    pub revision: Revision,
+    pub revision: RevisionOrWdir,
     /// The one-based line number in the original file.
     pub line_number: u32,
 }
@@ -101,4 +100,45 @@
     line_number: u32,
 }
 
+/// [`Repo`] and related objects that often need to be passed together.
+struct RepoState<'a> {
+    repo: &'a Repo,
+    changelog: Ref<'a, Changelog>,
+    manifestlog: Ref<'a, Manifestlog>,
+    dirstate_parents: Option<[Revision; 2]>,
+    dirstate_map: Option<Ref<'a, OwningDirstateMap>>,
+}
+
+impl<'a> RepoState<'a> {
+    fn new(repo: &'a Repo, include_dirstate: bool) -> Result<Self, HgError> {
+        let changelog = repo.changelog()?;
+        let manifestlog = repo.manifestlog()?;
+        let (dirstate_parents, dirstate_map) = if include_dirstate {
+            let crate::DirstateParents { p1, p2 } = repo.dirstate_parents()?;
+            let p1 = changelog.rev_from_node(p1.into())?;
+            let p2 = changelog.rev_from_node(p2.into())?;
+            let dirstate_map =
+                repo.dirstate_map().map_err(from_dirstate_error)?;
+            (Some([p1, p2]), Some(dirstate_map))
+        } else {
+            (None, None)
+        };
+        Ok(Self {
+            repo,
+            changelog,
+            manifestlog,
+            dirstate_parents,
+            dirstate_map,
+        })
+    }
+
+    fn dirstate_parents(&self) -> [Revision; 2] {
+        self.dirstate_parents.expect("should be set for wdir")
+    }
+
+    fn dirstate_map(&'a self) -> &'a OwningDirstateMap {
+        self.dirstate_map.as_ref().expect("should be set for wdir")
+    }
+}
+
 /// Helper for keeping track of multiple filelogs.
@@ -104,4 +144,5 @@
 /// Helper for keeping track of multiple filelogs.
+/// Also abstracts over reading from filelogs and from the working directory.
 #[derive(Default)]
 struct FilelogSet {
     /// List of filelogs. The first one is for the root file being blamed.
@@ -121,7 +162,15 @@
 
 /// Identifies a file revision in a FilelogSet.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct FileId {
+enum FileId {
+    /// The file in the working directory.
+    Wdir,
+    /// A revision of the file in a filelog.
+    Rev(RevFileId),
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct RevFileId {
     index: FilelogIndex,
     revision: Revision,
 }
@@ -150,9 +199,9 @@
         Ok(index)
     }
 
-    /// Opens a new filelog by path and returns the id for the given file node.
+    /// Opens a filelog by path and returns the id for the given file node.
     fn open_at_node(
         &mut self,
         repo: &Repo,
         path: &HgPath,
         node: Node,
@@ -154,9 +203,9 @@
     fn open_at_node(
         &mut self,
         repo: &Repo,
         path: &HgPath,
         node: Node,
-    ) -> Result<FileId, HgError> {
+    ) -> Result<RevFileId, HgError> {
         let index = self.open(repo, path)?;
         let revision =
             self.get(index).filelog.revlog.rev_from_node(node.into())?;
@@ -160,7 +209,51 @@
         let index = self.open(repo, path)?;
         let revision =
             self.get(index).filelog.revlog.rev_from_node(node.into())?;
-        Ok(FileId { index, revision })
+        Ok(RevFileId { index, revision })
+    }
+
+    /// Opens a filelog by path and returns the id for the given changelog
+    /// revision. Returns `None` if no filelog exists for that path.
+    fn open_at_changelog_rev(
+        &mut self,
+        state: &RepoState,
+        path: &HgPath,
+        changelog_revision: Revision,
+    ) -> Result<Option<RevFileId>, HgError> {
+        let changelog_data =
+            state.changelog.entry(changelog_revision)?.data()?;
+        let manifest = state
+            .manifestlog
+            .data_for_node(changelog_data.manifest_node()?.into())?;
+        let Some(entry) = manifest.find_by_path(path)? else {
+            return Ok(None);
+        };
+        let node = entry.node_id()?;
+        Ok(Some(self.open_at_node(state.repo, path, node)?))
+    }
+
+    /// Opens and reads a file by path at a changelog revision (or working
+    /// directory), returning its id and contents. Returns `None` if not found.
+    fn open_and_read(
+        &mut self,
+        state: &RepoState,
+        path: &HgPath,
+        revision: RevisionOrWdir,
+    ) -> Result<Option<(FileId, Vec<u8>)>, HgError> {
+        match revision.exclude_wdir() {
+            Some(revision) => {
+                match self.open_at_changelog_rev(state, path, revision)? {
+                    None => Ok(None),
+                    Some(id) => Ok(Some((FileId::Rev(id), self.read(id)?))),
+                }
+            }
+            None => {
+                let fs_path = utils::hg_path::hg_path_to_path_buf(path)?;
+                let maybe_data =
+                    state.repo.working_directory_vfs().try_read(fs_path)?;
+                Ok(maybe_data.map(|data| (FileId::Wdir, data)))
+            }
+        }
     }
 
     /// Reads the contents of a file by id.
@@ -164,8 +257,9 @@
     }
 
     /// Reads the contents of a file by id.
-    fn read(&self, id: FileId) -> Result<FilelogRevisionData, HgError> {
-        self.get(id.index).filelog.entry(id.revision)?.data()
+    fn read(&self, id: RevFileId) -> Result<Vec<u8>, HgError> {
+        let filelog = &self.get(id.index).filelog;
+        filelog.entry(id.revision)?.data()?.into_file_data()
     }
 
     /// Returns the parents of a file. If `follow_copies` is true, it treats
@@ -173,7 +267,8 @@
     /// (since it has to read the file to extract the copy metadata).
     fn parents(
         &mut self,
-        repo: &Repo,
+        state: &RepoState,
+        base_path: &HgPath,
         id: FileId,
         follow_copies: bool,
     ) -> Result<(Vec<FileId>, Option<Vec<u8>>), HgError> {
@@ -177,6 +272,23 @@
         id: FileId,
         follow_copies: bool,
     ) -> Result<(Vec<FileId>, Option<Vec<u8>>), HgError> {
+        let mut parents = Vec::<FileId>::with_capacity(2);
+        let FileId::Rev(id) = id else {
+            // If a file in the working directory is copied/renamed, its parent
+            // is the copy source (just as it will be after committing).
+            let path = state
+                .dirstate_map()
+                .copy_map_get(base_path)?
+                .unwrap_or(base_path);
+            for rev in state.dirstate_parents() {
+                if let Some(id) =
+                    self.open_at_changelog_rev(state, path, rev)?
+                {
+                    parents.push(FileId::Rev(id));
+                }
+            }
+            return Ok((parents, None));
+        };
         let filelog = &self.get(id.index).filelog;
         let revisions =
             filelog.parents(id.revision).map_err(from_graph_error)?;
@@ -180,6 +292,5 @@
         let filelog = &self.get(id.index).filelog;
         let revisions =
             filelog.parents(id.revision).map_err(from_graph_error)?;
-        let mut parents = Vec::with_capacity(2);
         let mut file_data = None;
         if revisions[0] != NULL_REVISION {
@@ -184,5 +295,5 @@
         let mut file_data = None;
         if revisions[0] != NULL_REVISION {
-            parents.push(FileId {
+            parents.push(FileId::Rev(RevFileId {
                 index: id.index,
                 revision: revisions[0],
@@ -187,6 +298,6 @@
                 index: id.index,
                 revision: revisions[0],
-            });
+            }));
         } else if follow_copies {
             // A null p1 indicates there might be copy metadata.
             // Check for it, and if present use it as the parent.
@@ -195,8 +306,10 @@
             // If copy or copyrev occurs without the other, ignore it.
             // This matches filerevisioncopied in storageutil.py.
             if let (Some(copy), Some(copyrev)) = (meta.copy, meta.copyrev) {
-                parents.push(self.open_at_node(repo, copy, copyrev)?);
+                parents.push(FileId::Rev(
+                    self.open_at_node(state.repo, copy, copyrev)?,
+                ));
             }
             file_data = Some(data.into_file_data()?);
         }
         if revisions[1] != NULL_REVISION {
@@ -199,7 +312,7 @@
             }
             file_data = Some(data.into_file_data()?);
         }
         if revisions[1] != NULL_REVISION {
-            parents.push(FileId {
+            parents.push(FileId::Rev(RevFileId {
                 index: id.index,
                 revision: revisions[1],
@@ -204,6 +317,6 @@
                 index: id.index,
                 revision: revisions[1],
-            });
+            }));
         }
         Ok((parents, file_data))
     }
@@ -213,6 +326,8 @@
 #[derive(Default)]
 struct FileInfo {
     /// Parents of this revision (via p1 and p2 or copy metadata).
+    /// These are always `FileId::Rev`, not `FileId::Wdir`, but we store
+    /// `FileId` because everything would have to convert to it anyway.
     parents: Option<Vec<FileId>>,
     /// Current state for annotating the file.
     file: AnnotatedFileState,
@@ -222,7 +337,7 @@
     revision: ChangelogRevisionState,
     /// The value of `revision` from a descendant. If the linkrev needs
     /// adjustment, we can start iterating the changelog here.
-    descendant: Option<Revision>,
+    descendant: Option<RevisionOrWdir>,
 }
 
 /// State enum for reading a file and annotating it.
@@ -241,7 +356,7 @@
     #[default]
     NotNeeded,
     Needed,
-    Done(Revision),
+    Done(RevisionOrWdir),
 }
 
 /// A collection of [`FileInfo`], forming a graph via [`FileInfo::parents`].
@@ -271,7 +386,7 @@
 pub fn annotate(
     repo: &Repo,
     path: &HgPath,
-    changelog_revision: Revision,
+    changelog_revision: RevisionOrWdir,
     options: AnnotateOptions,
 ) -> Result<AnnotateOutput, HgError> {
     // Step 1: Load the base file and check if it's binary.
@@ -275,6 +390,5 @@
     options: AnnotateOptions,
 ) -> Result<AnnotateOutput, HgError> {
     // Step 1: Load the base file and check if it's binary.
-    let changelog = repo.changelog()?;
-    let manifestlog = repo.manifestlog()?;
+    let state = RepoState::new(repo, changelog_revision.is_wdir())?;
     let mut fls = FilelogSet::default();
@@ -280,10 +394,6 @@
     let mut fls = FilelogSet::default();
-    let base_id = {
-        let changelog_data = changelog.entry(changelog_revision)?.data()?;
-        let manifest = manifestlog
-            .data_for_node(changelog_data.manifest_node()?.into())?;
-        let Some(entry) = manifest.find_by_path(path)? else {
-            return Ok(AnnotateOutput::NotFound);
-        };
-        fls.open_at_node(repo, path, entry.node_id()?)?
+    let Some((base_id, base_file_data)) =
+        fls.open_and_read(&state, path, changelog_revision)?
+    else {
+        return Ok(AnnotateOutput::NotFound);
     };
@@ -289,5 +399,4 @@
     };
-    let base_file_data = fls.read(base_id)?.into_file_data()?;
     if !options.treat_binary_as_text
         && utils::files::is_binary(&base_file_data)
     {
@@ -303,7 +412,7 @@
             continue;
         }
         let (parents, file_data) =
-            fls.parents(repo, id, options.follow_copies)?;
+            fls.parents(&state, path, id, options.follow_copies)?;
         info.parents = Some(parents.clone());
         if let Some(data) = file_data {
             info.file = AnnotatedFileState::Read(OwnedLines::split(
@@ -336,4 +445,7 @@
     graph.0.par_iter_mut().try_for_each(
         |(&id, info)| -> Result<(), HgError> {
             if let AnnotatedFileState::None = info.file {
+                let FileId::Rev(id) = id else {
+                    unreachable!("only the base file can be wdir");
+                };
                 info.file = AnnotatedFileState::Read(OwnedLines::split(
@@ -339,5 +451,5 @@
                 info.file = AnnotatedFileState::Read(OwnedLines::split(
-                    fls.read(id)?.into_file_data()?,
+                    fls.read(id)?,
                     options.whitespace,
                 )?);
             }
@@ -411,7 +523,7 @@
     }
     // Use the same object for all ancestor checks, since it internally
     // builds a hash set of seen revisions.
-    let mut ancestors = ancestor_iter(&changelog, changelog_revision, None);
+    let mut ancestors = ancestor_iter(&state, changelog_revision, None);
     // Iterate in reverse topological order so that we visit nodes after their
     // children, that way we can propagate `descendant` correctly.
     for &id in topological_order.iter().rev() {
@@ -422,8 +534,7 @@
             ChangelogRevisionState::NotNeeded => descendant,
             ChangelogRevisionState::Needed => {
                 let revision = adjust_link_revision(
-                    &changelog,
-                    &manifestlog,
+                    &state,
                     &fls,
                     &mut ancestors,
                     descendant,
@@ -446,7 +557,10 @@
     let mut changeset_annotations = Vec::with_capacity(annotations.len());
     for Annotation { id, line_number } in annotations {
         changeset_annotations.push(ChangesetAnnotation {
-            path: fls.get(id.index).path.clone(),
+            path: match id {
+                FileId::Wdir => path.into(),
+                FileId::Rev(id) => fls.get(id.index).path.clone(),
+            },
             revision: match graph[id].revision {
                 ChangelogRevisionState::Done(revision) => revision,
                 _ => unreachable!(),
@@ -487,7 +601,7 @@
 
 /// Creates an iterator over the ancestors of `base_revision` (inclusive),
 /// stopping at `stop_revision` if provided. Panics if `base_revision` is null.
-fn ancestor_iter(
-    changelog: &Changelog,
-    base_revision: Revision,
+fn ancestor_iter<'a>(
+    state: &'a RepoState<'a>,
+    base_revision: RevisionOrWdir,
     stop_revision: Option<Revision>,
@@ -493,3 +607,8 @@
     stop_revision: Option<Revision>,
-) -> AncestorsIterator<&Changelog> {
+) -> AncestorsIterator<&'a Changelog> {
+    let base_revisions: &[Revision] = match base_revision.exclude_wdir() {
+        Some(rev) => &[rev],
+        None => &state.dirstate_parents(),
+    };
+    let stop_revision = stop_revision.unwrap_or(NULL_REVISION);
     AncestorsIterator::new(
@@ -495,7 +614,7 @@
     AncestorsIterator::new(
-        changelog,
-        [base_revision],
-        stop_revision.unwrap_or(NULL_REVISION),
+        &*state.changelog,
+        base_revisions.iter().copied(),
+        stop_revision,
         true,
     )
     .expect("base_revision should not be null")
@@ -504,7 +623,6 @@
 /// If the linkrev of `id` is in `ancestors`, returns it. Otherwise, finds and
 /// returns the first ancestor of `descendant` that introduced `id`.
 fn adjust_link_revision(
-    changelog: &Changelog,
-    manifestlog: &Manifestlog,
+    state: &RepoState<'_>,
     fls: &FilelogSet,
     ancestors: &mut AncestorsIterator<&Changelog>,
@@ -509,4 +627,4 @@
     fls: &FilelogSet,
     ancestors: &mut AncestorsIterator<&Changelog>,
-    descendant: Revision,
+    descendant: RevisionOrWdir,
     id: FileId,
@@ -512,5 +630,8 @@
     id: FileId,
-) -> Result<Revision, HgError> {
+) -> Result<RevisionOrWdir, HgError> {
+    let FileId::Rev(id) = id else {
+        return Ok(RevisionOrWdir::wdir());
+    };
     let FilelogSetItem { filelog, path } = fls.get(id.index);
     let linkrev = filelog
         .revlog
@@ -514,5 +635,5 @@
     let FilelogSetItem { filelog, path } = fls.get(id.index);
     let linkrev = filelog
         .revlog
-        .link_revision(id.revision, &changelog.revlog)?;
+        .link_revision(id.revision, &state.changelog.revlog)?;
     if ancestors.contains(linkrev).map_err(from_graph_error)? {
@@ -518,4 +639,4 @@
     if ancestors.contains(linkrev).map_err(from_graph_error)? {
-        return Ok(linkrev);
+        return Ok(linkrev.into());
     }
     let file_node = *filelog.revlog.node_from_rev(id.revision);
@@ -520,4 +641,4 @@
     }
     let file_node = *filelog.revlog.node_from_rev(id.revision);
-    for ancestor in ancestor_iter(changelog, descendant, Some(linkrev)) {
+    for ancestor in ancestor_iter(state, descendant, Some(linkrev)) {
         let ancestor = ancestor.map_err(from_graph_error)?;
@@ -523,3 +644,3 @@
         let ancestor = ancestor.map_err(from_graph_error)?;
-        let data = changelog.entry(ancestor)?.data()?;
+        let data = state.changelog.entry(ancestor)?.data()?;
         if data.files().contains(&path.as_ref()) {
@@ -525,4 +646,5 @@
         if data.files().contains(&path.as_ref()) {
-            let manifest_rev = manifestlog
+            let manifest_rev = state
+                .manifestlog
                 .revlog
                 .rev_from_node(data.manifest_node()?.into())?;
@@ -527,7 +649,8 @@
                 .revlog
                 .rev_from_node(data.manifest_node()?.into())?;
-            if let Some(entry) = manifestlog
+            if let Some(entry) = state
+                .manifestlog
                 .inexact_data_delta_parents(manifest_rev)?
                 .find_by_path(path)?
             {
                 if entry.node_id()? == file_node {
@@ -530,11 +653,11 @@
                 .inexact_data_delta_parents(manifest_rev)?
                 .find_by_path(path)?
             {
                 if entry.node_id()? == file_node {
-                    return Ok(ancestor);
+                    return Ok(ancestor.into());
                 }
             }
         }
     }
     // In theory this should be unreachable. But in case it happens, return the
     // linkrev. This matches _adjustlinkrev in context.py.
@@ -535,13 +658,13 @@
                 }
             }
         }
     }
     // In theory this should be unreachable. But in case it happens, return the
     // linkrev. This matches _adjustlinkrev in context.py.
-    Ok(linkrev)
+    Ok(linkrev.into())
 }
 
 /// Converts a [`GraphError`] to an [`HgError`].
 fn from_graph_error(err: GraphError) -> HgError {
     HgError::corrupted(err.to_string())
 }
@@ -542,6 +665,16 @@
 }
 
 /// Converts a [`GraphError`] to an [`HgError`].
 fn from_graph_error(err: GraphError) -> HgError {
     HgError::corrupted(err.to_string())
 }
+
+/// Converts a [`DirstateError`] to an [`HgError`].
+fn from_dirstate_error(err: DirstateError) -> HgError {
+    match err {
+        DirstateError::Map(err) => {
+            HgError::abort_simple(format!("dirstate error: {err}"))
+        }
+        DirstateError::Common(err) => err,
+    }
+}
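
With the signature change above, `annotate` takes a `RevisionOrWdir`; unchanged lines keep the revision that introduced them, while locally edited lines are attributed to wdir. A minimal sketch of blaming the working copy, using only items exported by hg-core (the wrapper name is illustrative):

  use hg::{
      errors::HgError,
      operations::{annotate, AnnotateOptions, AnnotateOutput},
      repo::Repo,
      revlog::RevisionOrWdir,
      utils::hg_path::HgPath,
  };

  // Sketch only: blame the working-directory copy of `path`. Unchanged lines
  // keep the revision that introduced them; locally edited lines come back
  // attributed to `RevisionOrWdir::wdir()`.
  fn blame_working_copy(
      repo: &Repo,
      path: &HgPath,
      options: AnnotateOptions,
  ) -> Result<AnnotateOutput, HgError> {
      annotate(repo, path, RevisionOrWdir::wdir(), options)
  }
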
diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL2NhdC5ycw==..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL2NhdC5ycw== 100644
--- a/rust/hg-core/src/operations/cat.rs
+++ b/rust/hg-core/src/operations/cat.rs
@@ -83,7 +83,11 @@
     revset: &str,
     mut files: Vec<&'a HgPath>,
 ) -> Result<CatOutput<'a>, RevlogError> {
-    let rev = crate::revset::resolve_single(revset, repo)?;
+    let Some(rev) =
+        crate::revset::resolve_single(revset, repo)?.exclude_wdir()
+    else {
+        return Err(HgError::unsupported("cat wdir not implemented").into());
+    };
     let manifest = repo.manifest_for_rev(rev.into())?;
     let mut results: Vec<(&'a HgPath, Vec<u8>)> = vec![];
     let node = *repo.changelog()?.node_from_rev(rev);
diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL2xpc3RfdHJhY2tlZF9maWxlcy5ycw==..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL2xpc3RfdHJhY2tlZF9maWxlcy5ycw== 100644
--- a/rust/hg-core/src/operations/list_tracked_files.rs
+++ b/rust/hg-core/src/operations/list_tracked_files.rs
@@ -22,8 +22,12 @@
     revset: &str,
     narrow_matcher: Box<dyn Matcher + Sync>,
 ) -> Result<FilesForRev, RevlogError> {
-    let rev = crate::revset::resolve_single(revset, repo)?;
-    list_rev_tracked_files(repo, rev.into(), narrow_matcher)
+    match crate::revset::resolve_single(revset, repo)?.exclude_wdir() {
+        Some(rev) => list_rev_tracked_files(repo, rev.into(), narrow_matcher),
+        None => {
+            Err(HgError::unsupported("list wdir files not implemented").into())
+        }
+    }
 }
 
 /// List files under Mercurial control at a given revision.
diff --git a/rust/hg-core/src/operations/mod.rs b/rust/hg-core/src/operations/mod.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL21vZC5ycw==..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9oZy1jb3JlL3NyYy9vcGVyYXRpb25zL21vZC5ycw== 100644
--- a/rust/hg-core/src/operations/mod.rs
+++ b/rust/hg-core/src/operations/mod.rs
@@ -8,7 +8,8 @@
 mod list_tracked_files;
 mod status_rev_rev;
 pub use annotate::{
-    annotate, AnnotateOptions, AnnotateOutput, ChangesetAnnotation,
+    annotate, AnnotateOptions, AnnotateOutput, ChangesetAnnotatedFile,
+    ChangesetAnnotation,
 };
 pub use cat::{cat, CatOutput};
 pub use debugdata::debug_data;
diff --git a/rust/hg-core/src/revlog/mod.rs b/rust/hg-core/src/revlog/mod.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvbW9kLnJz..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvbW9kLnJz 100644
--- a/rust/hg-core/src/revlog/mod.rs
+++ b/rust/hg-core/src/revlog/mod.rs
@@ -126,6 +126,39 @@
 pub const WORKING_DIRECTORY_HEX: &str =
     "ffffffffffffffffffffffffffffffffffffffff";
 
+/// Either a checked revision or the working directory.
+/// Note that [`Revision`] will never hold [`WORKING_DIRECTORY_REVISION`]
+/// because that is not a valid revision in any revlog.
+#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq, Ord, PartialOrd)]
+pub struct RevisionOrWdir(BaseRevision);
+
+impl From<Revision> for RevisionOrWdir {
+    fn from(value: Revision) -> Self {
+        RevisionOrWdir(value.0)
+    }
+}
+
+impl RevisionOrWdir {
+    /// Creates a [`RevisionOrWdir`] representing the working directory.
+    pub fn wdir() -> Self {
+        RevisionOrWdir(WORKING_DIRECTORY_REVISION.0)
+    }
+
+    /// Returns the revision, or `None` if this is the working directory.
+    pub fn exclude_wdir(self) -> Option<Revision> {
+        if self.0 == WORKING_DIRECTORY_REVISION.0 {
+            None
+        } else {
+            Some(Revision(self.0))
+        }
+    }
+
+    /// Returns true if this is the working directory.
+    pub fn is_wdir(&self) -> bool {
+        *self == Self::wdir()
+    }
+}
+
 /// The simplest expression of what we need of Mercurial DAGs.
 pub trait Graph {
     /// Return the two parents of the given `Revision`.
@@ -974,4 +1007,10 @@
             }
         };
     }
+
+    #[test]
+    fn test_revision_or_wdir_ord() {
+        let highest: RevisionOrWdir = Revision(i32::MAX - 1).into();
+        assert!(highest < RevisionOrWdir::wdir());
+    }
 }
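
A short usage sketch of the new type, exercising only the conversions defined above (the function name is illustrative):

  use hg::revlog::{Revision, RevisionOrWdir};

  // Round-trips supported by the new type, using only the methods above.
  fn demo(rev: Revision) {
      let either: RevisionOrWdir = rev.into();
      assert_eq!(either.exclude_wdir(), Some(rev));
      assert!(!either.is_wdir());

      let wdir = RevisionOrWdir::wdir();
      assert!(wdir.is_wdir());
      assert_eq!(wdir.exclude_wdir(), None);
  }
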
diff --git a/rust/hg-core/src/revset.rs b/rust/hg-core/src/revset.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9oZy1jb3JlL3NyYy9yZXZzZXQucnM=..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9oZy1jb3JlL3NyYy9yZXZzZXQucnM= 100644
--- a/rust/hg-core/src/revset.rs
+++ b/rust/hg-core/src/revset.rs
@@ -4,6 +4,6 @@
 
 use crate::errors::HgError;
 use crate::repo::Repo;
-use crate::revlog::NodePrefix;
+use crate::revlog::{NodePrefix, RevisionOrWdir};
 use crate::revlog::{Revision, NULL_REVISION, WORKING_DIRECTORY_HEX};
 use crate::revlog::{Revlog, RevlogError};
@@ -8,6 +8,6 @@
 use crate::revlog::{Revision, NULL_REVISION, WORKING_DIRECTORY_HEX};
 use crate::revlog::{Revlog, RevlogError};
-use crate::Node;
+use crate::{Node, WORKING_DIRECTORY_REVISION};
 
 /// Resolve a query string into a single revision.
 ///
@@ -15,9 +15,9 @@
 pub fn resolve_single(
     input: &str,
     repo: &Repo,
-) -> Result<Revision, RevlogError> {
+) -> Result<RevisionOrWdir, RevlogError> {
     let changelog = repo.changelog()?;
 
     match input {
         "." => {
             let p1 = repo.dirstate_parents()?.p1;
@@ -19,7 +19,7 @@
     let changelog = repo.changelog()?;
 
     match input {
         "." => {
             let p1 = repo.dirstate_parents()?.p1;
-            return changelog.revlog.rev_from_node(p1.into());
+            return Ok(changelog.revlog.rev_from_node(p1.into())?.into());
         }
@@ -25,5 +25,6 @@
         }
-        "null" => return Ok(NULL_REVISION),
+        "null" => return Ok(NULL_REVISION.into()),
+        "wdir()" => return Ok(RevisionOrWdir::wdir()),
         _ => {}
     }
 
@@ -27,7 +28,7 @@
         _ => {}
     }
 
-    match resolve_rev_number_or_hex_prefix(input, &changelog.revlog) {
+    match resolve(input, &changelog.revlog) {
         Err(RevlogError::InvalidRevision(revision)) => {
             // TODO: support for the rest of the language here.
             let msg = format!("cannot parse revset '{}'", revision);
@@ -41,9 +42,8 @@
 /// the changelog, such as in `hg debugdata --manifest` CLI argument.
 ///
 /// * A non-negative decimal integer for a revision number, or
-/// * An hexadecimal string, for the unique node ID that starts with this
-///   prefix
+/// * A hexadecimal string, for the unique node ID that starts with this prefix
 pub fn resolve_rev_number_or_hex_prefix(
     input: &str,
     revlog: &Revlog,
 ) -> Result<Revision, RevlogError> {
@@ -46,6 +46,16 @@
 pub fn resolve_rev_number_or_hex_prefix(
     input: &str,
     revlog: &Revlog,
 ) -> Result<Revision, RevlogError> {
+    match resolve(input, revlog)?.exclude_wdir() {
+        Some(rev) => Ok(rev),
+        None => Err(RevlogError::WDirUnsupported),
+    }
+}
+
+fn resolve(
+    input: &str,
+    revlog: &Revlog,
+) -> Result<RevisionOrWdir, RevlogError> {
     // The Python equivalent of this is part of `revsymbol` in
     // `mercurial/scmutil.py`
@@ -50,4 +60,3 @@
     // The Python equivalent of this is part of `revsymbol` in
     // `mercurial/scmutil.py`
-
     if let Ok(integer) = input.parse::<i32>() {
@@ -53,11 +62,13 @@
     if let Ok(integer) = input.parse::<i32>() {
-        if integer.to_string() == input
-            && integer >= 0
-            && revlog.has_rev(integer.into())
-        {
-            // This is fine because we've just checked that the revision is
-            // valid for the given revlog.
-            return Ok(Revision(integer));
+        if integer.to_string() == input && integer >= 0 {
+            if integer == WORKING_DIRECTORY_REVISION.0 {
+                return Ok(RevisionOrWdir::wdir());
+            }
+            if revlog.has_rev(integer.into()) {
+                // This is fine because we've just checked that the revision is
+                // valid for the given revlog.
+                return Ok(Revision(integer).into());
+            }
         }
     }
     if let Ok(prefix) = NodePrefix::from_hex(input) {
@@ -61,7 +72,8 @@
         }
     }
     if let Ok(prefix) = NodePrefix::from_hex(input) {
-        if prefix.is_prefix_of(&Node::from_hex(WORKING_DIRECTORY_HEX).unwrap())
-        {
-            return Err(RevlogError::WDirUnsupported);
+        let wdir_node =
+            Node::from_hex(WORKING_DIRECTORY_HEX).expect("wdir hex is valid");
+        if prefix.is_prefix_of(&wdir_node) {
+            return Ok(RevisionOrWdir::wdir());
         }
@@ -67,5 +79,5 @@
         }
-        return revlog.rev_from_node(prefix);
+        return Ok(revlog.rev_from_node(prefix)?.into());
     }
     Err(RevlogError::InvalidRevision(input.to_string()))
 }
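
A sketch of the new `resolve_single` behavior: the literal `wdir()` (and the all-f node prefix) now resolves to `RevisionOrWdir::wdir()` instead of erroring, and callers that cannot handle the working directory convert back with `exclude_wdir()` (wrapper name illustrative):

  use hg::{repo::Repo, revlog::RevlogError, revset::resolve_single};

  // Sketch only: "wdir()" resolves instead of failing to parse; callers that
  // cannot handle the working directory opt out via exclude_wdir().
  fn resolve_wdir(repo: &Repo) -> Result<(), RevlogError> {
      let rev = resolve_single("wdir()", repo)?;
      assert!(rev.is_wdir());
      assert_eq!(rev.exclude_wdir(), None);
      Ok(())
  }
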
diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9yaGcvQ2FyZ28udG9tbA==..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9yaGcvQ2FyZ28udG9tbA== 100644
--- a/rust/rhg/Cargo.toml
+++ b/rust/rhg/Cargo.toml
@@ -27,3 +27,4 @@
 which = "4.3.0"
 rayon = "1.7.0"
 libc = "0.2.155"
+serde_json = "1.0.140"
diff --git a/rust/rhg/src/commands/annotate.rs b/rust/rhg/src/commands/annotate.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2Fubm90YXRlLnJz..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2Fubm90YXRlLnJz 100644
--- a/rust/rhg/src/commands/annotate.rs
+++ b/rust/rhg/src/commands/annotate.rs
@@ -1,3 +1,3 @@
 use core::str;
-use std::{collections::hash_map::Entry, ffi::OsString};
+use std::{cell::Ref, collections::hash_map::Entry, ffi::OsString};
 
@@ -3,4 +3,5 @@
 
+use chrono::{DateTime, FixedOffset, Local};
 use format_bytes::format_bytes;
 use hg::{
     encoding::Encoder,
@@ -4,4 +5,5 @@
 use format_bytes::format_bytes;
 use hg::{
     encoding::Encoder,
+    errors::{HgError, IoResultExt as _},
     operations::{
@@ -7,3 +9,3 @@
     operations::{
-        annotate, AnnotateOptions, AnnotateOutput, ChangesetAnnotation,
+        annotate, AnnotateOptions, AnnotateOutput, ChangesetAnnotatedFile,
     },
@@ -9,6 +11,8 @@
     },
-    revlog::changelog::Changelog,
-    utils::strings::CleanWhitespace,
-    FastHashMap, Revision,
+    repo::Repo,
+    revlog::{changelog::Changelog, RevisionOrWdir},
+    utils::{hg_path::HgPath, strings::CleanWhitespace},
+    FastHashMap, Node, Revision, WORKING_DIRECTORY_HEX,
+    WORKING_DIRECTORY_REVISION,
 };
 
@@ -13,6 +17,9 @@
 };
 
-use crate::{error::CommandError, utils::path_utils::resolve_file_args};
+use crate::{
+    error::CommandError, ui::StdoutBuffer,
+    utils::path_utils::resolve_file_args,
+};
 
 pub const HELP_TEXT: &str = "
 show changeset information by line for each file
@@ -135,6 +142,12 @@
                 .long("ignore-space-at-eol")
                 .action(clap::ArgAction::SetTrue),
         )
+        .arg(
+            clap::Arg::new("template")
+                .help("display with template")
+                .short('T')
+                .long("template"),
+        )
         .about(HELP_TEXT)
 }
 
@@ -186,7 +199,7 @@
     }
     if include.line_number && !(include.number || include.changeset) {
         return Err(CommandError::abort(
-            "at least one of -n/-c is required for -l",
+            "abort: at least one of -n/-c is required for -l",
         ));
     }
 
@@ -197,9 +210,22 @@
         (true, true) => unreachable!(),
     };
 
-    let changelog = repo.changelog()?;
-    let mut formatter = Formatter::new(
-        &changelog,
-        invocation.ui.encoder(),
+    let template = match args.get_one::<String>("template") {
+        None => Template::Default,
+        Some(name) if name == "json" => Template::Json,
+        _ => {
+            return Err(CommandError::unsupported("only -Tjson is supported"))
+        }
+    };
+
+    let wdir_config = if rev.is_wdir() {
+        let user = config.username()?;
+        Some(WdirConfig { user })
+    } else {
+        None
+    };
+
+    let format_config = FormatConfig {
+        template,
         include,
         verbosity,
@@ -204,35 +230,5 @@
         include,
         verbosity,
-    );
-    let mut stdout = invocation.ui.stdout_buffer();
-    for path in files {
-        match annotate(repo, &path, rev, options)? {
-            AnnotateOutput::Text(text) => {
-                let annotations = formatter.format(text.annotations)?;
-                for (annotation, line) in annotations.iter().zip(&text.lines) {
-                    stdout.write_all(&format_bytes!(
-                        b"{}: {}", annotation, line
-                    ))?;
-                }
-                if let Some(line) = text.lines.last() {
-                    if !line.ends_with(b"\n") {
-                        stdout.write_all(b"\n")?;
-                    }
-                }
-            }
-            AnnotateOutput::Binary => {
-                stdout.write_all(&format_bytes!(
-                    b"{}: binary file\n",
-                    path.as_bytes()
-                ))?;
-            }
-            AnnotateOutput::NotFound => {
-                let short = changelog.node_from_rev(rev).short();
-                return Err(CommandError::abort(format!(
-                    "{path}: no such file in rev {short:x}",
-                )));
-            }
-        }
-    }
-    stdout.flush()?;
+        wdir_config,
+    };
 
@@ -238,4 +234,8 @@
 
-    Ok(())
+    let file_results = files.iter().map(|path| -> FileResult {
+        (path.as_ref(), annotate(repo, path, rev, options))
+    });
+
+    print_output(repo, invocation.ui, &format_config, rev, file_results)
 }
 
@@ -240,7 +240,6 @@
 }
 
-struct Formatter<'a> {
-    changelog: &'a Changelog,
-    encoder: &'a Encoder,
+struct FormatConfig {
+    template: Template,
     include: Include,
     verbosity: Verbosity,
@@ -245,5 +244,5 @@
     include: Include,
     verbosity: Verbosity,
-    cache: FastHashMap<Revision, ChangesetData>,
+    wdir_config: Option<WdirConfig>,
 }
 
@@ -248,6 +247,10 @@
 }
 
-#[derive(Copy, Clone)]
+enum Template {
+    Default,
+    Json,
+}
+
 struct Include {
     user: bool,
     number: bool,
@@ -269,10 +272,9 @@
     }
 }
 
-#[derive(Copy, Clone)]
 enum Verbosity {
     Quiet,
     Default,
     Verbose,
 }
 
@@ -273,9 +275,16 @@
 enum Verbosity {
     Quiet,
     Default,
     Verbose,
 }
 
+/// Information to use for lines that changed in the working directory.
+struct WdirConfig {
+    user: Vec<u8>,
+}
+
+/// Information that we can cache per changeset.
+/// For [`Template::Json`], the values are JSON encoded.
 #[derive(Default)]
 struct ChangesetData {
     user: Option<Vec<u8>>,
@@ -283,5 +292,301 @@
     date: Option<Vec<u8>>,
 }
 
+/// Whether the "+" sigil calculation is for --number or --changeset.
+#[derive(PartialEq, Eq)]
+enum SigilFor {
+    Number,
+    Changeset,
+}
+
+type FileResult<'a> = (&'a HgPath, Result<AnnotateOutput, HgError>);
+
+fn print_output<'a>(
+    repo: &Repo,
+    ui: &crate::Ui,
+    config: &FormatConfig,
+    rev: RevisionOrWdir,
+    file_results: impl Iterator<Item = FileResult<'a>>,
+) -> Result<(), CommandError> {
+    let encoder = ui.encoder();
+    let stdout = &mut ui.stdout_buffer();
+    let dirstate_p1 = repo
+        .changelog()?
+        .rev_from_node(repo.dirstate_parents()?.p1.into())?;
+    let mut cache = Cache::new(repo)?;
+    match config.template {
+        Template::Default => {
+            for (path, output) in file_results {
+                match output? {
+                    AnnotateOutput::Text(file) => {
+                        print_lines_default(
+                            file,
+                            config,
+                            stdout,
+                            encoder,
+                            cache.for_path(path),
+                            dirstate_p1,
+                        )?;
+                    }
+                    AnnotateOutput::Binary => {
+                        stdout.write_all(&format_bytes!(
+                            b"{}: binary file\n",
+                            path.as_bytes()
+                        ))?;
+                    }
+                    AnnotateOutput::NotFound => {
+                        return handle_not_found(repo, rev, path)
+                    }
+                }
+            }
+        }
+        Template::Json => {
+            stdout.write_all(b"[")?;
+            let mut file_sep: &[u8] = b"\n";
+            for (path, output) in file_results {
+                stdout.write_all(file_sep)?;
+                file_sep = b",\n";
+                stdout.write_all(b" {\n")?;
+                match output? {
+                    AnnotateOutput::Text(file) => {
+                        print_lines_json(
+                            file,
+                            config,
+                            stdout,
+                            cache.for_path(path),
+                        )?;
+                    }
+                    AnnotateOutput::Binary => {}
+                    AnnotateOutput::NotFound => {
+                        return handle_not_found(repo, rev, path)
+                    }
+                }
+                let path = json_string(path.as_bytes())?;
+                stdout
+                    .write_all(&format_bytes!(b"  \"path\": {}\n }", path))?;
+            }
+            stdout.write_all(b"\n]\n")?;
+        }
+    }
+    stdout.flush()?;
+    Ok(())
+}
+
+type Stdout<'a> =
+    StdoutBuffer<'a, std::io::BufWriter<std::io::StdoutLock<'a>>>;
+
+fn print_lines_default(
+    file: ChangesetAnnotatedFile,
+    config: &FormatConfig,
+    stdout: &mut Stdout,
+    encoder: &Encoder,
+    mut cache: CacheForPath,
+    dirstate_p1: Revision,
+) -> Result<(), CommandError> {
+    // Serialize the annotation fields (revision, user, etc.) for each line
+    // and keep track of their maximum lengths so that we can align them.
+    let mut field_lists: Vec<Vec<Vec<u8>>> =
+        Vec::with_capacity(file.annotations.len());
+    let num_fields = config.include.count();
+    let mut widths = vec![0usize; num_fields];
+    for annotation in file.annotations {
+        let rev = annotation.revision;
+        let data = cache.get_data(rev, config)?;
+        let mut fields = Vec::with_capacity(num_fields);
+        if let Some(user) = &data.user {
+            fields.push(user.clone());
+        }
+        if config.include.number {
+            let number = rev.exclude_wdir().unwrap_or(dirstate_p1);
+            let sigil = fmt_sigil(config, rev, SigilFor::Number);
+            fields.push(format_bytes!(b"{}{}", number, sigil));
+        }
+        if let Some(changeset) = &data.changeset {
+            let sigil = fmt_sigil(config, rev, SigilFor::Changeset);
+            fields.push(format_bytes!(b"{}{}", changeset, sigil));
+        }
+        if let Some(date) = &data.date {
+            fields.push(date.clone());
+        }
+        if config.include.file {
+            fields.push(annotation.path.into_vec());
+        }
+        if config.include.line_number {
+            fields.push(format_bytes!(b"{}", annotation.line_number));
+        }
+        for (field, width) in fields.iter().zip(widths.iter_mut()) {
+            *width = std::cmp::max(*width, encoder.column_width_bytes(field));
+        }
+        field_lists.push(fields);
+    }
+    // Print each line of the file prefixed by aligned annotations.
+    let total_width = widths.iter().sum::<usize>() + num_fields - 1;
+    for (fields, line) in field_lists.iter().zip(file.lines.iter()) {
+        let mut annotation = Vec::with_capacity(total_width);
+        for (i, (field, width)) in fields.iter().zip(widths.iter()).enumerate()
+        {
+            if i > 0 {
+                let colon = config.include.line_number && i == num_fields - 1;
+                annotation.push(if colon { b':' } else { b' ' });
+            }
+            let padding = width - encoder.column_width_bytes(field);
+            annotation.resize(annotation.len() + padding, b' ');
+            annotation.extend_from_slice(field);
+        }
+        stdout.write_all(&format_bytes!(b"{}: {}", annotation, line))?;
+    }
+    if let Some(line) = file.lines.last() {
+        if !line.ends_with(b"\n") {
+            stdout.write_all(b"\n")?;
+        }
+    }
+    Ok(())
+}
+
+fn print_lines_json(
+    file: ChangesetAnnotatedFile,
+    config: &FormatConfig,
+    stdout: &mut Stdout,
+    mut cache: CacheForPath,
+) -> Result<(), CommandError> {
+    stdout.write_all(b"  \"lines\": [")?;
+    let mut line_sep: &[u8] = b"";
+    for (annotation, line) in file.annotations.iter().zip(file.lines.iter()) {
+        stdout.write_all(line_sep)?;
+        line_sep = b", ";
+
+        let mut property_sep: &[u8] = b"";
+        let mut property = |key: &[u8], value: &[u8]| {
+            let res = format_bytes!(b"{}\"{}\": {}", property_sep, key, value);
+            property_sep = b", ";
+            res
+        };
+
+        stdout.write_all(b"{")?;
+        let rev = annotation.revision;
+        let data = cache.get_data(rev, config)?;
+        if let Some(date_json) = &data.date {
+            stdout.write_all(&property(b"date", date_json))?;
+        }
+        stdout.write_all(&property(b"line", &json_string(line)?))?;
+        if config.include.line_number {
+            let lineno = annotation.line_number.to_string();
+            stdout.write_all(&property(b"lineno", lineno.as_bytes()))?;
+        }
+        if let Some(changeset_json) = &data.changeset {
+            stdout.write_all(&property(b"node", changeset_json))?;
+        }
+        if config.include.file {
+            let path = json_string(annotation.path.as_bytes())?;
+            stdout.write_all(&property(b"path", &path))?;
+        }
+        if config.include.number {
+            let number = match rev.exclude_wdir() {
+                Some(rev) => rev.0,
+                None => WORKING_DIRECTORY_REVISION.0,
+            };
+            stdout
+                .write_all(&property(b"rev", number.to_string().as_bytes()))?;
+        }
+        if let Some(user_json) = &data.user {
+            stdout.write_all(&property(b"user", user_json))?;
+        }
+        stdout.write_all(b"}")?;
+    }
+    stdout.write_all(b"],\n")?;
+    Ok(())
+}
+
+fn handle_not_found(
+    repo: &Repo,
+    rev: RevisionOrWdir,
+    path: &HgPath,
+) -> Result<(), CommandError> {
+    Err(CommandError::abort(match rev.exclude_wdir() {
+        Some(rev) => {
+            let short = repo.changelog()?.node_from_rev(rev).short();
+            format!("abort: {path}: no such file in rev {short:x}",)
+        }
+        None => {
+            format!("abort: {path}: No such file or directory")
+        }
+    }))
+}
+
+/// Returns the sigil to put after the revision number or changeset.
+fn fmt_sigil(
+    config: &FormatConfig,
+    rev: RevisionOrWdir,
+    which: SigilFor,
+) -> &'static [u8] {
+    // The "+" sigil is only used for '--rev wdir()'.
+    if config.wdir_config.is_none() {
+        return b"";
+    };
+    // With --number --changeset, put it after the changeset.
+    if which == SigilFor::Number && config.include.changeset {
+        return b"";
+    }
+    if rev.is_wdir() {
+        b"+"
+    } else {
+        b" "
+    }
+}
+
+/// A cache of [`ChangesetData`] for each changeset we've seen.
+struct Cache<'a> {
+    repo: &'a Repo,
+    changelog: Ref<'a, Changelog>,
+    map: FastHashMap<RevisionOrWdir, ChangesetData>,
+}
+
+impl<'a> Cache<'a> {
+    fn new(repo: &'a Repo) -> Result<Self, CommandError> {
+        Ok(Self {
+            repo,
+            changelog: repo.changelog()?,
+            map: Default::default(),
+        })
+    }
+
+    fn for_path(&mut self, path: &'a HgPath) -> CacheForPath<'_, 'a> {
+        CacheForPath { cache: self, path }
+    }
+}
+
+/// [`Cache`] scoped to annotating a particular file.
+struct CacheForPath<'a, 'b> {
+    cache: &'a mut Cache<'b>,
+    path: &'a HgPath,
+}
+
+impl CacheForPath<'_, '_> {
+    fn get_data(
+        &mut self,
+        rev: RevisionOrWdir,
+        config: &FormatConfig,
+    ) -> Result<&ChangesetData, CommandError> {
+        Ok(match self.cache.map.entry(rev) {
+            Entry::Occupied(occupied) => occupied.into_mut(),
+            Entry::Vacant(vacant) => vacant.insert(ChangesetData::create(
+                rev,
+                self.path,
+                self.cache.repo,
+                &self.cache.changelog,
+                config,
+            )?),
+        })
+    }
+}
+
+impl Drop for CacheForPath<'_, '_> {
+    fn drop(&mut self) {
+        // Clear the wdir cache entry, otherwise `rhg annotate --date f1 f2`
+        // would use f1's mtime for lines in f2 attributed to wdir.
+        self.cache.map.remove(&RevisionOrWdir::wdir());
+    }
+}
+
 impl ChangesetData {
     fn create(
@@ -286,4 +591,6 @@
 impl ChangesetData {
     fn create(
-        revision: Revision,
+        revision: RevisionOrWdir,
+        path: &HgPath,
+        repo: &Repo,
         changelog: &Changelog,
@@ -289,6 +596,5 @@
         changelog: &Changelog,
-        include: Include,
-        verbosity: Verbosity,
-    ) -> Result<ChangesetData, CommandError> {
-        let mut result = ChangesetData::default();
+        config: &FormatConfig,
+    ) -> Result<Self, CommandError> {
+        let include = &config.include;
         if !(include.user || include.changeset || include.date) {
@@ -294,3 +600,3 @@
         if !(include.user || include.changeset || include.date) {
-            return Ok(result);
+            return Ok(Self::default());
         }
@@ -296,10 +602,43 @@
         }
-        let entry = changelog.entry(revision)?;
-        let data = entry.data()?;
-        if include.user {
-            let user = match verbosity {
-                Verbosity::Verbose => data.user(),
-                _ => hg::utils::strings::short_user(data.user()),
+        match revision.exclude_wdir() {
+            Some(revision) => {
+                let entry = changelog.entry(revision)?;
+                let data = entry.data()?;
+                let node = *entry.as_revlog_entry().node();
+                Self::new(data.user(), node, data.timestamp()?, config)
+            }
+            None => {
+                let node = match config.template {
+                    Template::Default => repo.dirstate_parents()?.p1,
+                    Template::Json => Node::from_hex(WORKING_DIRECTORY_HEX)
+                        .expect("wdir hex should parse"),
+                };
+                let fs_path = hg::utils::hg_path::hg_path_to_path_buf(path)?;
+                let meta =
+                    repo.working_directory_vfs().symlink_metadata(&fs_path)?;
+                let mtime = meta.modified().when_reading_file(&fs_path)?;
+                let mtime = DateTime::<Local>::from(mtime).fixed_offset();
+                let user =
+                    &config.wdir_config.as_ref().expect("should be set").user;
+                Self::new(user, node, mtime, config)
+            }
+        }
+    }
+
+    fn new(
+        user: &[u8],
+        changeset: Node,
+        date: DateTime<FixedOffset>,
+        config: &FormatConfig,
+    ) -> Result<Self, CommandError> {
+        let mut result = ChangesetData::default();
+        if config.include.user {
+            let user = match config.template {
+                Template::Default => match config.verbosity {
+                    Verbosity::Verbose => user.to_vec(),
+                    _ => hg::utils::strings::short_user(user).to_vec(),
+                },
+                Template::Json => json_string(user)?,
             };
             result.user = Some(user.to_vec());
         }
@@ -303,7 +642,10 @@
             };
             result.user = Some(user.to_vec());
         }
-        if include.changeset {
-            let changeset = entry.as_revlog_entry().node().short();
-            result.changeset = Some(format!("{:x}", changeset).into_bytes());
+        if config.include.changeset {
+            let hex = match config.template {
+                Template::Default => format!("{:x}", changeset.short()),
+                Template::Json => format!("\"{:x}\"", changeset),
+            };
+            result.changeset = Some(hex.into_bytes());
         }
@@ -309,12 +651,24 @@
         }
-        if include.date {
-            let date = data.timestamp()?.format(match verbosity {
-                Verbosity::Quiet => "%Y-%m-%d",
-                _ => "%a %b %d %H:%M:%S %Y %z",
-            });
-            result.date = Some(format!("{}", date).into_bytes());
+        if config.include.date {
+            let date = match config.template {
+                Template::Default => {
+                    format!(
+                        "{}",
+                        date.format(match config.verbosity {
+                            Verbosity::Quiet => "%Y-%m-%d",
+                            _ => "%a %b %d %H:%M:%S %Y %z",
+                        })
+                    )
+                }
+                Template::Json => format!(
+                    "[{}.0, {}]",
+                    date.timestamp(),
+                    date.offset().utc_minus_local(),
+                ),
+            };
+            result.date = Some(date.into_bytes());
         }
         Ok(result)
     }
 }
 
@@ -316,91 +670,12 @@
         }
         Ok(result)
     }
 }
 
-impl<'a> Formatter<'a> {
-    fn new(
-        changelog: &'a Changelog,
-        encoder: &'a Encoder,
-        include: Include,
-        verbosity: Verbosity,
-    ) -> Self {
-        let cache = FastHashMap::default();
-        Self {
-            changelog,
-            encoder,
-            include,
-            verbosity,
-            cache,
-        }
-    }
-
-    fn format(
-        &mut self,
-        annotations: Vec<ChangesetAnnotation>,
-    ) -> Result<Vec<Vec<u8>>, CommandError> {
-        let mut lines: Vec<Vec<Vec<u8>>> =
-            Vec::with_capacity(annotations.len());
-        let num_fields = self.include.count();
-        let mut widths = vec![0usize; num_fields];
-        for annotation in annotations {
-            let revision = annotation.revision;
-            let data = match self.cache.entry(revision) {
-                Entry::Occupied(occupied) => occupied.into_mut(),
-                Entry::Vacant(vacant) => vacant.insert(ChangesetData::create(
-                    revision,
-                    self.changelog,
-                    self.include,
-                    self.verbosity,
-                )?),
-            };
-            let mut fields = Vec::with_capacity(num_fields);
-            if let Some(user) = &data.user {
-                fields.push(user.clone());
-            }
-            if self.include.number {
-                fields.push(format_bytes!(b"{}", revision));
-            }
-            if let Some(changeset) = &data.changeset {
-                fields.push(changeset.clone());
-            }
-            if let Some(date) = &data.date {
-                fields.push(date.clone());
-            }
-            if self.include.file {
-                fields.push(annotation.path.into_vec());
-            }
-            if self.include.line_number {
-                fields.push(format_bytes!(b"{}", annotation.line_number));
-            }
-            for (field, width) in fields.iter().zip(widths.iter_mut()) {
-                *width = std::cmp::max(
-                    *width,
-                    self.encoder.column_width_bytes(field),
-                );
-            }
-            lines.push(fields);
-        }
-        let total_width = widths.iter().sum::<usize>() + num_fields - 1;
-        Ok(lines
-            .iter()
-            .map(|fields| {
-                let mut bytes = Vec::with_capacity(total_width);
-                for (i, (field, width)) in
-                    fields.iter().zip(widths.iter()).enumerate()
-                {
-                    if i > 0 {
-                        let colon =
-                            self.include.line_number && i == num_fields - 1;
-                        bytes.push(if colon { b':' } else { b' ' });
-                    }
-                    let padding =
-                        width - self.encoder.column_width_bytes(field);
-                    bytes.resize(bytes.len() + padding, b' ');
-                    bytes.extend_from_slice(field);
-                }
-                bytes
-            })
-            .collect())
-    }
+fn json_string(text: &[u8]) -> Result<Vec<u8>, CommandError> {
+    serde_json::to_vec(&String::from_utf8_lossy(text)).map_err(|err| {
+        CommandError::abort(format!(
+            "failed to serialize string to JSON: {err}"
+        ))
+    })
 }
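
The `json_string` helper escapes via `String::from_utf8_lossy` plus `serde_json`, which is what yields the U+FFFD replacement checked in test-annotate.t below. A standalone sketch of the same strategy (error type simplified relative to the rhg version):

  // Standalone sketch of the escaping strategy used by `json_string` above;
  // the error type is simplified relative to the rhg version.
  fn json_string(text: &[u8]) -> serde_json::Result<Vec<u8>> {
      // Invalid UTF-8 becomes U+FFFD before encoding, so the output is always
      // valid JSON (unlike the Python -Tjson path noted in test-annotate.t).
      serde_json::to_vec(&String::from_utf8_lossy(text))
  }

  fn main() {
      assert_eq!(json_string(b"a\"b\n").unwrap(), b"\"a\\\"b\\n\"");
      // Latin-1 0xC9 is not valid UTF-8, so it is replaced before encoding.
      assert_eq!(json_string(b"\xc9").unwrap(), "\"\u{fffd}\"".as_bytes());
  }
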
diff --git a/rust/rhg/src/commands/status.rs b/rust/rhg/src/commands/status.rs
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL3N0YXR1cy5ycw==..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL3N0YXR1cy5ycw== 100644
--- a/rust/rhg/src/commands/status.rs
+++ b/rust/rhg/src/commands/status.rs
@@ -25,7 +25,7 @@
 use hg::repo::Repo;
 use hg::revlog::manifest::Manifest;
 use hg::revlog::options::{default_revlog_options, RevlogOpenOptions};
-use hg::revlog::RevlogType;
+use hg::revlog::{RevisionOrWdir, RevlogError, RevlogType};
 use hg::utils::debug::debug_wait_for_file;
 use hg::utils::files::{
     get_bytes_from_os_str, get_bytes_from_os_string, get_path_from_bytes,
@@ -171,5 +171,11 @@
     let Some(revs) = revs else {
         return Ok(None);
     };
+    let resolve = |input| match hg::revset::resolve_single(input, repo)?
+        .exclude_wdir()
+    {
+        Some(rev) => Ok(rev),
+        None => Err(RevlogError::WDirUnsupported),
+    };
     match revs.as_slice() {
         [] => Ok(None),
@@ -174,9 +180,6 @@
     match revs.as_slice() {
         [] => Ok(None),
-        [rev1, rev2] => Ok(Some((
-            hg::revset::resolve_single(rev1, repo)?,
-            hg::revset::resolve_single(rev2, repo)?,
-        ))),
+        [rev1, rev2] => Ok(Some((resolve(rev1)?, resolve(rev2)?))),
         _ => Err(CommandError::unsupported("expected 0 or 2 --rev flags")),
     }
 }
@@ -312,6 +315,8 @@
     let change = change
         .map(|rev| hg::revset::resolve_single(rev, repo))
         .transpose()?;
+    // Treat `rhg status --change wdir()` the same as `rhg status`.
+    let change = change.and_then(RevisionOrWdir::exclude_wdir);
 
     if verbose && has_unfinished_state(repo)? {
         return Err(CommandError::unsupported(
diff --git a/tests/test-annotate.t b/tests/test-annotate.t
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_dGVzdHMvdGVzdC1hbm5vdGF0ZS50..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_dGVzdHMvdGVzdC1hbm5vdGF0ZS50 100644
--- a/tests/test-annotate.t
+++ b/tests/test-annotate.t
@@ -516,5 +516,5 @@
 
 missing file
 
-  $ hg ann nosuchfile
+  $ hg annotate nosuchfile
   abort: nosuchfile: no such file in rev e9e6b4fa872f
@@ -520,5 +520,5 @@
   abort: nosuchfile: no such file in rev e9e6b4fa872f
-  [10]
+  [255]
 
 annotate file without '\n' on last line
 
@@ -693,8 +693,9 @@
   $ rm baz
 
   $ hg annotate -ncr "wdir()" baz
-  abort: $TESTTMP\repo/baz: $ENOENT$ (windows !)
-  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
+  abort: baz: $ENOENT$ (rhg !)
+  abort: $TESTTMP\repo/baz: $ENOENT$ (no-rhg windows !)
+  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-rhg no-windows !)
   [255]
 
 annotate removed file
@@ -702,8 +703,15 @@
   $ hg rm baz
 
   $ hg annotate -ncr "wdir()" baz
-  abort: $TESTTMP\repo/baz: $ENOENT$ (windows !)
-  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
+  abort: baz: $ENOENT$ (rhg !)
+  abort: $TESTTMP\repo/baz: $ENOENT$ (no-rhg windows !)
+  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-rhg no-windows !)
+  [255]
+
+annotate file neither in repo nor working copy
+
+  $ hg annotate -ncr "wdir()" never_existed
+  abort: never_existed: $ENOENT$
   [255]
 
   $ hg revert --all --no-backup --quiet
@@ -1052,6 +1060,20 @@
    }
   ]
 
+Test non-UTF8 (should use U+FFFD replacement character)
+TODO: fix Python which instead emits invalid JSON
+
+  $ "$PYTHON" -c 'open("latin1", "wb").write(b"\xc9")'
+  $ hg ci -qAm 'add latin1 file'
+  $ hg annotate -Tjson latin1
+  [
+   {
+    "lines": [{"line": "\xed\xb3\x89", "rev": 35}], (esc) (no-rhg known-bad-output !)
+    "lines": [{"line": "\xef\xbf\xbd", "rev": 35}], (esc) (rhg !)
+    "path": "latin1"
+   }
+  ]
+
 Test annotate with whitespace options
 
   $ cd ..
diff --git a/tests/test-fastannotate-hg.t b/tests/test-fastannotate-hg.t
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_dGVzdHMvdGVzdC1mYXN0YW5ub3RhdGUtaGcudA==..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_dGVzdHMvdGVzdC1mYXN0YW5ub3RhdGUtaGcudA== 100644
--- a/tests/test-fastannotate-hg.t
+++ b/tests/test-fastannotate-hg.t
@@ -456,5 +456,5 @@
 
 missing file
 
-  $ hg ann nosuchfile
+  $ hg annotate nosuchfile
   abort: nosuchfile: no such file in rev e9e6b4fa872f
@@ -460,5 +460,5 @@
   abort: nosuchfile: no such file in rev e9e6b4fa872f
-  [10]
+  [255]
 
 annotate file without '\n' on last line
 
diff --git a/tests/test-rhg.t b/tests/test-rhg.t
index d81714a1c88d5143d3999afaa6e4eab4f32f590e_dGVzdHMvdGVzdC1yaGcudA==..a5b3ba7d67688d5b951e56c83c00bcb0bfd593ca_dGVzdHMvdGVzdC1yaGcudA== 100644
--- a/tests/test-rhg.t
+++ b/tests/test-rhg.t
@@ -202,6 +202,15 @@
   test 0 1c9e69808da7 Thu Jan 01 00:00:00 1970 +0000 original:1: original content
   $ $NO_FALLBACK rhg blame -r . -ufdnclawbBZ --no-follow original
   test 0 1c9e69808da7 Thu Jan 01 00:00:00 1970 +0000 original:1: original content
+  $ $NO_FALLBACK rhg annotate -r 'wdir()' original
+  0 : original content
+  $ $NO_FALLBACK rhg annotate -Tjson original
+  [
+   {
+    "lines": [{"line": "original content\n", "rev": 0}],
+    "path": "original"
+   }
+  ]
 
 Fallback to Python
   $ $NO_FALLBACK rhg cat original --exclude="*.rs"