diff --git a/Makefile b/Makefile
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_TWFrZWZpbGU=..48dfba3df18ce7311ebc2eb22932589da8d0b253_TWFrZWZpbGU= 100644
--- a/Makefile
+++ b/Makefile
@@ -77,7 +77,7 @@
 
 .PHONY: build-rhg
 build-rhg:
-	(cd rust/rhg; cargo build --release)
+	(cd rust/rhg; cargo build --release --features "$(HG_RUST_FEATURES)")
 
 .PHONY: wheel
 wheel:
@@ -100,7 +100,6 @@
 	$(MAKE) -C doc clean
 	$(MAKE) -C contrib/chg distclean
 	rm -rf rust/target
-	rm -f mercurial/rustext.so
 
 .PHONY: clean
 clean: cleanbutpackages
@@ -171,8 +170,9 @@
 rust-tests:
 	cd $(HGROOT)/rust \
 		&& $(CARGO) test --quiet --all \
-		--features "$(HG_RUST_FEATURES)" --no-default-features
+		   --features "full-tracing" --no-default-features \
+		&&  $(CARGO) test --quiet --all --no-default-features
 
 .PHONY: cargo-clippy
 cargo-clippy:
 	cd $(HGROOT)/rust \
@@ -175,8 +175,9 @@
 
 .PHONY: cargo-clippy
 cargo-clippy:
 	cd $(HGROOT)/rust \
-		&& $(CARGO) clippy --all --features "$(HG_RUST_FEATURES)" -- -D warnings
+		&& $(CARGO) clippy --all -- -D warnings \
+		&& $(CARGO) clippy --all --features "full-tracing" -- -D warnings
 
 .PHONY: check-code
 check-code:
diff --git a/mercurial/copies.py b/mercurial/copies.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL2NvcGllcy5weQ==..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL2NvcGllcy5weQ== 100644
--- a/mercurial/copies.py
+++ b/mercurial/copies.py
@@ -28,7 +28,7 @@
     sidedata as sidedatamod,
 )
 
-rustmod = policy.importrust("copy_tracing", pyo3=True)
+rustmod = policy.importrust("copy_tracing")
 
 
 def _filter(src, dst, t):
diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL2RlYnVnY29tbWFuZHMucHk=..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL2RlYnVnY29tbWFuZHMucHk= 100644
--- a/mercurial/debugcommands.py
+++ b/mercurial/debugcommands.py
@@ -1945,7 +1945,7 @@
     )
 
     try:
-        from . import rustext  # pytype: disable=import-error
-
-        rustext.__doc__  # trigger lazy import
+        from . import pyo3_rustext  # pytype: disable=import-error
+
+        pyo3_rustext.__doc__  # trigger lazy import
     except ImportError:
@@ -1951,5 +1951,5 @@
     except ImportError:
-        rustext = None
+        pyo3_rustext = None
 
     security = set(sslutil.supportedprotocols)
     if sslutil.hassni:
@@ -1981,7 +1981,7 @@
     fm.plain(
         _(
             b"checking Rust extensions (%s)\n"
-            % (b'missing' if rustext is None else b'installed')
+            % (b'missing' if pyo3_rustext is None else b'installed')
         ),
     )
 
@@ -2015,5 +2015,5 @@
     )
 
     rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
-    rustext = rustandc  # for now, that's the only case
+    pyo3_rustext = rustandc  # for now, that's the only case
     cext = policy.policy in (b'c', b'allow') or rustandc
@@ -2019,5 +2019,5 @@
     cext = policy.policy in (b'c', b'allow') or rustandc
-    nopure = cext or rustext
+    nopure = cext or pyo3_rustext
     if nopure:
         err = None
         try:
@@ -2031,8 +2031,8 @@
 
                 # quiet pyflakes
                 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
-            if rustext:
-                from .rustext import (  # pytype: disable=import-error
+            if pyo3_rustext:
+                from .pyo3_rustext import (  # pytype: disable=import-error
                     ancestor,
                     dirstate,
                 )
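The probe above is the same one a packaging script can reuse to confirm which
implementation is active; a minimal sketch based on the checks in this hunk
(the ``rust_status`` helper name is purely illustrative)::

  from mercurial import policy

  def rust_status():
      # Mirror the debuginstall logic: the module policy says what was
      # requested, the import says what is actually available.
      try:
          from mercurial import pyo3_rustext  # pytype: disable=import-error

          pyo3_rustext.__doc__  # trigger lazy import
          available = True
      except ImportError:
          available = False
      return policy.policy, available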
diff --git a/mercurial/dirstate.py b/mercurial/dirstate.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL2RpcnN0YXRlLnB5..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL2RpcnN0YXRlLnB5 100644
--- a/mercurial/dirstate.py
+++ b/mercurial/dirstate.py
@@ -54,7 +54,7 @@
 )
 
 parsers = policy.importmod('parsers')
-rustmod = policy.importrust('dirstate', pyo3=True)
+rustmod = policy.importrust('dirstate')
 
 HAS_FAST_DIRSTATE_V2 = rustmod is not None
 
diff --git a/mercurial/dirstatemap.py b/mercurial/dirstatemap.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL2RpcnN0YXRlbWFwLnB5..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL2RpcnN0YXRlbWFwLnB5 100644
--- a/mercurial/dirstatemap.py
+++ b/mercurial/dirstatemap.py
@@ -36,7 +36,7 @@
     )
 
 parsers = policy.importmod('parsers')
-rustmod = policy.importrust('dirstate', pyo3=True)
+rustmod = policy.importrust('dirstate')
 
 propertycache = util.propertycache
 
diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL2xvY2FscmVwby5weQ==..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL2xvY2FscmVwby5weQ== 100644
--- a/mercurial/localrepo.py
+++ b/mercurial/localrepo.py
@@ -3804,9 +3804,7 @@
         requirements.add(requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT)
 
     # The feature is disabled unless a fast implementation is available.
-    persistent_nodemap_default = (
-        policy.importrust('revlog', pyo3=True) is not None
-    )
+    persistent_nodemap_default = policy.importrust('revlog') is not None
     if ui.configbool(
         b'format', b'use-persistent-nodemap', persistent_nodemap_default
     ):
diff --git a/mercurial/match.py b/mercurial/match.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL21hdGNoLnB5..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL21hdGNoLnB5 100644
--- a/mercurial/match.py
+++ b/mercurial/match.py
@@ -41,7 +41,7 @@
     matcher as int_matcher,
 )
 
-rustmod = policy.importrust('dirstate', pyo3=True)
+rustmod = policy.importrust('dirstate')
 
 allpatternkinds = (
     b're',
diff --git a/mercurial/merge.py b/mercurial/merge.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL21lcmdlLnB5..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL21lcmdlLnB5 100644
--- a/mercurial/merge.py
+++ b/mercurial/merge.py
@@ -53,7 +53,7 @@
     """The merge action, data about the merge, and message about the merge, for
     the keyed file."""
 
-rust_update_mod = policy.importrust("update", pyo3=True)
+rust_update_mod = policy.importrust("update")
 
 _pack = struct.pack
 _unpack = struct.unpack
@@ -1844,7 +1844,7 @@
         b'fsmonitor', b'warn_update_file_count'
     )
     # avoid cycle dirstate -> sparse -> merge -> dirstate
-    dirstate_rustmod = policy.importrust("dirstate", pyo3=True)
+    dirstate_rustmod = policy.importrust("dirstate")
 
     if dirstate_rustmod is not None:
         # When using rust status, fsmonitor becomes necessary at higher sizes
@@ -1980,7 +1980,7 @@
         maybe_wlock = util.nullcontextmanager()
     else:
         maybe_wlock = repo.wlock()
-    with maybe_wlock:
+    with maybe_wlock, util.rust_tracing_span("under wlock"):
         if wc is None:
             wc = repo[None]
         pl = wc.parents()
diff --git a/mercurial/pathutil.py b/mercurial/pathutil.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL3BhdGh1dGlsLnB5..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL3BhdGh1dGlsLnB5 100644
--- a/mercurial/pathutil.py
+++ b/mercurial/pathutil.py
@@ -24,7 +24,7 @@
 
 from .interfaces import misc as int_misc
 
-rustdirs = policy.importrust('dirstate', 'Dirs', pyo3=True)
+rustdirs = policy.importrust('dirstate', 'Dirs')
 parsers = policy.importmod('parsers')
 
 
diff --git a/mercurial/policy.py b/mercurial/policy.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL3BvbGljeS5weQ==..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL3BvbGljeS5weQ== 100644
--- a/mercurial/policy.py
+++ b/mercurial/policy.py
@@ -132,9 +132,7 @@
     return policy.endswith(b'-allow')
 
 
-def importrust(
-    modname: str, member: Optional[str] = None, default=None, pyo3=False
-):
+def importrust(modname: str, member: Optional[str] = None, default=None):
     """Import Rust module according to policy and availability.
 
     If policy isn't a Rust one, this returns `default`.
@@ -145,7 +143,7 @@
     if not policy.startswith(b'rust'):
         return default
 
-    dlib_name = "pyo3_rustext" if pyo3 else "rustext"
+    dlib_name = "pyo3_rustext"
 
     try:
         mod = _importfrom(dlib_name, modname)
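With the ``pyo3`` keyword gone, ``importrust`` always resolves against the
single ``pyo3_rustext`` package. A short usage sketch, mirroring the call
sites touched elsewhere in this patch::

  from mercurial import policy

  # Returns the Rust `dirstate` module, or None when the module policy is
  # not a rust one or the extensions are not installed.
  rustmod = policy.importrust('dirstate')

  # A single member can be requested instead, with an optional fallback.
  rustdirs = policy.importrust('dirstate', member='Dirs', default=None)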
diff --git a/mercurial/revlog.py b/mercurial/revlog.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL3JldmxvZy5weQ==..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL3JldmxvZy5weQ== 100644
--- a/mercurial/revlog.py
+++ b/mercurial/revlog.py
@@ -136,9 +136,9 @@
 REVIDX_RAWTEXT_CHANGING_FLAGS
 
 parsers = policy.importmod('parsers')
-rustancestor = policy.importrust('ancestor', pyo3=True)
-rustdagop = policy.importrust('dagop', pyo3=True)
-rustrevlog = policy.importrust('revlog', pyo3=True)
+rustancestor = policy.importrust('ancestor')
+rustdagop = policy.importrust('dagop')
+rustrevlog = policy.importrust('revlog')
 
 # Aliased for performance.
 _zlibdecompress = zlib.decompress
diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL3NjbXV0aWwucHk=..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL3NjbXV0aWwucHk= 100644
--- a/mercurial/scmutil.py
+++ b/mercurial/scmutil.py
@@ -83,7 +83,7 @@
     )
 
 parsers = policy.importmod('parsers')
-rustrevlog = policy.importrust('revlog', pyo3=True)
+rustrevlog = policy.importrust('revlog')
 
 termsize = scmplatform.termsize
 
diff --git a/mercurial/setdiscovery.py b/mercurial/setdiscovery.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL3NldGRpc2NvdmVyeS5weQ==..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL3NldGRpc2NvdmVyeS5weQ== 100644
--- a/mercurial/setdiscovery.py
+++ b/mercurial/setdiscovery.py
@@ -276,7 +276,7 @@
 pure_partialdiscovery = partialdiscovery
 
 partialdiscovery = policy.importrust(
-    'discovery', member='PartialDiscovery', default=partialdiscovery, pyo3=True
+    'discovery', member='PartialDiscovery', default=partialdiscovery
 )
 
 
diff --git a/mercurial/testing/revlog.py b/mercurial/testing/revlog.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL3Rlc3RpbmcvcmV2bG9nLnB5..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL3Rlc3RpbmcvcmV2bG9nLnB5 100644
--- a/mercurial/testing/revlog.py
+++ b/mercurial/testing/revlog.py
@@ -35,7 +35,7 @@
     cparsers = None
 
 try:
-    from ..rustext import (  # pytype: disable=import-error
+    from ..pyo3_rustext import (  # pytype: disable=import-error
         revlog as rust_revlog,
     )
 
@@ -44,16 +44,6 @@
     rust_revlog = None
 
 
-try:
-    from ..pyo3_rustext import (  # pytype: disable=import-error
-        revlog as pyo3_revlog,
-    )
-
-    pyo3_revlog.__name__  # force actual import
-except ImportError:
-    pyo3_revlog = None
-
-
 @unittest.skipIf(
     cparsers is None,
     (
@@ -112,17 +102,3 @@
 
     def parserustindex(self, data=None):
         return revlog.RustIndexProxy(self.make_inner_revlog(data=data))
-
-
-@unittest.skipIf(
-    pyo3_revlog is None,
-    'The Rust PyO3 revlog module is not available. It is needed for this test.',
-)
-class PyO3RevlogBasedTestBase(RustRevlogBasedTestBase):
-    @classmethod
-    def irl_class(cls):
-        return pyo3_revlog.InnerRevlog
-
-    @classmethod
-    def nodetree(cls, idx):
-        return pyo3_revlog.NodeTree(idx)
diff --git a/mercurial/util.py b/mercurial/util.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_bWVyY3VyaWFsL3V0aWwucHk=..48dfba3df18ce7311ebc2eb22932589da8d0b253_bWVyY3VyaWFsL3V0aWwucHk= 100644
--- a/mercurial/util.py
+++ b/mercurial/util.py
@@ -3458,3 +3458,31 @@
         pass
     except KeyError:  # unknown parameter
         pass
+
+
+def rust_tracing_span(name: str):
+    """Maybe¹ returns a context manager that calls into the Rust extensions's
+    tracing system to register a span, creating it on `__enter__` and closing
+    it on `__exit__`.
+
+    See "Profiling and tracing" in `rust/README.rst` for more information.
+
+    [1] The context manager does nothing if the Rust extensions are unavailable
+    or have not been compiled with the `full-tracing` feature.
+    """
+    try:
+        tracer = policy.importrust("tracing", member="tracer", default=None)
+    except ImportError:
+        tracer = None
+
+    if tracer is None:
+        import contextlib
+
+        @contextlib.contextmanager
+        def trace_span(name: str):
+            yield
+
+    else:
+        trace_span = tracer.span
+
+    return trace_span(name)
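The helper degrades to a no-op when the Rust extensions are unavailable or
were built without ``full-tracing``, so callers can wrap a block
unconditionally, as ``merge.py`` does above. A hedged usage sketch (the
function and span names below are illustrative, not part of this change)::

  from mercurial import util

  def reload_all(repo):
      with repo.wlock(), util.rust_tracing_span("reload all"):
          # With full-tracing this block shows up as one span in the
          # chrome-trace; otherwise the context manager just yields and
          # adds no measurable overhead.
          repo.invalidate()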
diff --git a/rust/Cargo.lock b/rust/Cargo.lock
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9DYXJnby5sb2Nr..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9DYXJnby5sb2Nr 100644
--- a/rust/Cargo.lock
+++ b/rust/Cargo.lock
@@ -95,17 +95,6 @@
 ]
 
 [[package]]
-name = "atty"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
-dependencies = [
- "hermit-abi",
- "libc",
- "winapi",
-]
-
-[[package]]
 name = "autocfg"
 version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -184,7 +173,7 @@
 checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
 dependencies = [
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.9",
  "serde",
 ]
 
@@ -338,18 +327,6 @@
 ]
 
 [[package]]
-name = "cpython"
-version = "0.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43b398a2c65baaf5892f10bb69b52508bf7a993380cc4ecd3785aaebb5c79389"
-dependencies = [
- "libc",
- "num-traits",
- "paste",
- "python3-sys",
-]
-
-[[package]]
 name = "crc32fast"
 version = "1.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -413,6 +390,20 @@
 ]
 
 [[package]]
+name = "dashmap"
+version = "6.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+ "hashbrown 0.14.5",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core",
+]
+
+[[package]]
 name = "derive_more"
 version = "0.99.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -490,42 +481,6 @@
 checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
 
 [[package]]
-name = "env_filter"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab"
-dependencies = [
- "log",
- "regex",
-]
-
-[[package]]
-name = "env_logger"
-version = "0.9.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
-dependencies = [
- "atty",
- "humantime",
- "log",
- "regex",
- "termcolor",
-]
-
-[[package]]
-name = "env_logger"
-version = "0.11.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d"
-dependencies = [
- "anstream",
- "anstyle",
- "env_filter",
- "humantime",
- "log",
-]
-
-[[package]]
 name = "errno"
 version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -639,9 +594,15 @@
 ]
 
 [[package]]
+name = "hashbrown"
+version = "0.14.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
+
+[[package]]
 name = "heck"
 version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
 
 [[package]]
@@ -642,18 +603,9 @@
 name = "heck"
 version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
 
 [[package]]
-name = "hermit-abi"
-version = "0.1.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
-dependencies = [
- "libc",
-]
-
-[[package]]
 name = "hex"
 version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -685,8 +637,6 @@
  "itertools",
  "lazy_static",
  "libc",
- "log",
- "logging_timer",
  "memchr",
  "memmap2",
  "once_cell",
@@ -696,8 +646,8 @@
  "rand_pcg",
  "rayon",
  "regex",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
  "same-file",
  "schnellru",
  "self_cell",
@@ -707,6 +657,7 @@
  "tempfile",
  "thread_local",
  "toml",
+ "tracing",
  "twox-hash",
  "unicode-width 0.2.0",
  "uuid",
@@ -714,23 +665,7 @@
 ]
 
 [[package]]
-name = "hg-cpython"
-version = "0.1.0"
-dependencies = [
- "cpython",
- "crossbeam-channel",
- "env_logger 0.11.5",
- "hg-core",
- "libc",
- "log",
- "logging_timer",
- "python3-sys",
- "stable_deref_trait",
- "vcsgraph",
-]
-
-[[package]]
 name = "hg-pyo3"
 version = "0.1.0"
 dependencies = [
  "crossbeam-channel",
@@ -733,5 +668,6 @@
 name = "hg-pyo3"
 version = "0.1.0"
 dependencies = [
  "crossbeam-channel",
+ "dashmap",
  "derive_more",
@@ -737,3 +673,2 @@
  "derive_more",
- "env_logger 0.9.3",
  "hg-core",
@@ -739,6 +674,4 @@
  "hg-core",
- "log",
- "logging_timer",
  "pyo3",
  "pyo3-sharedref",
  "stable_deref_trait",
@@ -742,6 +675,9 @@
  "pyo3",
  "pyo3-sharedref",
  "stable_deref_trait",
+ "tracing",
+ "tracing-chrome",
+ "tracing-subscriber",
  "vcsgraph",
 ]
 
@@ -755,12 +691,6 @@
 ]
 
 [[package]]
-name = "humantime"
-version = "2.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
-
-[[package]]
 name = "iana-time-zone"
 version = "0.1.61"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -842,6 +772,12 @@
 ]
 
 [[package]]
+name = "itoa"
+version = "1.0.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
+
+[[package]]
 name = "jobserver"
 version = "0.1.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -906,9 +842,19 @@
 checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
 
 [[package]]
+name = "lock_api"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
 name = "log"
 version = "0.4.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
 
 [[package]]
@@ -909,9 +855,9 @@
 name = "log"
 version = "0.4.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
 
 [[package]]
-name = "logging_timer"
-version = "1.1.1"
+name = "matchers"
+version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -917,3 +863,3 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5669c09dbcb4a0b5f6de8364154495574238e18d6736bbdaa7726307f3268471"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
 dependencies = [
@@ -919,18 +865,5 @@
 dependencies = [
- "log",
- "logging_timer_proc_macros",
-]
-
-[[package]]
-name = "logging_timer_proc_macros"
-version = "1.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "27906ca51651609191eeb2d1fdc6b52b8024789ec188b07aad88b6dfbe392fbe"
-dependencies = [
- "log",
- "proc-macro2",
- "quote",
- "syn 1.0.109",
+ "regex-automata 0.1.10",
 ]
 
 [[package]]
@@ -989,6 +922,16 @@
 ]
 
 [[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
+[[package]]
 name = "num-traits"
 version = "0.2.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1032,6 +975,12 @@
 ]
 
 [[package]]
-name = "paste"
-version = "1.0.15"
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1037,5 +986,18 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
 
 [[package]]
 name = "pkg-config"
@@ -1150,16 +1112,6 @@
 ]
 
 [[package]]
-name = "python3-sys"
-version = "0.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f53ef6740367a09718d2cd21ba15b0d7972342a38e554736bcee7773e45c9f5"
-dependencies = [
- "libc",
- "regex",
-]
-
-[[package]]
 name = "quote"
 version = "1.0.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1321,8 +1273,17 @@
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
 ]
 
 [[package]]
@@ -1333,8 +1294,8 @@
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.8.5",
 ]
 
 [[package]]
 name = "regex-syntax"
@@ -1337,7 +1298,13 @@
 ]
 
 [[package]]
 name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
+[[package]]
+name = "regex-syntax"
 version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
@@ -1349,9 +1316,8 @@
  "chrono",
  "clap",
  "derive_more",
- "env_logger 0.11.5",
  "format-bytes",
  "hg-core",
  "home",
  "lazy_static",
  "libc",
@@ -1353,10 +1319,8 @@
  "format-bytes",
  "hg-core",
  "home",
  "lazy_static",
  "libc",
- "log",
- "logging_timer",
  "rayon",
  "regex",
  "shellexpand",
@@ -1360,6 +1324,9 @@
  "rayon",
  "regex",
  "shellexpand",
+ "tracing",
+ "tracing-chrome",
+ "tracing-subscriber",
  "which",
  "whoami",
 ]
@@ -1387,6 +1354,12 @@
 ]
 
 [[package]]
+name = "ryu"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd"
+
+[[package]]
 name = "same-file"
 version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1407,6 +1380,12 @@
 ]
 
 [[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
 name = "self_cell"
 version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1439,6 +1418,18 @@
 ]
 
 [[package]]
+name = "serde_json"
+version = "1.0.140"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
+dependencies = [
+ "itoa",
+ "memchr",
+ "ryu",
+ "serde",
+]
+
+[[package]]
 name = "serde_spanned"
 version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1472,6 +1463,15 @@
 ]
 
 [[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
 name = "shellexpand"
 version = "3.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1499,6 +1499,12 @@
 ]
 
 [[package]]
+name = "smallvec"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
+
+[[package]]
 name = "stable_deref_trait"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1570,15 +1576,6 @@
 ]
 
 [[package]]
-name = "termcolor"
-version = "1.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
-dependencies = [
- "winapi-util",
-]
-
-[[package]]
 name = "thiserror"
 version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1643,6 +1640,79 @@
 ]
 
 [[package]]
+name = "tracing"
+version = "0.1.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
+dependencies = [
+ "log",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.89",
+]
+
+[[package]]
+name = "tracing-chrome"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf0a738ed5d6450a9fb96e86a23ad808de2b727fd1394585da5cdd6788ffe724"
+dependencies = [
+ "serde_json",
+ "tracing-core",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
 name = "twox-hash"
 version = "1.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1699,6 +1769,12 @@
 ]
 
 [[package]]
+name = "valuable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
+
+[[package]]
 name = "vcpkg"
 version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/rust/Cargo.toml b/rust/Cargo.toml
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9DYXJnby50b21s..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9DYXJnby50b21s 100644
--- a/rust/Cargo.toml
+++ b/rust/Cargo.toml
@@ -1,5 +1,5 @@
 [workspace]
-members = ["hg-core", "hg-cpython", "hg-pyo3", "rhg", "pyo3-sharedref"]
+members = ["hg-core", "hg-pyo3", "rhg", "pyo3-sharedref"]
 exclude = ["chg", "hgcli"]
 resolver = "2"
 
diff --git a/rust/README.rst b/rust/README.rst
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9SRUFETUUucnN0..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9SRUFETUUucnN0 100644
--- a/rust/README.rst
+++ b/rust/README.rst
@@ -12,7 +12,7 @@
 - hgcli. A project that provides a (mostly) self-contained "hg" binary,
   for ease of deployment and a bit of speed, using PyOxidizer. See
   ``hgcli/README.md``.
-- hg-core (and hg-cpython): implementation of some
+- hg-core (and hg-pyo3): implementation of some
   functionality of mercurial in Rust, e.g. ancestry computations in
   revision graphs, status or pull discovery. The top-level ``Cargo.toml`` file
   defines a workspace containing these crates.
@@ -33,6 +33,9 @@
   checking Rust extensions (installed)
   checking module policy (rust+c-allow)
 
+
+**Note: the HGWITHRUSTEXT environment variable is deprecated and will be
+removed in Mercurial 7.1; do not use it.**
 If the environment variable ``HGWITHRUSTEXT=cpython`` is set, the Rust
 extension will be used by default unless ``--no-rust``.
 
@@ -43,8 +46,8 @@
 ================
 
 In the future, compile-time opt-ins may be added
-to the ``features`` section in ``hg-cpython/Cargo.toml``.
+to the ``features`` section in ``hg-pyo3/Cargo.toml``.
 
 To use features from the Makefile, use the ``HG_RUST_FEATURES`` environment
 variable: for instance ``HG_RUST_FEATURES="some-feature other-feature"``.
 
@@ -47,7 +50,7 @@
 
 To use features from the Makefile, use the ``HG_RUST_FEATURES`` environment
 variable: for instance ``HG_RUST_FEATURES="some-feature other-feature"``.
 
-Profiling
-=========
+Profiling and tracing
+=====================
 
@@ -53,8 +56,14 @@
 
-Setting the environment variable ``RUST_LOG=trace`` will make hg print
-a few high level rust-related performance numbers. It can also
-indicate why the rust code cannot be used (say, using lookarounds in
-hgignore).
+The terminology below rests on an oversimplification: profiling here means
+sampling-based (or otherwise statistical) ways of looking at the performance
+of Mercurial, whereas tracing is a deliberate attempt to record all relevant
+events, as determined by explicit tracing code.
+
+The line is blurred by tools like Intel Processor Trace, but if you're using
+Intel PT, you probably already know.
+
+Profiling
+---------
 
 Creating a ``.cargo/config`` file with the following content enables
 debug information in optimized builds. This makes profiles more informative
@@ -76,6 +85,58 @@
   $ make PURE=--rust local # Don't forget to recompile after a code change
   $ py-spy record --native --output /tmp/profile.svg -- ./hg ...
 
+Tracing
+-------
+
+Simple stderr
+~~~~~~~~~~~~~
+
+Setting the environment variable ``RUST_LOG`` to any valid level (``error``,
+``warn``, ``info``, ``debug`` and ``trace``, in ascending order of verbosity)
+will make hg print a few high-level rust-related performance numbers to stderr.
+It can also indicate why the rust code cannot be used (say, using lookarounds
+in hgignore). ``RUST_LOG`` usage can be further refined; please refer to the
+``tracing-subscriber`` rust crate for more details on ``EnvFilter``.
+
+Example::
+
+  $ make build-rhg
+  $ RUST_LOG=trace rust/target/release/rhg status > /dev/null
+  2025-03-04T12:14:42.336153Z DEBUG hg::utils: Capped the rayon threadpool to 16 threads
+  2025-03-04T12:14:42.336901Z DEBUG config_setup: rhg: close time.busy=730µs time.idle=2.56µs
+  2025-03-04T12:14:42.338668Z DEBUG repo setup:configitems.toml: hg::config::config_items: close time.busy=1.70ms time.idle=270ns
+  2025-03-04T12:14:42.338682Z DEBUG repo setup: rhg: close time.busy=1.77ms time.idle=471ns
+  2025-03-04T12:14:42.338716Z DEBUG main_with_result:CLI and command setup:new_v2: hg::dirstate::dirstate_map: close time.busy=291ns time.idle=210ns
+  2025-03-04T12:14:42.354094Z DEBUG main_with_result:CLI and command setup:blackbox: rhg: close time.busy=15.2ms time.idle=622ns
+  2025-03-04T12:14:42.354107Z DEBUG main_with_result:CLI and command setup: rhg: close time.busy=15.4ms time.idle=270ns
+  2025-03-04T12:14:42.356250Z DEBUG main_with_result:rhg status:status:build_regex_match:re_matcher: hg::matchers: close time.busy=961µs time.idle=541ns
+  2025-03-04T12:14:42.356291Z DEBUG main_with_result:rhg status:status:build_regex_match: hg::matchers: close time.busy=1.69ms time.idle=420ns
+  2025-03-04T12:14:42.374671Z DEBUG main_with_result:rhg status:status: hg::dirstate::status: close time.busy=20.5ms time.idle=532ns
+  2025-03-04T12:14:42.374700Z DEBUG main_with_result:rhg status: rhg::commands::status: close time.busy=20.6ms time.idle=470ns
+  2025-03-04T12:14:42.380897Z DEBUG main_with_result:blackbox: rhg: close time.busy=6.19ms time.idle=932ns
+  2025-03-04T12:14:42.380918Z DEBUG main_with_result: rhg: close time.busy=42.2ms time.idle=211ns
+
+Full timeline view
+~~~~~~~~~~~~~~~~~~
+
+If compiled with the ``full-tracing`` feature, two things happen:
+
+  - ``RUST_LOG`` writes a chrome-trace to a file instead of logging to stderr
+  - More (maybe extremely) verbose tracing is available at the ``trace`` level
+    that would otherwise get compiled out entirely.
+
+The file defaults to ``./trace-{unix epoch in micros}.json``, but can be
+overridden via the ``HG_TRACE_PATH`` environment variable.
+
+Example::
+
+  $ HG_RUST_FEATURES="full-tracing" make local PURE=--rust
+  $ HG_TRACE_PATH=/tmp/trace.json RUST_LOG=debug ./hg st > /dev/null
+
+In this case, opening ``/tmp/trace.json`` in ``ui.perfetto.dev`` will show a
+timeline of all recorded spans and events, which can be very useful for making
+sense of what is happening.
+
 Developing Rust
 ===============
 
@@ -99,5 +158,5 @@
 Build and development
 ---------------------
 
-Go to the ``hg-cpython`` folder::
+Go to the ``hg-pyo3`` folder::
 
@@ -103,5 +162,5 @@
 
-  $ cd rust/hg-cpython
+  $ cd rust/hg-pyo3
 
 Or, only the ``hg-core`` folder. Be careful not to break compatibility::
 
@@ -126,7 +185,7 @@
 You can run only the rust-specific tests (as opposed to tests of
 mercurial as a whole) with::
 
-  $ cargo test --all
+  $ cargo test --all --no-default-features
 
 Formatting the code
 -------------------
@@ -152,7 +211,7 @@
 Our CI enforces that the code is free of Clippy warnings, so you might
 want to run it on your side before submitting your changes. Simply do::
 
-  % cargo clippy
+  $ cargo clippy
 
 from the top of the Rust workspace. Clippy is part of the default
 ``rustup`` install, so it should work right away. In case it would
diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL0NhcmdvLnRvbWw=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL0NhcmdvLnRvbWw= 100644
--- a/rust/hg-core/Cargo.toml
+++ b/rust/hg-core/Cargo.toml
@@ -11,6 +11,9 @@
 [lib]
 name = "hg"
 
+[features]
+full-tracing = []
+
 [dependencies]
 bitflags = "1.3.2"
 bytes-cast = "0.3.0"
@@ -24,7 +27,6 @@
 itertools = "0.10.5"
 lazy_static = "1.4.0"
 libc = "0.2.137"
-logging_timer = "1.1.0"
 memchr = "2"
 rand = "0.8.5"
 rand_pcg = "0.3.1"
@@ -40,7 +42,6 @@
 toml = "0.6"
 thread_local = "1.1.4"
 crossbeam-channel = "0.5.6"
-log = "0.4.17"
 memmap2 = { version = "0.5.8", features = ["stable_deref_trait"] }
 zstd = "0.12"
 format-bytes = "0.3.0"
@@ -56,6 +57,7 @@
 unicode-width = "0.2.0"
 bit-set = "0.8.0"
 static_assertions_next = "1.1.2"
+tracing = { version = "0.1.41", features = ["attributes", "log"] }
 
 # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
 # we have a clearer view of which backend is the fastest.
diff --git a/rust/hg-core/src/config/config_items.rs b/rust/hg-core/src/config/config_items.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9jb25maWcvY29uZmlnX2l0ZW1zLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9jb25maWcvY29uZmlnX2l0ZW1zLnJz 100644
--- a/rust/hg-core/src/config/config_items.rs
+++ b/rust/hg-core/src/config/config_items.rs
@@ -524,7 +524,11 @@
     }
 
     /// Returns `Self`, given the contents of `mercurial/configitems.toml`
-    #[logging_timer::time("trace")]
+    #[tracing::instrument(
+        level = "debug",
+        skip_all,
+        name = "configitems.toml"
+    )]
     pub fn from_contents(contents: &str) -> Result<Self, HgError> {
         let mut from_file: ConfigItems =
             toml::from_str(contents).map_err(|e| {
diff --git a/rust/hg-core/src/dagops.rs b/rust/hg-core/src/dagops.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9kYWdvcHMucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9kYWdvcHMucnM= 100644
--- a/rust/hg-core/src/dagops.rs
+++ b/rust/hg-core/src/dagops.rs
@@ -136,7 +136,7 @@
 ///
 /// Currently, the given `Graph` has to implement `Clone`, which means
 /// actually cloning just a reference-counted Python pointer if
-/// it's passed over through `rust-cpython`. This is due to the internal
+/// it's passed over through `rust-pyo3`. This is due to the internal
 /// use of `AncestorsIterator`
 ///
 /// # Algorithmic details
diff --git a/rust/hg-core/src/dirstate/dirstate_map.rs b/rust/hg-core/src/dirstate/dirstate_map.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9kaXJzdGF0ZV9tYXAucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9kaXJzdGF0ZV9tYXAucnM= 100644
--- a/rust/hg-core/src/dirstate/dirstate_map.rs
+++ b/rust/hg-core/src/dirstate/dirstate_map.rs
@@ -537,7 +537,7 @@
         }
     }
 
-    #[logging_timer::time("trace")]
+    #[tracing::instrument(level = "debug", skip_all)]
     pub fn new_v2(
         on_disk: &'on_disk [u8],
         data_size: usize,
@@ -552,7 +552,7 @@
         }
     }
 
-    #[logging_timer::time("trace")]
+    #[tracing::instrument(level = "debug", skip_all)]
     pub fn new_v1(
         on_disk: &'on_disk [u8],
         identity: Option<DirstateIdentity>,
@@ -1327,7 +1327,7 @@
         })
     }
 
-    #[logging_timer::time("trace")]
+    #[tracing::instrument(level = "debug", skip_all)]
     pub fn pack_v1(
         &self,
         parents: DirstateParents,
@@ -1367,7 +1367,7 @@
     /// appended to the existing data file whose content is at
     /// `map.on_disk` (true), instead of written to a new data file
     /// (false), and the previous size of data on disk.
-    #[logging_timer::time("trace")]
+    #[tracing::instrument(level = "debug", skip_all)]
     pub fn pack_v2(
         &self,
         write_mode: DirstateMapWriteMode,
diff --git a/rust/hg-core/src/dirstate/on_disk.rs b/rust/hg-core/src/dirstate/on_disk.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9vbl9kaXNrLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9vbl9kaXNrLnJz 100644
--- a/rust/hg-core/src/dirstate/on_disk.rs
+++ b/rust/hg-core/src/dirstate/on_disk.rs
@@ -631,5 +631,5 @@
         DirstateMapWriteMode::ForceAppend => true,
     };
     if append {
-        log::trace!("appending to the dirstate data file");
+        tracing::debug!("appending to the dirstate data file");
     } else {
@@ -635,5 +635,5 @@
     } else {
-        log::trace!("creating new dirstate data file");
+        tracing::debug!("creating new dirstate data file");
     }
 
     // This ignores the space for paths, and for nodes without an entry.
diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9wYXJzZXJzLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9wYXJzZXJzLnJz 100644
--- a/rust/hg-core/src/dirstate/parsers.rs
+++ b/rust/hg-core/src/dirstate/parsers.rs
@@ -34,7 +34,7 @@
     Ok(parents)
 }
 
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 pub fn parse_dirstate(contents: &[u8]) -> Result<ParseResult, HgError> {
     let mut copies = Vec::new();
     let mut entries = Vec::new();
diff --git a/rust/hg-core/src/dirstate/status.rs b/rust/hg-core/src/dirstate/status.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9zdGF0dXMucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9kaXJzdGF0ZS9zdGF0dXMucnM= 100644
--- a/rust/hg-core/src/dirstate/status.rs
+++ b/rust/hg-core/src/dirstate/status.rs
@@ -177,7 +177,7 @@
 /// and its use of `itertools::merge_join_by`. When reaching a path that only
 /// exists in one of the two trees, depending on information requested by
 /// `options` we may need to traverse the remaining subtree.
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 pub fn status<'dirstate>(
     dmap: &'dirstate mut DirstateMap,
     matcher: &(dyn Matcher + Sync),
@@ -530,6 +530,10 @@
 
     /// Returns whether all child entries of the filesystem directory have a
     /// corresponding dirstate node or are ignored.
+    #[cfg_attr(
+        feature = "full-tracing",
+        tracing::instrument(level = "trace", skip_all)
+    )]
     fn traverse_fs_directory_and_dirstate<'ancestor>(
         &self,
         has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
@@ -673,6 +677,10 @@
         .map(|res| res && readdir_succeeded)
     }
 
+    #[cfg_attr(
+        feature = "full-tracing",
+        tracing::instrument(level = "trace", skip_all)
+    )]
     fn traverse_fs_and_dirstate<'ancestor>(
         &self,
         fs_entry: &DirEntry,
@@ -922,6 +930,10 @@
         Ok(())
     }
 
+    #[cfg_attr(
+        feature = "full-tracing",
+        tracing::instrument(level = "trace", skip_all)
+    )]
     /// A node in the dirstate tree has no corresponding filesystem entry
     fn traverse_dirstate_only(
         &self,
@@ -962,6 +974,10 @@
         Ok(())
     }
 
+    #[cfg_attr(
+        feature = "full-tracing",
+        tracing::instrument(level = "trace", skip_all)
+    )]
     /// Something in the filesystem has no corresponding dirstate node
     ///
     /// Returns whether that path is ignored
diff --git a/rust/hg-core/src/matchers.rs b/rust/hg-core/src/matchers.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9tYXRjaGVycy5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9tYXRjaGVycy5ycw== 100644
--- a/rust/hg-core/src/matchers.rs
+++ b/rust/hg-core/src/matchers.rs
@@ -846,7 +846,7 @@
 /// This can fail when the pattern is invalid or not supported by the
 /// underlying engine (the `regex` crate), for instance anything with
 /// back-references.
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 fn re_matcher(pattern: &Hir) -> PatternResult<RegexMatcher> {
     let re = regex_automata::meta::Builder::new()
         .configure(
@@ -865,7 +865,7 @@
     })
 }
 
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 /// Returns the regex pattern and a function that matches an `HgPath` against
 /// said regex formed by the given ignore patterns.
 fn build_regex_match<'a>(
@@ -910,7 +910,7 @@
     Ok((full_regex, func))
 }
 
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 fn build_regex_match_for_debug<'a>(
     ignore_patterns: &[IgnorePattern],
     glob_suffix: GlobSuffix,
diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9yZXBvLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9yZXBvLnJz 100644
--- a/rust/hg-core/src/repo.rs
+++ b/rust/hg-core/src/repo.rs
@@ -322,7 +322,7 @@
             match docket_res {
                 Ok(docket) => docket.parents(),
                 Err(_) => {
-                    log::info!(
+                    tracing::info!(
                         "Parsing dirstate docket failed, \
                         falling back to dirstate-v1"
                     );
@@ -368,7 +368,7 @@
                     ))
                 }
                 Err(_) => {
-                    log::info!(
+                    tracing::info!(
                         "Parsing dirstate docket failed, \
                         falling back to dirstate-v1"
                     );
@@ -403,7 +403,7 @@
                         DirstateError::Common(HgError::RaceDetected(
                             context,
                         )) => {
-                            log::info!(
+                            tracing::info!(
                                 "dirstate read race detected {} (retry {}/{})",
                                 context,
                                 tries,
@@ -412,7 +412,7 @@
                             continue;
                         }
                         _ => {
-                            log::info!(
+                            tracing::info!(
                                 "Reading dirstate v2 failed, \
                                 falling back to v1"
                             );
@@ -669,7 +669,7 @@
                 // TODO complain loudly if we've changed anything important
                 // without taking the lock.
                 // (see `hg help config.format.use-dirstate-tracked-hint`)
-                log::debug!(
+                tracing::debug!(
                     "dirstate has changed since last read, not updating."
                 );
                 return Ok(());
@@ -725,6 +725,6 @@
             //   https://github.com/openzfs/zfs/issues/13370
             //
             if !append {
-                log::trace!("creating a new dirstate data file");
+                tracing::debug!("creating a new dirstate data file");
                 options.create_new(true);
             } else {
@@ -729,6 +729,6 @@
                 options.create_new(true);
             } else {
-                log::trace!("appending to the dirstate data file");
+                tracing::debug!("appending to the dirstate data file");
             }
 
             let data_size = (|| {
@@ -768,7 +768,7 @@
                 // TODO complain loudly if we've changed anything important
                 // without taking the lock.
                 // (see `hg help config.format.use-dirstate-tracked-hint`)
-                log::debug!(
+                tracing::debug!(
                     "dirstate has changed since last read, not updating."
                 );
                 return Ok(());
diff --git a/rust/hg-core/src/revlog/file_io.rs b/rust/hg-core/src/revlog/file_io.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvZmlsZV9pby5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvZmlsZV9pby5ycw== 100644
--- a/rust/hg-core/src/revlog/file_io.rs
+++ b/rust/hg-core/src/revlog/file_io.rs
@@ -64,7 +64,7 @@
         handle.read_exact(length).when_reading_file(&self.filename)
     }
 
-    /// `pub` only for hg-cpython
+    /// `pub` only for hg-pyo3
     #[doc(hidden)]
     pub fn get_read_handle(&self) -> Result<FileHandle, HgError> {
         if let Some(handle) = &*self.writing_handle.get_or_default().borrow() {
@@ -91,7 +91,7 @@
         Ok(new_handle)
     }
 
-    /// `pub` only for hg-cpython
+    /// `pub` only for hg-pyo3
     #[doc(hidden)]
     pub fn exit_reading_context(&self) {
         self.reading_handle.get().map(|h| h.take());
diff --git a/rust/hg-core/src/revlog/index.rs b/rust/hg-core/src/revlog/index.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvaW5kZXgucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvaW5kZXgucnM= 100644
--- a/rust/hg-core/src/revlog/index.rs
+++ b/rust/hg-core/src/revlog/index.rs
@@ -1197,7 +1197,7 @@
         revisions: &[Revision],
     ) -> Result<Vec<Revision>, GraphError> {
         // given that revisions is expected to be small, we find this shortcut
-        // potentially acceptable, especially given that `hg-cpython` could
+        // potentially acceptable, especially given that `hg-pyo3` could
         // very much bypass this, constructing a vector of unique values from
         // the onset.
         let as_set: HashSet<Revision> = revisions.iter().copied().collect();
diff --git a/rust/hg-core/src/revlog/inner_revlog.rs b/rust/hg-core/src/revlog/inner_revlog.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvaW5uZXJfcmV2bG9nLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy9yZXZsb2cvaW5uZXJfcmV2bG9nLnJz 100644
--- a/rust/hg-core/src/revlog/inner_revlog.rs
+++ b/rust/hg-core/src/revlog/inner_revlog.rs
@@ -431,7 +431,7 @@
         res.map_err(Into::into)
     }
 
-    /// `pub` only for use in hg-cpython
+    /// `pub` only for use in hg-pyo3
     #[doc(hidden)]
     pub fn enter_reading_context(&self) -> Result<(), HgError> {
         if self.is_empty() {
@@ -449,7 +449,7 @@
         Ok(())
     }
 
-    /// `pub` only for use in hg-cpython
+    /// `pub` only for use in hg-pyo3
     #[doc(hidden)]
     pub fn exit_reading_context(&self) {
         self.segment_file.exit_reading_context()
@@ -512,7 +512,7 @@
         Ok(())
     }
 
-    /// Only `pub` for `hg-cpython`.
+    /// Only `pub` for `hg-pyo3`.
     /// Obtain decompressed raw data for the specified revisions that are
     /// assumed to be in ascending order.
     ///
@@ -821,7 +821,7 @@
         Ok(res)
     }
 
-    /// `pub` only for use in hg-cpython
+    /// `pub` only for use in hg-pyo3
     #[doc(hidden)]
     pub fn exit_writing_context(&mut self) {
         self.writing_handles.take();
@@ -829,9 +829,9 @@
         self.segment_file.reading_handle.get().map(|h| h.take());
     }
 
-    /// `pub` only for use in hg-cpython
+    /// `pub` only for use in hg-pyo3
     #[doc(hidden)]
     pub fn python_writing_handles(&self) -> Option<&WriteHandles> {
         self.writing_handles.as_ref()
     }
 
@@ -833,9 +833,9 @@
     #[doc(hidden)]
     pub fn python_writing_handles(&self) -> Option<&WriteHandles> {
         self.writing_handles.as_ref()
     }
 
-    /// `pub` only for use in hg-cpython
+    /// `pub` only for use in hg-pyo3
     #[doc(hidden)]
     pub fn enter_writing_context(
         &mut self,
@@ -1285,7 +1285,7 @@
         Ok(self.canonical_index_file())
     }
 
-    /// `pub` only for `hg-cpython`. This is made a different method than
+    /// `pub` only for `hg-pyo3`. This is made a different method than
     /// [`Revlog::index`] in case there is a different invariant that pops up
     /// later.
     #[doc(hidden)]
diff --git a/rust/hg-core/src/update.rs b/rust/hg-core/src/update.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy91cGRhdGUucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy91cGRhdGUucnM= 100644
--- a/rust/hg-core/src/update.rs
+++ b/rust/hg-core/src/update.rs
@@ -46,6 +46,7 @@
 /// Do not call this outside of a Python context. This does *not* handle any
 /// of the checks, hooks, lock taking needed to setup and get out of this
 /// update from the null revision.
+#[tracing::instrument(level = "debug", skip_all)]
 pub fn update_from_null(
     repo: &Repo,
     to: UncheckedRevision,
@@ -126,9 +127,10 @@
 
     // Reset the global interrupt now that we're done
     if INTERRUPT_RECEIVED.swap(false, Ordering::Relaxed) {
+        tracing::warn!("Interrupt received, aborting the update");
         // The threads have all exited early, let's re-raise
         return Err(HgError::InterruptReceived);
     }
 
     let errors: Vec<HgError> = errors_receiver.iter().collect();
     if !errors.is_empty() {
@@ -129,8 +131,11 @@
         // The threads have all exited early, let's re-raise
         return Err(HgError::InterruptReceived);
     }
 
     let errors: Vec<HgError> = errors_receiver.iter().collect();
     if !errors.is_empty() {
-        log::debug!("{} errors during update (see trace logs)", errors.len());
+        tracing::debug!(
+            "{} errors during update (see trace logs)",
+            errors.len()
+        );
         for error in errors.iter() {
@@ -136,5 +141,5 @@
         for error in errors.iter() {
-            log::trace!("{}", error);
+            tracing::trace!("{}", error);
         }
         // Best we can do is raise the first error (in order of the channel)
         return Err(errors.into_iter().next().expect("can never be empty"));
@@ -173,7 +178,7 @@
 /// Chunking less (and doing approximately `files_count`/`threads`) actually
 /// ends up being less performant: my hypothesis is `rayon`'s work stealing
 /// being more efficient with tasks of varying lengths.
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 fn chunk_tracked_files(
     tracked_files: Vec<ExpandedManifestEntry>,
 ) -> Vec<(&HgPath, Vec<ExpandedManifestEntry>)> {
@@ -198,7 +203,7 @@
     chunks
 }
 
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 #[allow(clippy::too_many_arguments)]
 fn create_working_copy<'a: 'b, 'b>(
     chunks: Vec<(&HgPath, Vec<ExpandedManifestEntry<'a>>)>,
@@ -242,7 +247,10 @@
                     ))
                     .expect("channel should not be disconnected"),
                 Ok(pool) => {
-                    log::trace!("restricting update to {} threads", workers);
+                    tracing::debug!(
+                        "restricting update to {} threads",
+                        workers
+                    );
                     pool.install(|| {
                         let _ =
                             chunks.into_par_iter().try_for_each(work_closure);
@@ -356,7 +364,7 @@
     Ok(())
 }
 
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all)]
 fn update_dirstate(
     repo: &Repo,
     files_receiver: Receiver<(&HgPath, u32, usize, TruncatedTimestamp)>,
@@ -483,7 +491,7 @@
 
     while fs_time == old_fs_time {
         if std::time::Instant::now() - start > FS_TICK_WAIT_TIMEOUT {
-            log::trace!(
+            tracing::debug!(
                 "timed out waiting for the fs clock to tick after {:?}",
                 FS_TICK_WAIT_TIMEOUT
             );
@@ -491,7 +499,7 @@
         }
         fs_time = filesystem_now(working_directory_path).ok()?;
     }
-    log::trace!(
+    tracing::debug!(
         "waited for {:?} before writing the dirstate",
         fs_time.duration_since(old_fs_time)
     );
diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy91dGlscy5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy91dGlscy5ycw== 100644
--- a/rust/hg-core/src/utils.rs
+++ b/rust/hg-core/src/utils.rs
@@ -262,7 +262,8 @@
             .num_threads(new_thread_count)
             .build_global();
         if res.is_ok() {
-            log::trace!(
+            tracing::debug!(
+                name: "threadpool capped",
                 "Capped the rayon threadpool to {new_thread_count} threads",
             );
         }
diff --git a/rust/hg-core/src/utils/debug.rs b/rust/hg-core/src/utils/debug.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jb3JlL3NyYy91dGlscy9kZWJ1Zy5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1jb3JlL3NyYy91dGlscy9kZWJ1Zy5ycw== 100644
--- a/rust/hg-core/src/utils/debug.rs
+++ b/rust/hg-core/src/utils/debug.rs
@@ -27,11 +27,7 @@
     let timeout_seconds =
         match config.get_u32(b"devel", timeout_opt.as_bytes()) {
             Ok(Some(timeout)) => timeout,
-            Err(e) => {
-                log::debug!("{e}");
-                default_timeout
-            }
             _ => default_timeout,
         };
     let timeout_seconds = timeout_seconds as u64;
 
@@ -34,8 +30,8 @@
             _ => default_timeout,
         };
     let timeout_seconds = timeout_seconds as u64;
 
-    log::debug!(
+    tracing::debug!(
         "Config option `{config_option}` found, \
              waiting for file `{file_path}` to be created"
     );
@@ -62,7 +58,7 @@
     let mut found = false;
     while start.elapsed() < timeout {
         if path.exists() {
-            log::debug!("File `{file_path}` was created");
+            tracing::debug!("File `{file_path}` was created");
             found = true;
             break;
         } else {
diff --git a/rust/hg-cpython/Cargo.toml b/rust/hg-cpython/Cargo.toml
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL0NhcmdvLnRvbWw=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/Cargo.toml
+++ /dev/null
@@ -1,24 +0,0 @@
-[package]
-name = "hg-cpython"
-version = "0.1.0"
-authors = ["Georges Racinet <gracinet@anybox.fr>"]
-edition = "2021"
-
-[lints]
-workspace = true
-
-[lib]
-name='rusthg'
-crate-type = ["cdylib"]
-
-[dependencies]
-cpython = { version = "0.7.2", features = ["extension-module"] }
-crossbeam-channel = "0.5.6"
-hg-core = { path = "../hg-core"}
-libc = "0.2.137"
-log = "0.4.17"
-env_logger = "0.11"
-stable_deref_trait = "1.2.0"
-vcsgraph = "0.2.0"
-logging_timer = "1.1.0"
-python3-sys = "0.7.2"
diff --git a/rust/hg-cpython/src/ancestors.rs b/rust/hg-cpython/src/ancestors.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9hbmNlc3RvcnMucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/ancestors.rs
+++ /dev/null
@@ -1,414 +0,0 @@
-// ancestors.rs
-//
-// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::ancestors` module provided by the
-//! `hg-core` crate. From Python, this will be seen as `rustext.ancestor`
-//! and can be used as replacement for the the pure `ancestor` Python module.
-//!
-//! # Classes visible from Python:
-//! - [`LazyAncestors`] is the Rust implementation of
-//!   `mercurial.ancestor.lazyancestors`. The only difference is that it is
-//!   instantiated with a C `parsers.index` instance instead of a parents
-//!   function.
-//!
-//! - [`MissingAncestors`] is the Rust implementation of
-//!   `mercurial.ancestor.incrementalmissingancestors`.
-//!
-//!   API differences:
-//!    + it is instantiated with a C `parsers.index` instance instead of a
-//!      parents function.
-//!    + `MissingAncestors.bases` is a method returning a tuple instead of
-//!      a set-valued attribute. We could return a Python set easily if our
-//!      [PySet PR](https://github.com/dgrunwald/rust-cpython/pull/165)
-//!      is accepted.
-//!
-//! - [`AncestorsIterator`] is the Rust counterpart of the
-//!   `ancestor._lazyancestorsiter` Python generator. From Python, instances of
-//!   this should be mainly obtained by calling `iter()` on a [`LazyAncestors`]
-//!   instance.
-//!
-//! [`LazyAncestors`]: struct.LazyAncestors.html
-//! [`MissingAncestors`]: struct.MissingAncestors.html
-//! [`AncestorsIterator`]: struct.AncestorsIterator.html
-use crate::revlog::py_rust_index_to_graph;
-use crate::PyRevision;
-use crate::{
-    conversion::rev_pyiter_collect, exceptions::GraphError,
-    revlog::PySharedIndex,
-};
-use cpython::{
-    ObjectProtocol, PyClone, PyDict, PyErr, PyList, PyModule, PyObject,
-    PyResult, Python, PythonObject, ToPyObject, UnsafePyLeaked,
-};
-
-use hg::MissingAncestors as CoreMissing;
-use hg::Revision;
-use std::cell::RefCell;
-use std::collections::HashSet;
-use vcsgraph::lazy_ancestors::{
-    AncestorsIterator as VCGAncestorsIterator,
-    LazyAncestors as VCGLazyAncestors,
-};
-
-// Error propagation for an [`UnsafePyLeaked`] wrapping a [`Result`]
-//
-// It would be nice for UnsharedPyLeaked to provide this directly as a variant
-// of the `map` method with a signature such as:
-//
-// ```
-//   unsafe fn map_or_err(py: Python,
-//                        f: impl FnOnce(T) -> Result(U, E),
-//                        convert_err: impl FnOnce(Python, E) -> PyErr)
-// ```
-//
-// This would spare users of the `cpython` crate the additional `unsafe` deref
-// to inspect the error and return it outside `UnsafePyLeaked`, and the
-// subsequent unwrapping that this function performs.
-fn pyleaked_or_map_err<T, E: std::fmt::Debug + Copy>(
-    py: Python,
-    leaked: UnsafePyLeaked<Result<T, E>>,
-    convert_err: impl FnOnce(Python, E) -> PyErr,
-) -> PyResult<UnsafePyLeaked<T>> {
-    // Result.inspect_err is unstable in Rust 1.61
-    // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-    if let Err(e) = *unsafe { leaked.try_borrow(py)? } {
-        return Err(convert_err(py, e));
-    }
-    // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-    Ok(unsafe {
-        leaked.map(py, |res| {
-            res.expect("Error case should have already be treated")
-        })
-    })
-}
-
-py_class!(pub class AncestorsIterator |py| {
-    data inner: RefCell<UnsafePyLeaked<VCGAncestorsIterator<PySharedIndex>>>;
-
-    def __next__(&self) -> PyResult<Option<PyRevision>> {
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let mut inner = unsafe { leaked.try_borrow_mut(py)? };
-        match inner.next() {
-            Some(Err(e)) => Err(GraphError::pynew_from_vcsgraph(py, e)),
-            None => Ok(None),
-            Some(Ok(r)) => Ok(Some(PyRevision(r))),
-        }
-    }
-
-    def __contains__(&self, rev: PyRevision) -> PyResult<bool> {
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let mut inner = unsafe { leaked.try_borrow_mut(py)? };
-        inner.contains(rev.0)
-            .map_err(|e| GraphError::pynew_from_vcsgraph(py, e))
-    }
-
-    def __iter__(&self) -> PyResult<Self> {
-        Ok(self.clone_ref(py))
-    }
-
-    def __new__(
-        _cls,
-        index: PyObject,
-        initrevs: PyObject,
-        stoprev: PyRevision,
-        inclusive: bool
-    ) -> PyResult<AncestorsIterator> {
-        Self::inner_new(py, index, initrevs, stoprev, inclusive)
-    }
-
-});
-
-impl AncestorsIterator {
-    pub fn from_inner(
-        py: Python,
-        ait: UnsafePyLeaked<VCGAncestorsIterator<PySharedIndex>>,
-    ) -> PyResult<Self> {
-        Self::create_instance(py, RefCell::new(ait))
-    }
-
-    pub fn inner_new(
-        py: Python,
-        index: PyObject,
-        initrevs: PyObject,
-        stoprev: PyRevision,
-        inclusive: bool,
-    ) -> PyResult<AncestorsIterator> {
-        let index = py_rust_index_to_graph(py, index)?;
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let initvec: Vec<_> = {
-            let borrowed_idx = unsafe { index.try_borrow(py)? };
-            rev_pyiter_collect(py, &initrevs, &*borrowed_idx)?
-        };
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let res_ait = unsafe {
-            index.map(py, |idx| {
-                VCGAncestorsIterator::new(
-                    idx,
-                    initvec.into_iter().map(|r| r.0),
-                    stoprev.0,
-                    inclusive,
-                )
-            })
-        };
-        let ait =
-            pyleaked_or_map_err(py, res_ait, GraphError::pynew_from_vcsgraph)?;
-        AncestorsIterator::from_inner(py, ait)
-    }
-}
-
-py_class!(pub class LazyAncestors |py| {
-    data inner: RefCell<UnsafePyLeaked<
-        RefCell<VCGLazyAncestors<PySharedIndex>>
-        >>;
-    data index: PyObject;
-    data initrevs: PyObject;
-    data stoprev: PyRevision;
-    data inclusive: bool;
-
-    def __contains__(&self, rev: PyRevision) -> PyResult<bool> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner: &RefCell<VCGLazyAncestors<PySharedIndex>> =
-            &*unsafe { leaked.try_borrow(py)? };
-        let inner_mut: &mut VCGLazyAncestors<PySharedIndex> =
-            &mut inner.borrow_mut();
-        inner_mut.contains(rev.0)
-            .map_err(|e| GraphError::pynew_from_vcsgraph(py, e))
-    }
-
-    def __iter__(&self) -> PyResult<AncestorsIterator> {
-        let index = self.index(py).clone_ref(py);
-        let initrevs = self.initrevs(py).clone_ref(py);
-        AncestorsIterator::inner_new(py, index, initrevs,
-                                     *self.stoprev(py),
-                                     *self.inclusive(py))
-    }
-
-    def __bool__(&self) -> PyResult<bool> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner = unsafe { leaked.try_borrow(py)? };
-        let empty = inner.borrow().is_empty();
-        Ok(!empty)
-    }
-
-    def __new__(
-        _cls,
-        index: PyObject,
-        initrevs: PyObject,
-        stoprev: PyRevision,
-        inclusive: bool
-    ) -> PyResult<Self> {
-        let cloned_index = index.clone_ref(py);
-        let index = py_rust_index_to_graph(py, index)?;
-        let initvec: Vec<_> = {
-            // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-            let borrowed_idx =  unsafe {index.try_borrow(py)?};
-            rev_pyiter_collect(py, &initrevs, &*borrowed_idx)?
-        };
-
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let res_lazy =
-            unsafe { index.map(py, |idx| VCGLazyAncestors::new(
-                idx,
-                initvec.into_iter().map(|r| r.0),
-                stoprev.0,
-                inclusive
-            ))};
-        let lazy = pyleaked_or_map_err(py, res_lazy,
-                                       GraphError::pynew_from_vcsgraph)?;
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let lazy_cell = unsafe { lazy.map(py, RefCell::new)};
-        let res = Self::create_instance(
-            py, RefCell::new(lazy_cell),
-            cloned_index, initrevs, stoprev, inclusive)?;
-        Ok(res)
-        }
-
-});
-
-py_class!(pub class MissingAncestors |py| {
-    data inner: RefCell<UnsafePyLeaked<
-        CoreMissing<PySharedIndex>
-        >>;
-    data index: PyObject;
-
-    def __new__(
-        _cls,
-        index: PyObject,
-        bases: PyObject
-    )
-    -> PyResult<MissingAncestors> {
-        let cloned_index = index.clone_ref(py);
-        let inner_index = py_rust_index_to_graph(py, index)?;
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let bases_vec: Vec<_> = {
-            let borrowed_idx = unsafe { inner_index.try_borrow(py)? };
-            rev_pyiter_collect(py, &bases, &*borrowed_idx)?
-        };
-
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner = unsafe {
-            inner_index.map(py, |idx| CoreMissing::new(idx, bases_vec))
-        };
-        MissingAncestors::create_instance(
-            py,
-            RefCell::new(inner),
-            cloned_index,
-        )
-    }
-
-    def hasbases(&self) -> PyResult<bool> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner: &CoreMissing<PySharedIndex> =
-            &*unsafe { leaked.try_borrow(py)? };
-        Ok(inner.has_bases())
-    }
-
-    def addbases(&self, bases: PyObject) -> PyResult<PyObject> {
-        let bases_vec: Vec<_> = {
-            let leaked = py_rust_index_to_graph(py,
-                                               self.index(py).clone_ref(py))?;
-            // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-            let index = &*unsafe { leaked.try_borrow(py)? };
-            rev_pyiter_collect(py, &bases, index)?
-        };
-
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner: &mut CoreMissing<PySharedIndex> =
-            &mut *unsafe { leaked.try_borrow_mut(py)? };
-
-        inner.add_bases(bases_vec);
-        // cpython doc has examples with PyResult<()> but this gives me
-        //   the trait `cpython::ToPyObject` is not implemented for `()`
-        // so let's return an explicit None
-        Ok(py.None())
-    }
-
-    def bases(&self) -> PyResult<HashSet<PyRevision>> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner: &CoreMissing<PySharedIndex> =
-            &*unsafe { leaked.try_borrow(py)? };
-        Ok(inner.get_bases()
-           .iter()
-           .map(|r| PyRevision(r.0))
-           .collect()
-        )
-    }
-
-    def basesheads(&self) -> PyResult<HashSet<PyRevision>> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner: &CoreMissing<PySharedIndex> =
-            &*unsafe { leaked.try_borrow(py)? };
-        Ok(
-            inner
-                .bases_heads()
-                .map_err(|e| GraphError::pynew(py, e))?
-                .into_iter()
-                .map(|r| PyRevision(r.0))
-                .collect()
-        )
-    }
-
-    def removeancestorsfrom(&self, revs: PyObject) -> PyResult<PyObject> {
-        let mut revs_pyset: HashSet<Revision> = {
-            // this is very lame: we convert to a Rust set, update it in place
-            // and then convert back to Python, only to have Python remove the
-            // excess (thankfully, Python is happy with a list or even an
-            // iterator)
-            // Leads to improve this:
-            //  - have the CoreMissing instead do something emit revisions to
-            //    discard
-            //  - define a trait for sets of revisions in the core and
-            //    implement it for a Python set rewrapped with the GIL marker
-            let leaked = py_rust_index_to_graph(py,
-                                               self.index(py).clone_ref(py))?;
-            // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-            let index = &*unsafe { leaked.try_borrow(py)? };
-            rev_pyiter_collect(py, &revs, index)?
-        };
-
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner: &mut CoreMissing<PySharedIndex> =
-            &mut *unsafe { leaked.try_borrow_mut(py)? };
-
-        inner.remove_ancestors_from(&mut revs_pyset)
-            .map_err(|e| GraphError::pynew(py, e))?;
-
-        // convert as Python list
-        let mut remaining_pyint_vec: Vec<PyObject> = Vec::with_capacity(
-            revs_pyset.len());
-        for rev in revs_pyset {
-            remaining_pyint_vec.push(
-                PyRevision(rev.0).to_py_object(py).into_object()
-            );
-        }
-        let remaining_pylist = PyList::new(py, remaining_pyint_vec.as_slice());
-        revs.call_method(py, "intersection_update", (remaining_pylist, ), None)
-    }
-
-    def missingancestors(&self, revs: PyObject) -> PyResult<PyList> {
-        let revs_vec: Vec<Revision> = {
-            let leaked = py_rust_index_to_graph(py,
-                                               self.index(py).clone_ref(py))?;
-            // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-            let index = &*unsafe { leaked.try_borrow(py)? };
-            rev_pyiter_collect(py, &revs, index)?
-        };
-
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner: &mut CoreMissing<PySharedIndex> =
-            &mut *unsafe { leaked.try_borrow_mut(py)? };
-
-        let missing_vec = match inner.missing_ancestors(revs_vec) {
-            Ok(missing) => missing,
-            Err(e) => {
-                return Err(GraphError::pynew(py, e));
-            }
-        };
-        // convert as Python list
-        let mut missing_pyint_vec: Vec<PyObject> = Vec::with_capacity(
-            missing_vec.len());
-        for rev in missing_vec {
-            missing_pyint_vec.push(
-                PyRevision(rev.0).to_py_object(py).into_object()
-            );
-        }
-        Ok(PyList::new(py, missing_pyint_vec.as_slice()))
-    }
-});
-
-/// Create the module, with __package__ given from parent
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.ancestor", package);
-    let m = PyModule::new(py, dotted_name)?;
-    m.add(py, "__package__", package)?;
-    m.add(
-        py,
-        "__doc__",
-        "Generic DAG ancestor algorithms - Rust implementation",
-    )?;
-    m.add_class::<AncestorsIterator>(py)?;
-    m.add_class::<LazyAncestors>(py)?;
-    m.add_class::<MissingAncestors>(py)?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-    // Example C code (see pyexpat.c and import.c) will "give away the
-    // reference", but we won't because it will be consumed once the
-    // Rust PyObject is dropped.
-    Ok(m)
-}
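
This deletion removes the rust-cpython based `ancestors` bindings along with the rest of the `hg-cpython` crate. Purely as an illustration of what such a binding looks like on the PyO3 side (PyO3 being the maintained successor of the `cpython` crate), here is a hypothetical, self-contained sketch; the module, class, and field names are invented, and this is not the replacement code from this series.

    // Hypothetical PyO3 shape of a small iterator class; illustration only.
    // Assumes pyo3 = { version = "0.22", features = ["extension-module"] }.
    use pyo3::prelude::*;

    #[pyclass]
    struct AncestorsIter {
        revs: Vec<i32>, // placeholder payload; real bindings wrap hg-core types
    }

    #[pymethods]
    impl AncestorsIter {
        #[new]
        fn new(revs: Vec<i32>) -> Self {
            Self { revs }
        }

        // Python iterator protocol: __iter__ returns self, __next__ yields
        // items until it returns None (translated to StopIteration).
        fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
            slf
        }

        fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<i32> {
            slf.revs.pop()
        }
    }

    #[pymodule]
    fn ancestor_demo(m: &Bound<'_, PyModule>) -> PyResult<()> {
        m.add_class::<AncestorsIter>()
    }
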
diff --git a/rust/hg-cpython/src/cindex.rs b/rust/hg-cpython/src/cindex.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9jaW5kZXgucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/cindex.rs
+++ /dev/null
@@ -1,220 +0,0 @@
-// cindex.rs
-//
-// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings to use the Index defined by the parsers C extension
-//!
-//! Ideally, we should use an Index entirely implemented in Rust,
-//! but this will take some time to get there.
-#![allow(dead_code)]
-use cpython::{
-    exc::ImportError, exc::TypeError, ObjectProtocol, PyClone, PyErr,
-    PyObject, PyResult, PyTuple, Python, PythonObject,
-};
-use hg::revlog::{Node, RevlogIndex};
-use hg::{BaseRevision, Graph, GraphError, Revision};
-use libc::{c_int, ssize_t};
-
-const REVLOG_CABI_VERSION: c_int = 3;
-
-#[repr(C)]
-pub struct Revlog_CAPI {
-    abi_version: c_int,
-    index_length:
-        unsafe extern "C" fn(index: *mut revlog_capi::RawPyObject) -> ssize_t,
-    index_node: unsafe extern "C" fn(
-        index: *mut revlog_capi::RawPyObject,
-        rev: ssize_t,
-    ) -> *const Node,
-    fast_rank: unsafe extern "C" fn(
-        index: *mut revlog_capi::RawPyObject,
-        rev: ssize_t,
-    ) -> ssize_t,
-    index_parents: unsafe extern "C" fn(
-        index: *mut revlog_capi::RawPyObject,
-        rev: c_int,
-        ps: *mut [c_int; 2],
-    ) -> c_int,
-}
-
-py_capsule!(
-    from mercurial.cext.parsers import revlog_CAPI
-        as revlog_capi for Revlog_CAPI);
-
-/// A `Graph` backed up by objects and functions from revlog.c
-///
-/// This implementation of the `Graph` trait, relies on (pointers to)
-/// - the C index object (`index` member)
-/// - the `index_get_parents()` function (`parents` member)
-///
-/// # Safety
-///
-/// The C index itself is mutable, and this Rust exposition is **not
-/// protected by the GIL**, meaning that this construct isn't safe with respect
-/// to Python threads.
-///
-/// All callers of this `Index` must acquire the GIL and must not release it
-/// while working.
-///
-/// # TODO find a solution to make it GIL safe again.
-///
-/// This is non trivial, and can wait until we have a clearer picture with
-/// more Rust Mercurial constructs.
-///
-/// One possibility would be to a `GILProtectedIndex` wrapper enclosing
-/// a `Python<'p>` marker and have it be the one implementing the
-/// `Graph` trait, but this would mean the `Graph` implementor would become
-/// likely to change between subsequent method invocations of the `hg-core`
-/// objects (a serious change of the `hg-core` API):
-/// either exposing ways to mutate the `Graph`, or making it a non persistent
-/// parameter in the relevant methods that need one.
-///
-/// Another possibility would be to introduce an abstract lock handle into
-/// the core API, that would be tied to `GILGuard` / `Python<'p>`
-/// in the case of the `cpython` crate bindings yet could leave room for other
-/// mechanisms in other contexts.
-pub struct Index {
-    index: PyObject,
-    capi: &'static Revlog_CAPI,
-}
-
-impl Index {
-    pub fn new(py: Python, index: PyObject) -> PyResult<Self> {
-        let capi = unsafe { revlog_capi::retrieve(py)? };
-        if capi.abi_version != REVLOG_CABI_VERSION {
-            return Err(PyErr::new::<ImportError, _>(
-                py,
-                format!(
-                    "ABI version mismatch: the C ABI revlog version {} \
-                     does not match the {} expected by Rust hg-cpython",
-                    capi.abi_version, REVLOG_CABI_VERSION
-                ),
-            ));
-        }
-        let compat: u64 = index.getattr(py, "rust_ext_compat")?.extract(py)?;
-        if compat == 0 {
-            return Err(PyErr::new::<TypeError, _>(
-                py,
-                "index object not compatible with Rust",
-            ));
-        }
-        Ok(Index { index, capi })
-    }
-
-    /// return a reference to the CPython Index object in this Struct
-    pub fn inner(&self) -> &PyObject {
-        &self.index
-    }
-
-    pub fn append(&mut self, py: Python, tup: PyTuple) -> PyResult<PyObject> {
-        self.index.call_method(
-            py,
-            "append",
-            PyTuple::new(py, &[tup.into_object()]),
-            None,
-        )
-    }
-}
-
-impl Clone for Index {
-    fn clone(&self) -> Self {
-        let guard = Python::acquire_gil();
-        Index {
-            index: self.index.clone_ref(guard.python()),
-            capi: self.capi,
-        }
-    }
-}
-
-impl PyClone for Index {
-    fn clone_ref(&self, py: Python) -> Self {
-        Index {
-            index: self.index.clone_ref(py),
-            capi: self.capi,
-        }
-    }
-}
-
-impl Graph for Index {
-    /// wrap a call to the C extern parents function
-    fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
-        let mut res: [c_int; 2] = [0; 2];
-        let code = unsafe {
-            (self.capi.index_parents)(
-                self.index.as_ptr(),
-                rev.0 as c_int,
-                &mut res as *mut [c_int; 2],
-            )
-        };
-        match code {
-            0 => Ok([Revision(res[0]), Revision(res[1])]),
-            _ => Err(GraphError::ParentOutOfRange(rev)),
-        }
-    }
-}
-
-impl vcsgraph::graph::Graph for Index {
-    fn parents(
-        &self,
-        rev: BaseRevision,
-    ) -> Result<vcsgraph::graph::Parents, vcsgraph::graph::GraphReadError>
-    {
-        // FIXME This trait should be reworked to decide between Revision
-        // and UncheckedRevision, get better errors names, etc.
-        match Graph::parents(self, Revision(rev)) {
-            Ok(parents) => {
-                Ok(vcsgraph::graph::Parents([parents[0].0, parents[1].0]))
-            }
-            Err(GraphError::ParentOutOfRange(rev)) => {
-                Err(vcsgraph::graph::GraphReadError::KeyedInvalidKey(rev.0))
-            }
-            Err(GraphError::ParentOutOfOrder(_)) => {
-                Err(vcsgraph::graph::GraphReadError::InconsistentGraphData)
-            }
-        }
-    }
-}
-
-impl vcsgraph::graph::RankedGraph for Index {
-    fn rank(
-        &self,
-        rev: BaseRevision,
-    ) -> Result<vcsgraph::graph::Rank, vcsgraph::graph::GraphReadError> {
-        match unsafe {
-            (self.capi.fast_rank)(self.index.as_ptr(), rev as ssize_t)
-        } {
-            -1 => Err(vcsgraph::graph::GraphReadError::InconsistentGraphData),
-            rank => Ok(rank as usize),
-        }
-    }
-}
-
-impl RevlogIndex for Index {
-    /// Note C return type is Py_ssize_t (hence signed), but we shall
-    /// force it to unsigned, because it's a length
-    fn len(&self) -> usize {
-        unsafe { (self.capi.index_length)(self.index.as_ptr()) as usize }
-    }
-
-    fn node(&self, rev: Revision) -> Option<&Node> {
-        let raw = unsafe {
-            (self.capi.index_node)(self.index.as_ptr(), rev.0 as ssize_t)
-        };
-        if raw.is_null() {
-            None
-        } else {
-            // TODO it would be much better for the C layer to give us
-            // a length, since the hash length will change in the near
-            // future, but that's probably out of scope for the nodemap
-            // patch series.
-            //
-            // The root of that unsafety relies in the signature of
-            // `capi.index_node()` itself: returning a `Node` pointer
-            // whereas it's a `char *` in the C counterpart.
-            Some(unsafe { &*raw })
-        }
-    }
-}
diff --git a/rust/hg-cpython/src/conversion.rs b/rust/hg-cpython/src/conversion.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9jb252ZXJzaW9uLnJz..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/conversion.rs
+++ /dev/null
@@ -1,57 +0,0 @@
-// conversion.rs
-//
-// Copyright 2019 Georges Racinet <georges.racinet@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the hg::ancestors module provided by the
-//! `hg-core` crate. From Python, this will be seen as `rustext.ancestor`
-
-use cpython::{ObjectProtocol, PyErr, PyObject, PyResult, Python};
-use hg::{revlog::RevlogIndex, Revision, UncheckedRevision};
-
-use crate::{exceptions::GraphError, PyRevision};
-
-/// Utility function to convert a Python iterable into various collections
-///
-/// We need this in particular to feed to various methods of inner objects
-/// with `impl IntoIterator<Item=Revision>` arguments, because
-/// a `PyErr` can arise at each step of iteration, whereas these methods
-/// expect iterables over `Revision`, not over some `Result<Revision, PyErr>`
-pub fn rev_pyiter_collect<C, I>(
-    py: Python,
-    revs: &PyObject,
-    index: &I,
-) -> PyResult<C>
-where
-    C: FromIterator<Revision>,
-    I: RevlogIndex,
-{
-    rev_pyiter_collect_or_else(py, revs, index, |r| {
-        PyErr::new::<GraphError, _>(py, ("InvalidRevision", r.0))
-    })
-}
-
-/// Same as [`rev_pyiter_collect`], giving control on returned errors
-pub fn rev_pyiter_collect_or_else<C, I>(
-    py: Python,
-    revs: &PyObject,
-    index: &I,
-    invalid_rev_error: impl FnOnce(PyRevision) -> PyErr + Copy,
-) -> PyResult<C>
-where
-    C: FromIterator<Revision>,
-    I: RevlogIndex,
-{
-    revs.iter(py)?
-        .map(|r| {
-            r.and_then(|o| match o.extract::<PyRevision>(py) {
-                Ok(r) => index
-                    .check_revision(UncheckedRevision(r.0))
-                    .ok_or_else(|| invalid_rev_error(r)),
-                Err(e) => Err(e),
-            })
-        })
-        .collect()
-}
diff --git a/rust/hg-cpython/src/copy_tracing.rs b/rust/hg-cpython/src/copy_tracing.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9jb3B5X3RyYWNpbmcucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/copy_tracing.rs
+++ /dev/null
@@ -1,200 +0,0 @@
-use cpython::ObjectProtocol;
-use cpython::PyBytes;
-use cpython::PyDict;
-use cpython::PyDrop;
-use cpython::PyList;
-use cpython::PyModule;
-use cpython::PyObject;
-use cpython::PyResult;
-use cpython::PyTuple;
-use cpython::Python;
-
-use hg::copy_tracing::ChangedFiles;
-use hg::copy_tracing::CombineChangesetCopies;
-use hg::Revision;
-
-use crate::pybytes_deref::PyBytesDeref;
-use crate::PyRevision;
-
-/// Combines copies information contained into revision `revs` to build a copy
-/// map.
-///
-/// See mercurial/copies.py for details
-pub fn combine_changeset_copies_wrapper(
-    py: Python,
-    revs: PyList,
-    children_count: PyDict,
-    target_rev: PyRevision,
-    rev_info: PyObject,
-    multi_thread: bool,
-) -> PyResult<PyDict> {
-    let target_rev = Revision(target_rev.0);
-    let children_count = children_count
-        .items(py)
-        .iter()
-        .map(|(k, v)| {
-            Ok((Revision(k.extract::<PyRevision>(py)?.0), v.extract(py)?))
-        })
-        .collect::<PyResult<_>>()?;
-
-    /// (Revision number, parent 1, parent 2, copy data for this revision)
-    type RevInfo<Bytes> = (Revision, Revision, Revision, Option<Bytes>);
-
-    let revs_info =
-        revs.iter(py).map(|rev_py| -> PyResult<RevInfo<PyBytes>> {
-            let rev = Revision(rev_py.extract::<PyRevision>(py)?.0);
-            let tuple: PyTuple =
-                rev_info.call(py, (rev_py,), None)?.cast_into(py)?;
-            let p1 =
-                Revision(tuple.get_item(py, 0).extract::<PyRevision>(py)?.0);
-            let p2 =
-                Revision(tuple.get_item(py, 1).extract::<PyRevision>(py)?.0);
-            let opt_bytes = tuple.get_item(py, 2).extract(py)?;
-            Ok((rev, p1, p2, opt_bytes))
-        });
-
-    let path_copies;
-    if !multi_thread {
-        let mut combine_changeset_copies =
-            CombineChangesetCopies::new(children_count);
-
-        for rev_info in revs_info {
-            let (rev, p1, p2, opt_bytes) = rev_info?;
-            let files = match &opt_bytes {
-                Some(bytes) => ChangedFiles::new(bytes.data(py)),
-                // Python None was extracted to Option::None,
-                // meaning there was no copy data.
-                None => ChangedFiles::new_empty(),
-            };
-
-            combine_changeset_copies.add_revision(rev, p1, p2, files)
-        }
-        path_copies = combine_changeset_copies.finish(target_rev)
-    } else {
-        // Use a bounded channel to provide back-pressure:
-        // if the child thread is slower to process revisions than this thread
-        // is to gather data for them, an unbounded channel would keep
-        // growing and eat memory.
-        //
-        // TODO: tweak the bound?
-        let (rev_info_sender, rev_info_receiver) =
-            crossbeam_channel::bounded::<RevInfo<PyBytesDeref>>(1000);
-
-        // This channel (going the other way around) however is unbounded.
-        // If they were both bounded, there might potentially be deadlocks
-        // where both channels are full and both threads are waiting on each
-        // other.
-        let (pybytes_sender, pybytes_receiver) =
-            crossbeam_channel::unbounded();
-
-        // Start a thread that does CPU-heavy processing in parallel with the
-        // loop below.
-        //
-        // If the parent thread panics, `rev_info_sender` will be dropped and
-        // “disconnected”. `rev_info_receiver` will be notified of this and
-        // exit its own loop.
-        let thread = std::thread::spawn(move || {
-            let mut combine_changeset_copies =
-                CombineChangesetCopies::new(children_count);
-            for (rev, p1, p2, opt_bytes) in rev_info_receiver {
-                let files = match &opt_bytes {
-                    Some(raw) => ChangedFiles::new(raw.as_ref()),
-                    // Python None was extracted to Option::None,
-                    // meaning there was no copy data.
-                    None => ChangedFiles::new_empty(),
-                };
-                combine_changeset_copies.add_revision(rev, p1, p2, files);
-
-                // Send `PyBytes` back to the parent thread so the parent
-                // thread can drop it. Otherwise the GIL would be implicitly
-                // acquired here through `impl Drop for PyBytes`.
-                if let Some(bytes) = opt_bytes {
-                    if pybytes_sender.send(bytes.unwrap()).is_err() {
-                        // The channel is disconnected, meaning the parent
-                        // thread panicked or returned
-                        // early through
-                        // `?` to propagate a Python exception.
-                        break;
-                    }
-                }
-            }
-
-            combine_changeset_copies.finish(target_rev)
-        });
-
-        for rev_info in revs_info {
-            let (rev, p1, p2, opt_bytes) = rev_info?;
-            let opt_bytes = opt_bytes.map(|b| PyBytesDeref::new(py, b));
-
-            // We’d prefer to avoid the child thread calling into Python code,
-            // but this avoids a potential deadlock on the GIL if it does:
-            py.allow_threads(|| {
-                rev_info_sender.send((rev, p1, p2, opt_bytes)).expect(
-                    "combine_changeset_copies: channel is disconnected",
-                );
-            });
-
-            // Drop anything in the channel, without blocking
-            for pybytes in pybytes_receiver.try_iter() {
-                pybytes.release_ref(py)
-            }
-        }
-        // We’d prefer to avoid the child thread calling into Python code,
-        // but this avoids a potential deadlock on the GIL if it does:
-        path_copies = py.allow_threads(|| {
-            // Disconnect the channel to signal the child thread to stop:
-            // the `for … in rev_info_receiver` loop will end.
-            drop(rev_info_sender);
-
-            // Wait for the child thread to stop, and propagate any panic.
-            thread.join().unwrap_or_else(|panic_payload| {
-                std::panic::resume_unwind(panic_payload)
-            })
-        });
-
-        // Drop anything left in the channel
-        for pybytes in pybytes_receiver.iter() {
-            pybytes.release_ref(py)
-        }
-    };
-
-    let out = PyDict::new(py);
-    for (dest, source) in path_copies.into_iter() {
-        out.set_item(
-            py,
-            PyBytes::new(py, &dest.into_vec()),
-            PyBytes::new(py, &source.into_vec()),
-        )?;
-    }
-    Ok(out)
-}
-
-/// Create the module, with `__package__` given from parent
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.copy_tracing", package);
-    let m = PyModule::new(py, dotted_name)?;
-
-    m.add(py, "__package__", package)?;
-    m.add(py, "__doc__", "Copy tracing - Rust implementation")?;
-
-    m.add(
-        py,
-        "combine_changeset_copies",
-        py_fn!(
-            py,
-            combine_changeset_copies_wrapper(
-                revs: PyList,
-                children: PyDict,
-                target_rev: PyRevision,
-                rev_info: PyObject,
-                multi_thread: bool
-            )
-        ),
-    )?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-
-    Ok(m)
-}
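
The deleted multi-threaded path above relied on a bounded crossbeam channel to apply back-pressure on the producing thread and an unbounded channel to hand buffers back without risking a two-way deadlock. A standalone sketch of that channel pattern, independent of any Mercurial types and assuming only the `crossbeam-channel` crate:

    // Standalone sketch of the bounded/unbounded channel pattern from the
    // deleted copy_tracing bindings: the bounded channel gives back-pressure on
    // the producer, the unbounded return channel cannot contribute to a deadlock.
    // Assumes crossbeam-channel = "0.5".
    use crossbeam_channel::{bounded, unbounded};

    fn main() {
        // Bounded: the producer blocks if the worker falls behind.
        let (work_tx, work_rx) = bounded::<Vec<u8>>(1000);
        // Unbounded: the worker never blocks when handing buffers back.
        let (done_tx, done_rx) = unbounded::<Vec<u8>>();

        let worker = std::thread::spawn(move || {
            let mut total = 0usize;
            for buf in work_rx {
                total += buf.len();
                // Return the buffer; if the producer is gone, just stop.
                if done_tx.send(buf).is_err() {
                    break;
                }
            }
            total
        });

        for i in 0..10_000u32 {
            work_tx.send(vec![0u8; (i % 16) as usize]).expect("worker hung up");
            // Drain returned buffers without blocking, as the original code did.
            for _buf in done_rx.try_iter() {}
        }

        // Dropping the sender disconnects the channel and ends the worker loop.
        drop(work_tx);
        let total = worker.join().expect("worker panicked");
        println!("processed {total} bytes");
        // Drain anything left once the worker has exited.
        for _buf in done_rx.iter() {}
    }
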
diff --git a/rust/hg-cpython/src/dagops.rs b/rust/hg-cpython/src/dagops.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kYWdvcHMucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/dagops.rs
+++ /dev/null
@@ -1,84 +0,0 @@
-// dagops.rs
-//
-// Copyright 2019 Georges Racinet <georges.racinet@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::dagops` module provided by the
-//! `hg-core` package.
-//!
-//! From Python, this will be seen as `mercurial.rustext.dagop`
-use crate::PyRevision;
-use crate::{conversion::rev_pyiter_collect, exceptions::GraphError};
-use cpython::{PyDict, PyModule, PyObject, PyResult, Python};
-use hg::dagops;
-use hg::Revision;
-use std::collections::HashSet;
-use vcsgraph::graph::Rank;
-
-use crate::revlog::py_rust_index_to_graph;
-
-/// Using the the `index`, return heads out of any Python iterable of Revisions
-///
-/// This is the Rust counterpart for `mercurial.dagop.headrevs`
-pub fn headrevs(
-    py: Python,
-    index: PyObject,
-    revs: PyObject,
-) -> PyResult<HashSet<PyRevision>> {
-    let py_leaked = py_rust_index_to_graph(py, index)?;
-    // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-    let index = &*unsafe { py_leaked.try_borrow(py)? };
-    let mut as_set: HashSet<Revision> = rev_pyiter_collect(py, &revs, index)?;
-    dagops::retain_heads(index, &mut as_set)
-        .map_err(|e| GraphError::pynew(py, e))?;
-    Ok(as_set.into_iter().map(Into::into).collect())
-}
-
-/// Computes the rank, i.e. the number of ancestors including itself,
-/// of a node represented by its parents.
-///
-/// Currently, the pure Rust index supports only the REVLOGV1 format, hence
-/// the only possible return value is that the rank is unknown.
-///
-/// References:
-/// - C implementation, function `index_fast_rank()`.
-/// - `impl vcsgraph::graph::RankedGraph for Index` in `crate::cindex`.
-pub fn rank(
-    py: Python,
-    _index: PyObject,
-    _p1r: PyRevision,
-    _p2r: PyRevision,
-) -> PyResult<Rank> {
-    Err(GraphError::pynew_from_vcsgraph(
-        py,
-        vcsgraph::graph::GraphReadError::InconsistentGraphData,
-    ))
-}
-
-/// Create the module, with `__package__` given from parent
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.dagop", package);
-    let m = PyModule::new(py, dotted_name)?;
-    m.add(py, "__package__", package)?;
-    m.add(py, "__doc__", "DAG operations - Rust implementation")?;
-    m.add(
-        py,
-        "headrevs",
-        py_fn!(py, headrevs(index: PyObject, revs: PyObject)),
-    )?;
-    m.add(
-        py,
-        "rank",
-        py_fn!(py, rank(index: PyObject, p1r: PyRevision, p2r: PyRevision)),
-    )?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-    // Example C code (see pyexpat.c and import.c) will "give away the
-    // reference", but we won't because it will be consumed once the
-    // Rust PyObject is dropped.
-    Ok(m)
-}
diff --git a/rust/hg-cpython/src/debug.rs b/rust/hg-cpython/src/debug.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kZWJ1Zy5ycw==..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/debug.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-// debug.rs
-//
-// Copyright 2020 Raphaël Gomès <rgomes@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Module to get debug information about Rust extensions.
-use cpython::{PyDict, PyModule, PyResult, Python};
-
-/// Create the module, with `__package__` given from parent
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.debug", package);
-    let m = PyModule::new(py, dotted_name)?;
-
-    m.add(py, "__package__", package)?;
-    m.add(py, "__doc__", "Rust debugging information")?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-
-    Ok(m)
-}
diff --git a/rust/hg-cpython/src/dirstate.rs b/rust/hg-cpython/src/dirstate.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kaXJzdGF0ZS5ycw==..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/dirstate.rs
+++ /dev/null
@@ -1,70 +0,0 @@
-// dirstate.rs
-//
-// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::dirstate` module provided by the
-//! `hg-core` package.
-//!
-//! From Python, this will be seen as `mercurial.rustext.dirstate`
-mod copymap;
-mod dirs_multiset;
-mod dirstate_map;
-mod item;
-mod status;
-use self::item::DirstateItem;
-use crate::{
-    dirstate::{dirs_multiset::Dirs, status::status_wrapper},
-    exceptions,
-};
-use cpython::{PyBytes, PyDict, PyList, PyModule, PyObject, PyResult, Python};
-use dirstate_map::{DirstateIdentity, DirstateMap};
-use hg::dirstate::on_disk::V2_FORMAT_MARKER;
-
-/// Create the module, with `__package__` given from parent
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.dirstate", package);
-    let m = PyModule::new(py, dotted_name)?;
-
-    env_logger::init();
-
-    m.add(py, "__package__", package)?;
-    m.add(py, "__doc__", "Dirstate - Rust implementation")?;
-
-    m.add(
-        py,
-        "FallbackError",
-        py.get_type::<exceptions::FallbackError>(),
-    )?;
-    m.add_class::<Dirs>(py)?;
-    m.add_class::<DirstateMap>(py)?;
-    m.add_class::<DirstateItem>(py)?;
-    m.add_class::<DirstateIdentity>(py)?;
-    m.add(py, "V2_FORMAT_MARKER", PyBytes::new(py, V2_FORMAT_MARKER))?;
-    m.add(
-        py,
-        "status",
-        py_fn!(
-            py,
-            status_wrapper(
-                dmap: DirstateMap,
-                root_dir: PyObject,
-                matcher: PyObject,
-                ignorefiles: PyList,
-                check_exec: bool,
-                list_clean: bool,
-                list_ignored: bool,
-                list_unknown: bool,
-                collect_traversed_dirs: bool
-            )
-        ),
-    )?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-
-    Ok(m)
-}
diff --git a/rust/hg-cpython/src/dirstate/copymap.rs b/rust/hg-cpython/src/dirstate/copymap.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kaXJzdGF0ZS9jb3B5bWFwLnJz..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/dirstate/copymap.rs
+++ /dev/null
@@ -1,121 +0,0 @@
-// copymap.rs
-//
-// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for `hg::dirstate::dirstate_map::CopyMap` provided by the
-//! `hg-core` package.
-
-use cpython::{
-    PyBytes, PyClone, PyDict, PyObject, PyResult, Python, UnsafePyLeaked,
-};
-
-use crate::dirstate::dirstate_map::v2_error;
-use crate::dirstate::dirstate_map::DirstateMap;
-use hg::dirstate::on_disk::DirstateV2ParseError;
-use hg::dirstate::CopyMapIter;
-use hg::utils::hg_path::HgPath;
-
-py_class!(pub class CopyMap |py| {
-    data dirstate_map: DirstateMap;
-
-    def __getitem__(&self, key: PyObject) -> PyResult<PyBytes> {
-        (*self.dirstate_map(py)).copymapgetitem(py, key)
-    }
-
-    def __len__(&self) -> PyResult<usize> {
-        self.dirstate_map(py).copymaplen(py)
-    }
-
-    def __contains__(&self, key: PyObject) -> PyResult<bool> {
-        self.dirstate_map(py).copymapcontains(py, key)
-    }
-
-    def get(
-        &self,
-        key: PyObject,
-        default: Option<PyObject> = None
-    ) -> PyResult<Option<PyObject>> {
-        self.dirstate_map(py).copymapget(py, key, default)
-    }
-
-    def pop(
-        &self,
-        key: PyObject,
-        default: Option<PyObject> = None
-    ) -> PyResult<Option<PyObject>> {
-        self.dirstate_map(py).copymappop(py, key, default)
-    }
-
-    def __iter__(&self) -> PyResult<CopyMapKeysIterator> {
-        self.dirstate_map(py).copymapiter(py)
-    }
-
-    // Python's `dict()` builtin works with either a subclass of dict
-    // or an abstract mapping. Said mapping needs to implement `__getitem__`
-    // and `keys`.
-    def keys(&self) -> PyResult<CopyMapKeysIterator> {
-        self.dirstate_map(py).copymapiter(py)
-    }
-
-    def items(&self) -> PyResult<CopyMapItemsIterator> {
-        self.dirstate_map(py).copymapitemsiter(py)
-    }
-
-    def iteritems(&self) -> PyResult<CopyMapItemsIterator> {
-        self.dirstate_map(py).copymapitemsiter(py)
-    }
-
-    def __setitem__(
-        &self,
-        key: PyObject,
-        item: PyObject
-    ) -> PyResult<()> {
-        self.dirstate_map(py).copymapsetitem(py, key, item)?;
-        Ok(())
-    }
-
-    def copy(&self) -> PyResult<PyDict> {
-        self.dirstate_map(py).copymapcopy(py)
-    }
-
-});
-
-impl CopyMap {
-    pub fn from_inner(py: Python, dm: DirstateMap) -> PyResult<Self> {
-        Self::create_instance(py, dm)
-    }
-    fn translate_key(
-        py: Python,
-        res: Result<(&HgPath, &HgPath), DirstateV2ParseError>,
-    ) -> PyResult<Option<PyBytes>> {
-        let (k, _v) = res.map_err(|e| v2_error(py, e))?;
-        Ok(Some(PyBytes::new(py, k.as_bytes())))
-    }
-    fn translate_key_value(
-        py: Python,
-        res: Result<(&HgPath, &HgPath), DirstateV2ParseError>,
-    ) -> PyResult<Option<(PyBytes, PyBytes)>> {
-        let (k, v) = res.map_err(|e| v2_error(py, e))?;
-        Ok(Some((
-            PyBytes::new(py, k.as_bytes()),
-            PyBytes::new(py, v.as_bytes()),
-        )))
-    }
-}
-
-py_shared_iterator!(
-    CopyMapKeysIterator,
-    UnsafePyLeaked<CopyMapIter<'static>>,
-    CopyMap::translate_key,
-    Option<PyBytes>
-);
-
-py_shared_iterator!(
-    CopyMapItemsIterator,
-    UnsafePyLeaked<CopyMapIter<'static>>,
-    CopyMap::translate_key_value,
-    Option<(PyBytes, PyBytes)>
-);
diff --git a/rust/hg-cpython/src/dirstate/dirs_multiset.rs b/rust/hg-cpython/src/dirstate/dirs_multiset.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kaXJzdGF0ZS9kaXJzX211bHRpc2V0LnJz..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/dirstate/dirs_multiset.rs
+++ /dev/null
@@ -1,107 +0,0 @@
-// dirs_multiset.rs
-//
-// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::dirstate::dirs_multiset` file provided by the
-//! `hg-core` package.
-
-use cpython::{
-    exc, ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyObject, PyResult,
-    Python, UnsafePyLeaked,
-};
-
-use hg::{
-    dirstate::dirs_multiset::{DirsMultiset, DirsMultisetIter},
-    utils::hg_path::{HgPath, HgPathBuf},
-};
-
-py_class!(pub class Dirs |py| {
-    @shared data inner: DirsMultiset;
-
-    // `map` is either a `dict` or a flat iterator (usually a `set`, sometimes
-    // a `list`)
-    def __new__(
-        _cls,
-        map: PyObject,
-    ) -> PyResult<Self> {
-        let inner = if map.cast_as::<PyDict>(py).is_ok() {
-            let err = "pathutil.dirs() with a dict should only be used by the Python dirstatemap \
-                and should not be used when Rust is enabled";
-            return Err(PyErr::new::<exc::TypeError, _>(py, err.to_string()))
-        } else {
-            let map: Result<Vec<HgPathBuf>, PyErr> = map
-                .iter(py)?
-                .map(|o| {
-                    Ok(HgPathBuf::from_bytes(
-                        o?.extract::<PyBytes>(py)?.data(py),
-                    ))
-                })
-                .collect();
-            DirsMultiset::from_manifest(&map?)
-                .map_err(|e| {
-                    PyErr::new::<exc::ValueError, _>(py, e.to_string())
-                })?
-        };
-
-        Self::create_instance(py, inner)
-    }
-
-    def addpath(&self, path: PyObject) -> PyResult<PyObject> {
-        self.inner(py).borrow_mut().add_path(
-            HgPath::new(path.extract::<PyBytes>(py)?.data(py)),
-        ).and(Ok(py.None())).map_err(|e| PyErr::new::<exc::ValueError, _>(
-                        py,
-                        e.to_string(),
-                    )
-        )
-    }
-
-    def delpath(&self, path: PyObject) -> PyResult<PyObject> {
-        self.inner(py).borrow_mut().delete_path(
-            HgPath::new(path.extract::<PyBytes>(py)?.data(py)),
-        )
-            .and(Ok(py.None()))
-            .map_err(|e|
-                        PyErr::new::<exc::ValueError, _>(
-                            py,
-                            e.to_string(),
-                        )
-            )
-    }
-    def __iter__(&self) -> PyResult<DirsMultisetKeysIterator> {
-        let leaked_ref = self.inner(py).leak_immutable();
-        DirsMultisetKeysIterator::from_inner(
-            py,
-            unsafe { leaked_ref.map(py, |o| o.iter()) },
-        )
-    }
-
-    def __contains__(&self, item: PyObject) -> PyResult<bool> {
-        Ok(self.inner(py).borrow().contains(HgPath::new(
-            item.extract::<PyBytes>(py)?.data(py),
-        )))
-    }
-});
-
-impl Dirs {
-    pub fn from_inner(py: Python, d: DirsMultiset) -> PyResult<Self> {
-        Self::create_instance(py, d)
-    }
-
-    fn translate_key(
-        py: Python,
-        res: &HgPathBuf,
-    ) -> PyResult<Option<PyBytes>> {
-        Ok(Some(PyBytes::new(py, res.as_bytes())))
-    }
-}
-
-py_shared_iterator!(
-    DirsMultisetKeysIterator,
-    UnsafePyLeaked<DirsMultisetIter<'static>>,
-    Dirs::translate_key,
-    Option<PyBytes>
-);
diff --git a/rust/hg-cpython/src/dirstate/dirstate_map.rs b/rust/hg-cpython/src/dirstate/dirstate_map.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kaXJzdGF0ZS9kaXJzdGF0ZV9tYXAucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs
+++ /dev/null
@@ -1,601 +0,0 @@
-// dirstate_map.rs
-//
-// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::dirstate::dirstate_map` file provided by the
-//! `hg-core` package.
-
-use std::cell::RefMut;
-
-use cpython::{
-    exc, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList, PyNone, PyObject,
-    PyResult, Python, PythonObject, ToPyObject, UnsafePyLeaked,
-};
-use hg::dirstate::{
-    dirstate_map::{
-        DirstateEntryReset, DirstateIdentity as CoreDirstateIdentity,
-    },
-    entry::{DirstateEntry, ParentFileData, TruncatedTimestamp},
-    DirstateError,
-};
-
-use crate::{
-    dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
-    dirstate::item::DirstateItem,
-    pybytes_deref::PyBytesDeref,
-};
-use hg::{
-    dirstate::dirstate_map::DirstateMapWriteMode,
-    dirstate::on_disk::DirstateV2ParseError,
-    dirstate::owning::OwningDirstateMap, dirstate::StateMapIter, revlog::Node,
-    utils::files::normalize_case, utils::hg_path::HgPath, DirstateParents,
-};
-
-// TODO
-//     This object needs to share references to multiple members of its Rust
-//     inner struct, namely `copy_map`, `dirs` and `all_dirs`.
-//     Right now `CopyMap` is done, but it needs to have an explicit reference
-//     to `RustDirstateMap` which itself needs to have an encapsulation for
-//     every method in `CopyMap` (copymapcopy, etc.).
-//     This is ugly and hard to maintain.
-//     The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
-//     `py_class!` is already implemented and does not mention
-//     `RustDirstateMap`, rightfully so.
-//     All attributes also have to have a separate refcount data attribute for
-//     leaks, with all methods that go along for reference sharing.
-py_class!(pub class DirstateMap |py| {
-    @shared data inner: OwningDirstateMap;
-
-    /// Returns a `(dirstate_map, parents)` tuple
-    @staticmethod
-    def new_v1(
-        on_disk: PyBytes,
-        identity: Option<DirstateIdentity>,
-    ) -> PyResult<PyObject> {
-        let on_disk = PyBytesDeref::new(py, on_disk);
-        let (map, parents) = OwningDirstateMap::new_v1(
-            on_disk,
-            identity.map(|i| *i.inner(py))
-        )
-            .map_err(|e| dirstate_error(py, e))?;
-        let map = Self::create_instance(py, map)?;
-        let p1 = PyBytes::new(py, parents.p1.as_bytes());
-        let p2 = PyBytes::new(py, parents.p2.as_bytes());
-        let parents = (p1, p2);
-        Ok((map, parents).to_py_object(py).into_object())
-    }
-
-    /// Returns a DirstateMap
-    @staticmethod
-    def new_v2(
-        on_disk: PyBytes,
-        data_size: usize,
-        tree_metadata: PyBytes,
-        uuid: PyBytes,
-        identity: Option<DirstateIdentity>,
-    ) -> PyResult<PyObject> {
-        let dirstate_error = |e: DirstateError| {
-            PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
-        };
-        let on_disk = PyBytesDeref::new(py, on_disk);
-        let uuid = uuid.data(py);
-        let map = OwningDirstateMap::new_v2(
-            on_disk,
-            data_size,
-            tree_metadata.data(py),
-            uuid.to_owned(),
-            identity.map(|i| *i.inner(py)),
-        ).map_err(dirstate_error)?;
-        let map = Self::create_instance(py, map)?;
-        Ok(map.into_object())
-    }
-
-    /// Returns an empty DirstateMap. Only used for a new dirstate.
-    @staticmethod
-    def new_empty() -> PyResult<PyObject> {
-        let map = OwningDirstateMap::new_empty(vec![], None);
-        let map = Self::create_instance(py, map)?;
-        Ok(map.into_object())
-    }
-
-    def clear(&self) -> PyResult<PyObject> {
-        self.inner(py).borrow_mut().clear();
-        Ok(py.None())
-    }
-
-    def get(
-        &self,
-        key: PyObject,
-        default: Option<PyObject> = None
-    ) -> PyResult<Option<PyObject>> {
-        let key = key.extract::<PyBytes>(py)?;
-        match self
-            .inner(py)
-            .borrow()
-            .get(HgPath::new(key.data(py)))
-            .map_err(|e| v2_error(py, e))?
-        {
-            Some(entry) => {
-                Ok(Some(DirstateItem::new_as_pyobject(py, entry)?))
-            },
-            None => Ok(default)
-        }
-    }
-
-    def set_tracked(&self, f: PyObject) -> PyResult<PyBool> {
-        let bytes = f.extract::<PyBytes>(py)?;
-        let path = HgPath::new(bytes.data(py));
-        let res = self.inner(py).borrow_mut().set_tracked(path);
-        let was_tracked = res.map_err(|_| PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))?;
-        Ok(was_tracked.to_py_object(py))
-    }
-
-    def set_untracked(&self, f: PyObject) -> PyResult<PyBool> {
-        let bytes = f.extract::<PyBytes>(py)?;
-        let path = HgPath::new(bytes.data(py));
-        let res = self.inner(py).borrow_mut().set_untracked(path);
-        let was_tracked = res.map_err(|_| PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))?;
-        Ok(was_tracked.to_py_object(py))
-    }
-
-    def set_clean(
-        &self,
-        f: PyObject,
-        mode: u32,
-        size: u32,
-        mtime: (i64, u32, bool)
-    ) -> PyResult<PyNone> {
-        let (mtime_s, mtime_ns, second_ambiguous) = mtime;
-        let timestamp = TruncatedTimestamp::new_truncate(
-            mtime_s, mtime_ns, second_ambiguous
-        );
-        let bytes = f.extract::<PyBytes>(py)?;
-        let path = HgPath::new(bytes.data(py));
-        let res = self.inner(py).borrow_mut().set_clean(
-            path, mode, size, timestamp,
-        );
-        res.map_err(|_| PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))?;
-        Ok(PyNone)
-    }
-
-    def set_possibly_dirty(&self, f: PyObject) -> PyResult<PyNone> {
-        let bytes = f.extract::<PyBytes>(py)?;
-        let path = HgPath::new(bytes.data(py));
-        let res = self.inner(py).borrow_mut().set_possibly_dirty(path);
-        res.map_err(|_| PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))?;
-        Ok(PyNone)
-    }
-
-    def reset_state(
-        &self,
-        f: PyObject,
-        wc_tracked: bool,
-        p1_tracked: bool,
-        p2_info: bool,
-        has_meaningful_mtime: bool,
-        parentfiledata: Option<(u32, u32, Option<(i64, u32, bool)>)>,
-    ) -> PyResult<PyNone> {
-        let mut has_meaningful_mtime = has_meaningful_mtime;
-        let parent_file_data = match parentfiledata {
-            None => {
-                has_meaningful_mtime = false;
-                None
-            },
-            Some(data) => {
-                let (mode, size, mtime_info) = data;
-                let mtime = if let Some(mtime_info) = mtime_info {
-                    let (mtime_s, mtime_ns, second_ambiguous) = mtime_info;
-                    let timestamp = TruncatedTimestamp::new_truncate(
-                        mtime_s, mtime_ns, second_ambiguous
-                    );
-                    Some(timestamp)
-                } else {
-                    has_meaningful_mtime = false;
-                    None
-                };
-                Some(ParentFileData {
-                    mode_size: Some((mode, size)),
-                    mtime,
-                })
-            }
-        };
-        let bytes = f.extract::<PyBytes>(py)?;
-        let path = HgPath::new(bytes.data(py));
-        let reset = DirstateEntryReset {
-            filename: path,
-            wc_tracked,
-            p1_tracked,
-            p2_info,
-            has_meaningful_mtime,
-            parent_file_data_opt: parent_file_data,
-            from_empty: false
-        };
-        let res = self.inner(py).borrow_mut().reset_state(reset);
-        res.map_err(|_| PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))?;
-        Ok(PyNone)
-    }
-
-    def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
-        let d = d.extract::<PyBytes>(py)?;
-        Ok(self.inner(py).borrow_mut()
-            .has_tracked_dir(HgPath::new(d.data(py)))
-            .map_err(|e| {
-                PyErr::new::<exc::ValueError, _>(py, e.to_string())
-            })?
-            .to_py_object(py))
-    }
-
-    def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
-        let d = d.extract::<PyBytes>(py)?;
-        Ok(self.inner(py).borrow_mut()
-            .has_dir(HgPath::new(d.data(py)))
-            .map_err(|e| {
-                PyErr::new::<exc::ValueError, _>(py, e.to_string())
-            })?
-            .to_py_object(py))
-    }
-
-    def write_v1(
-        &self,
-        p1: PyObject,
-        p2: PyObject,
-    ) -> PyResult<PyBytes> {
-        let inner = self.inner(py).borrow();
-        let parents = DirstateParents {
-            p1: extract_node_id(py, &p1)?,
-            p2: extract_node_id(py, &p2)?,
-        };
-        let result = inner.pack_v1(parents);
-        match result {
-            Ok(packed) => Ok(PyBytes::new(py, &packed)),
-            Err(_) => Err(PyErr::new::<exc::OSError, _>(
-                py,
-                "Dirstate error".to_string(),
-            )),
-        }
-    }
-
-    /// Returns new data together with whether that data should be appended to
-    /// the existing data file whose content is at `self.on_disk` (True),
-    /// instead of written to a new data file (False).
-    def write_v2(
-        &self,
-        write_mode: usize,
-    ) -> PyResult<PyObject> {
-        let inner = self.inner(py).borrow();
-        let rust_write_mode = match write_mode {
-            0 => DirstateMapWriteMode::Auto,
-            1 => DirstateMapWriteMode::ForceNewDataFile,
-            2 => DirstateMapWriteMode::ForceAppend,
-            _ => DirstateMapWriteMode::Auto, // XXX should we error out?
-        };
-        let result = inner.pack_v2(rust_write_mode);
-        match result {
-            Ok((packed, tree_metadata, append, _old_data_size)) => {
-                let packed = PyBytes::new(py, &packed);
-                let tree_metadata = PyBytes::new(py, tree_metadata.as_bytes());
-                let tuple = (packed, tree_metadata, append);
-                Ok(tuple.to_py_object(py).into_object())
-            },
-            Err(e) => Err(PyErr::new::<exc::OSError, _>(
-                py,
-                e.to_string(),
-            )),
-        }
-    }
-
-    def filefoldmapasdict(&self) -> PyResult<PyDict> {
-        let dict = PyDict::new(py);
-        for item in self.inner(py).borrow_mut().iter() {
-            let (path, entry) = item.map_err(|e| v2_error(py, e))?;
-            if !entry.removed() {
-                let key = normalize_case(path);
-                let value = path;
-                dict.set_item(
-                    py,
-                    PyBytes::new(py, key.as_bytes()).into_object(),
-                    PyBytes::new(py, value.as_bytes()).into_object(),
-                )?;
-            }
-        }
-        Ok(dict)
-    }
-
-    def __len__(&self) -> PyResult<usize> {
-        Ok(self.inner(py).borrow().len())
-    }
-
-    def __contains__(&self, key: PyObject) -> PyResult<bool> {
-        let key = key.extract::<PyBytes>(py)?;
-        self.inner(py)
-            .borrow()
-            .contains_key(HgPath::new(key.data(py)))
-            .map_err(|e| v2_error(py, e))
-    }
-
-    def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
-        let key = key.extract::<PyBytes>(py)?;
-        let key = HgPath::new(key.data(py));
-        match self
-            .inner(py)
-            .borrow()
-            .get(key)
-            .map_err(|e| v2_error(py, e))?
-        {
-            Some(entry) => {
-                Ok(DirstateItem::new_as_pyobject(py, entry)?)
-            },
-            None => Err(PyErr::new::<exc::KeyError, _>(
-                py,
-                String::from_utf8_lossy(key.as_bytes()),
-            )),
-        }
-    }
-
-    def keys(&self) -> PyResult<DirstateMapKeysIterator> {
-        let leaked_ref = self.inner(py).leak_immutable();
-        DirstateMapKeysIterator::from_inner(
-            py,
-            unsafe { leaked_ref.map(py, |o| o.iter()) },
-        )
-    }
-
-    def items(&self) -> PyResult<DirstateMapItemsIterator> {
-        let leaked_ref = self.inner(py).leak_immutable();
-        DirstateMapItemsIterator::from_inner(
-            py,
-            unsafe { leaked_ref.map(py, |o| o.iter()) },
-        )
-    }
-
-    def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
-        let leaked_ref = self.inner(py).leak_immutable();
-        DirstateMapKeysIterator::from_inner(
-            py,
-            unsafe { leaked_ref.map(py, |o| o.iter()) },
-        )
-    }
-
-    // TODO all copymap* methods, see docstring above
-    def copymapcopy(&self) -> PyResult<PyDict> {
-        let dict = PyDict::new(py);
-        for item in self.inner(py).borrow().copy_map_iter() {
-            let (key, value) = item.map_err(|e| v2_error(py, e))?;
-            dict.set_item(
-                py,
-                PyBytes::new(py, key.as_bytes()),
-                PyBytes::new(py, value.as_bytes()),
-            )?;
-        }
-        Ok(dict)
-    }
-
-    def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
-        let key = key.extract::<PyBytes>(py)?;
-        match self
-            .inner(py)
-            .borrow()
-            .copy_map_get(HgPath::new(key.data(py)))
-            .map_err(|e| v2_error(py, e))?
-        {
-            Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
-            None => Err(PyErr::new::<exc::KeyError, _>(
-                py,
-                String::from_utf8_lossy(key.data(py)),
-            )),
-        }
-    }
-    def copymap(&self) -> PyResult<CopyMap> {
-        CopyMap::from_inner(py, self.clone_ref(py))
-    }
-
-    def copymaplen(&self) -> PyResult<usize> {
-        Ok(self.inner(py).borrow().copy_map_len())
-    }
-    def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
-        let key = key.extract::<PyBytes>(py)?;
-        self.inner(py)
-            .borrow()
-            .copy_map_contains_key(HgPath::new(key.data(py)))
-            .map_err(|e| v2_error(py, e))
-    }
-    def copymapget(
-        &self,
-        key: PyObject,
-        default: Option<PyObject>
-    ) -> PyResult<Option<PyObject>> {
-        let key = key.extract::<PyBytes>(py)?;
-        match self
-            .inner(py)
-            .borrow()
-            .copy_map_get(HgPath::new(key.data(py)))
-            .map_err(|e| v2_error(py, e))?
-        {
-            Some(copy) => Ok(Some(
-                PyBytes::new(py, copy.as_bytes()).into_object(),
-            )),
-            None => Ok(default),
-        }
-    }
-    def copymapsetitem(
-        &self,
-        key: PyObject,
-        value: PyObject
-    ) -> PyResult<PyObject> {
-        let key = key.extract::<PyBytes>(py)?;
-        let value = value.extract::<PyBytes>(py)?;
-        self.inner(py)
-            .borrow_mut()
-            .copy_map_insert(
-                HgPath::new(key.data(py)),
-                HgPath::new(value.data(py)),
-            )
-            .map_err(|e| v2_error(py, e))?;
-        Ok(py.None())
-    }
-    def copymappop(
-        &self,
-        key: PyObject,
-        default: Option<PyObject>
-    ) -> PyResult<Option<PyObject>> {
-        let key = key.extract::<PyBytes>(py)?;
-        match self
-            .inner(py)
-            .borrow_mut()
-            .copy_map_remove(HgPath::new(key.data(py)))
-            .map_err(|e| v2_error(py, e))?
-        {
-            Some(copy) => Ok(Some(
-                PyBytes::new(py, copy.as_bytes()).into_object(),
-            )),
-            None => Ok(default),
-        }
-    }
-
-    def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
-        let leaked_ref = self.inner(py).leak_immutable();
-        CopyMapKeysIterator::from_inner(
-            py,
-            unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
-        )
-    }
-
-    def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
-        let leaked_ref = self.inner(py).leak_immutable();
-        CopyMapItemsIterator::from_inner(
-            py,
-            unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
-        )
-    }
-
-    def tracked_dirs(&self) -> PyResult<PyList> {
-        let dirs = PyList::new(py, &[]);
-        for path in self.inner(py).borrow_mut().iter_tracked_dirs()
-            .map_err(|e| dirstate_error(py, e))?
-        {
-            let path = path.map_err(|e| v2_error(py, e))?;
-            let path = PyBytes::new(py, path.as_bytes());
-            dirs.append(py, path.into_object())
-        }
-        Ok(dirs)
-    }
-
-    def setparents_fixup(&self) -> PyResult<PyDict> {
-        let dict = PyDict::new(py);
-        let copies = self.inner(py).borrow_mut().setparents_fixup();
-        for (key, value) in copies.map_err(|e| v2_error(py, e))? {
-            dict.set_item(
-                py,
-                PyBytes::new(py, key.as_bytes()),
-                PyBytes::new(py, value.as_bytes()),
-            )?;
-        }
-        Ok(dict)
-    }
-
-    def debug_iter(&self, all: bool) -> PyResult<PyList> {
-        let dirs = PyList::new(py, &[]);
-        for item in self.inner(py).borrow().debug_iter(all) {
-            let (path, (state, mode, size, mtime)) =
-                item.map_err(|e| v2_error(py, e))?;
-            let path = PyBytes::new(py, path.as_bytes());
-            let item = (path, state, mode, size, mtime);
-            dirs.append(py, item.to_py_object(py).into_object())
-        }
-        Ok(dirs)
-    }
-});
-
-impl DirstateMap {
-    pub fn get_inner_mut<'a>(
-        &'a self,
-        py: Python<'a>,
-    ) -> RefMut<'a, OwningDirstateMap> {
-        self.inner(py).borrow_mut()
-    }
-    fn translate_key(
-        py: Python,
-        res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
-    ) -> PyResult<Option<PyBytes>> {
-        let (f, _entry) = res.map_err(|e| v2_error(py, e))?;
-        Ok(Some(PyBytes::new(py, f.as_bytes())))
-    }
-    fn translate_key_value(
-        py: Python,
-        res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
-    ) -> PyResult<Option<(PyBytes, PyObject)>> {
-        let (f, entry) = res.map_err(|e| v2_error(py, e))?;
-        Ok(Some((
-            PyBytes::new(py, f.as_bytes()),
-            DirstateItem::new_as_pyobject(py, entry)?,
-        )))
-    }
-}
-
-py_shared_iterator!(
-    DirstateMapKeysIterator,
-    UnsafePyLeaked<StateMapIter<'static>>,
-    DirstateMap::translate_key,
-    Option<PyBytes>
-);
-
-py_shared_iterator!(
-    DirstateMapItemsIterator,
-    UnsafePyLeaked<StateMapIter<'static>>,
-    DirstateMap::translate_key_value,
-    Option<(PyBytes, PyObject)>
-);
-
-py_class!(pub class DirstateIdentity |py| {
-    data inner: CoreDirstateIdentity;
-
-    def __new__(
-        _cls,
-        mode: u32,
-        dev: u64,
-        ino: u64,
-        nlink: u64,
-        uid: u32,
-        gid: u32,
-        size: u64,
-        mtime: i64,
-        mtime_nsec: i64,
-        ctime: i64,
-        ctime_nsec: i64) -> PyResult<DirstateIdentity> {
-            Self::create_instance(
-                py,
-                CoreDirstateIdentity {
-                    mode,
-                    dev,
-                    ino,
-                    nlink,
-                    uid,
-                    gid,
-                    size,
-                    mtime,
-                    mtime_nsec,
-                    ctime,
-                    ctime_nsec
-                }
-            )
-    }
-});
-
-fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
-    let bytes = obj.extract::<PyBytes>(py)?;
-    match bytes.data(py).try_into() {
-        Ok(s) => Ok(s),
-        Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
-    }
-}
-
-pub(super) fn v2_error(py: Python<'_>, _: DirstateV2ParseError) -> PyErr {
-    PyErr::new::<exc::ValueError, _>(py, "corrupted dirstate-v2")
-}
-
-fn dirstate_error(py: Python<'_>, e: DirstateError) -> PyErr {
-    PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
-}
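The `write_v2` binding deleted above maps a Python-level integer to `DirstateMapWriteMode`, silently treating unknown values as `Auto` (hence the `XXX should we error out?` note). A minimal standalone sketch of that dispatch, with hypothetical names and an explicit error instead of the silent fallback:

```rust
// Standalone sketch (not hg code): map a Python-facing integer to a
// write-mode enum, erroring out on unknown values instead of defaulting.
#[derive(Debug, PartialEq)]
enum WriteMode {
    Auto,
    ForceNewDataFile,
    ForceAppend,
}

fn write_mode_from_usize(value: usize) -> Result<WriteMode, String> {
    match value {
        0 => Ok(WriteMode::Auto),
        1 => Ok(WriteMode::ForceNewDataFile),
        2 => Ok(WriteMode::ForceAppend),
        unknown => Err(format!("unsupported write mode: {}", unknown)),
    }
}

fn main() {
    assert_eq!(write_mode_from_usize(2), Ok(WriteMode::ForceAppend));
    assert!(write_mode_from_usize(42).is_err());
}
```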
diff --git a/rust/hg-cpython/src/dirstate/item.rs b/rust/hg-cpython/src/dirstate/item.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kaXJzdGF0ZS9pdGVtLnJz..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/dirstate/item.rs
+++ /dev/null
@@ -1,249 +0,0 @@
-use cpython::exc;
-use cpython::ObjectProtocol;
-use cpython::PyBytes;
-use cpython::PyErr;
-use cpython::PyNone;
-use cpython::PyObject;
-use cpython::PyResult;
-use cpython::Python;
-use cpython::PythonObject;
-use hg::dirstate::entry::{DirstateEntry, DirstateV2Data, TruncatedTimestamp};
-use std::cell::Cell;
-
-py_class!(pub class DirstateItem |py| {
-    data entry: Cell<DirstateEntry>;
-
-    def __new__(
-        _cls,
-        wc_tracked: bool = false,
-        p1_tracked: bool = false,
-        p2_info: bool = false,
-        has_meaningful_data: bool = true,
-        has_meaningful_mtime: bool = true,
-        parentfiledata: Option<(u32, u32, Option<(u32, u32, bool)>)> = None,
-        fallback_exec: Option<bool> = None,
-        fallback_symlink: Option<bool> = None,
-
-    ) -> PyResult<DirstateItem> {
-        let mut mode_size_opt = None;
-        let mut mtime_opt = None;
-        if let Some((mode, size, mtime)) = parentfiledata {
-            if has_meaningful_data {
-                mode_size_opt = Some((mode, size))
-            }
-            if has_meaningful_mtime {
-                if let Some(m) = mtime {
-                    mtime_opt = Some(timestamp(py, m)?);
-                }
-            }
-        }
-        let entry = DirstateEntry::from_v2_data(DirstateV2Data {
-            wc_tracked,
-            p1_tracked,
-            p2_info,
-            mode_size: mode_size_opt,
-            mtime: mtime_opt,
-            fallback_exec,
-            fallback_symlink,
-        });
-        DirstateItem::create_instance(py, Cell::new(entry))
-    }
-
-    @property
-    def state(&self) -> PyResult<PyBytes> {
-        let state_byte: u8 = self.entry(py).get().state().into();
-        Ok(PyBytes::new(py, &[state_byte]))
-    }
-
-    @property
-    def mode(&self) -> PyResult<i32> {
-        Ok(self.entry(py).get().mode())
-    }
-
-    @property
-    def size(&self) -> PyResult<i32> {
-        Ok(self.entry(py).get().size())
-    }
-
-    @property
-    def mtime(&self) -> PyResult<i32> {
-        Ok(self.entry(py).get().mtime())
-    }
-
-    @property
-    def has_fallback_exec(&self) -> PyResult<bool> {
-        match self.entry(py).get().get_fallback_exec() {
-            Some(_) => Ok(true),
-            None => Ok(false),
-        }
-    }
-
-    @property
-    def fallback_exec(&self) -> PyResult<Option<bool>> {
-        match self.entry(py).get().get_fallback_exec() {
-            Some(exec) => Ok(Some(exec)),
-            None => Ok(None),
-        }
-    }
-
-    @fallback_exec.setter
-    def set_fallback_exec(&self, value: Option<PyObject>) -> PyResult<()> {
-        match value {
-            None => {self.entry(py).get().set_fallback_exec(None);},
-            Some(value) => {
-            if value.is_none(py) {
-                self.entry(py).get().set_fallback_exec(None);
-            } else {
-                self.entry(py).get().set_fallback_exec(
-                    Some(value.is_true(py)?)
-                );
-            }},
-        }
-        Ok(())
-    }
-
-    @property
-    def has_fallback_symlink(&self) -> PyResult<bool> {
-        match self.entry(py).get().get_fallback_symlink() {
-            Some(_) => Ok(true),
-            None => Ok(false),
-        }
-    }
-
-    @property
-    def fallback_symlink(&self) -> PyResult<Option<bool>> {
-        match self.entry(py).get().get_fallback_symlink() {
-            Some(symlink) => Ok(Some(symlink)),
-            None => Ok(None),
-        }
-    }
-
-    @fallback_symlink.setter
-    def set_fallback_symlink(&self, value: Option<PyObject>) -> PyResult<()> {
-        match value {
-            None => {self.entry(py).get().set_fallback_symlink(None);},
-            Some(value) => {
-            if value.is_none(py) {
-                self.entry(py).get().set_fallback_symlink(None);
-            } else {
-                self.entry(py).get().set_fallback_symlink(
-                    Some(value.is_true(py)?)
-                );
-            }},
-        }
-        Ok(())
-    }
-
-    @property
-    def tracked(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().tracked())
-    }
-
-    @property
-    def p1_tracked(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().p1_tracked())
-    }
-
-    @property
-    def added(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().added())
-    }
-
-    @property
-    def modified(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().modified())
-    }
-
-    @property
-    def p2_info(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().p2_info())
-    }
-
-    @property
-    def removed(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().removed())
-    }
-
-    @property
-    def maybe_clean(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().maybe_clean())
-    }
-
-    @property
-    def any_tracked(&self) -> PyResult<bool> {
-        Ok(self.entry(py).get().any_tracked())
-    }
-
-    def mtime_likely_equal_to(&self, other: (u32, u32, bool))
-        -> PyResult<bool> {
-        if let Some(mtime) = self.entry(py).get().truncated_mtime() {
-            Ok(mtime.likely_equal(timestamp(py, other)?))
-        } else {
-            Ok(false)
-        }
-    }
-
-    def drop_merge_data(&self) -> PyResult<PyNone> {
-        self.update(py, |entry| entry.drop_merge_data());
-        Ok(PyNone)
-    }
-
-    def set_clean(
-        &self,
-        mode: u32,
-        size: u32,
-        mtime: (u32, u32, bool),
-    ) -> PyResult<PyNone> {
-        let mtime = timestamp(py, mtime)?;
-        self.update(py, |entry| entry.set_clean(mode, size, mtime));
-        Ok(PyNone)
-    }
-
-    def set_possibly_dirty(&self) -> PyResult<PyNone> {
-        self.update(py, |entry| entry.set_possibly_dirty());
-        Ok(PyNone)
-    }
-
-    def set_tracked(&self) -> PyResult<PyNone> {
-        self.update(py, |entry| entry.set_tracked());
-        Ok(PyNone)
-    }
-
-    def set_untracked(&self) -> PyResult<PyNone> {
-        self.update(py, |entry| entry.set_untracked());
-        Ok(PyNone)
-    }
-});
-
-impl DirstateItem {
-    pub fn new_as_pyobject(
-        py: Python<'_>,
-        entry: DirstateEntry,
-    ) -> PyResult<PyObject> {
-        Ok(DirstateItem::create_instance(py, Cell::new(entry))?.into_object())
-    }
-
-    pub fn get_entry(&self, py: Python<'_>) -> DirstateEntry {
-        self.entry(py).get()
-    }
-
-    // TODO: Use https://doc.rust-lang.org/std/cell/struct.Cell.html#method.update instead when it’s stable
-    pub fn update(&self, py: Python<'_>, f: impl FnOnce(&mut DirstateEntry)) {
-        let mut entry = self.entry(py).get();
-        f(&mut entry);
-        self.entry(py).set(entry)
-    }
-}
-
-pub(crate) fn timestamp(
-    py: Python<'_>,
-    (s, ns, second_ambiguous): (u32, u32, bool),
-) -> PyResult<TruncatedTimestamp> {
-    TruncatedTimestamp::from_already_truncated(s, ns, second_ambiguous)
-        .map_err(|_| {
-            PyErr::new::<exc::ValueError, _>(
-                py,
-                "expected mtime truncated to 31 bits",
-            )
-        })
-}
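`DirstateItem::update` above works around the then-unstable `Cell::update` by copying the entry out of the `Cell`, mutating the copy, and storing it back. A self-contained sketch of that get-modify-set pattern on an illustrative `Copy` type (not hg's actual entry):

```rust
use std::cell::Cell;

// Sketch of the get-modify-set pattern used by `DirstateItem::update`:
// `Cell` only hands out copies, so we read, mutate the copy, and store it back.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Entry {
    tracked: bool,
    mode: u32,
}

fn update(cell: &Cell<Entry>, f: impl FnOnce(&mut Entry)) {
    let mut entry = cell.get();
    f(&mut entry);
    cell.set(entry);
}

fn main() {
    let cell = Cell::new(Entry { tracked: false, mode: 0o644 });
    update(&cell, |e| e.tracked = true);
    assert_eq!(cell.get(), Entry { tracked: true, mode: 0o644 });
}
```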
diff --git a/rust/hg-cpython/src/dirstate/status.rs b/rust/hg-cpython/src/dirstate/status.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kaXJzdGF0ZS9zdGF0dXMucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/dirstate/status.rs
+++ /dev/null
@@ -1,326 +0,0 @@
-// status.rs
-//
-// Copyright 2019, Raphaël Gomès <rgomes@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::status` module provided by the
-//! `hg-core` crate. From Python, this will be seen as
-//! `rustext.dirstate.status`.
-
-use crate::{dirstate::DirstateMap, exceptions::FallbackError};
-use cpython::{
-    exc::ValueError, ObjectProtocol, PyBool, PyBytes, PyErr, PyList, PyObject,
-    PyResult, PyTuple, Python, PythonObject, ToPyObject,
-};
-use hg::dirstate::status::{
-    BadMatch, DirstateStatus, StatusError, StatusOptions, StatusPath,
-};
-use hg::filepatterns::{
-    parse_pattern_syntax_kind, IgnorePattern, PatternError, PatternFileWarning,
-};
-use hg::matchers::{
-    DifferenceMatcher, IntersectionMatcher, Matcher, NeverMatcher,
-    PatternMatcher, UnionMatcher,
-};
-use hg::{
-    matchers::{AlwaysMatcher, FileMatcher, IncludeMatcher},
-    utils::{
-        files::{get_bytes_from_path, get_path_from_bytes},
-        hg_path::{HgPath, HgPathBuf},
-    },
-};
-use std::borrow::Borrow;
-
-fn collect_status_path_list(py: Python, paths: &[StatusPath<'_>]) -> PyList {
-    collect_pybytes_list(py, paths.iter().map(|item| &*item.path))
-}
-
-/// This will be useless once trait impls for collection are added to `PyBytes`
-/// upstream.
-fn collect_pybytes_list(
-    py: Python,
-    iter: impl Iterator<Item = impl AsRef<HgPath>>,
-) -> PyList {
-    let list = PyList::new(py, &[]);
-
-    for path in iter {
-        list.append(
-            py,
-            PyBytes::new(py, path.as_ref().as_bytes()).into_object(),
-        )
-    }
-
-    list
-}
-
-fn collect_bad_matches(
-    py: Python,
-    collection: &[(impl AsRef<HgPath>, BadMatch)],
-) -> PyResult<PyList> {
-    let list = PyList::new(py, &[]);
-
-    let os = py.import("os")?;
-    let get_error_message = |code: i32| -> PyResult<_> {
-        os.call(
-            py,
-            "strerror",
-            PyTuple::new(py, &[code.to_py_object(py).into_object()]),
-            None,
-        )
-    };
-
-    for (path, bad_match) in collection.iter() {
-        let message = match bad_match {
-            BadMatch::OsError(code) => get_error_message(*code)?,
-            BadMatch::BadType(bad_type) => {
-                format!("unsupported file type (type is {})", bad_type)
-                    .to_py_object(py)
-                    .into_object()
-            }
-        };
-        list.append(
-            py,
-            (PyBytes::new(py, path.as_ref().as_bytes()), message)
-                .to_py_object(py)
-                .into_object(),
-        )
-    }
-
-    Ok(list)
-}
-
-fn handle_fallback(py: Python, err: StatusError) -> PyErr {
-    match err {
-        StatusError::Pattern(e) => {
-            let as_string = e.to_string();
-            log::trace!("Rust status fallback: `{}`", &as_string);
-
-            PyErr::new::<FallbackError, _>(py, &as_string)
-        }
-        e => PyErr::new::<ValueError, _>(py, e.to_string()),
-    }
-}
-
-pub fn status_wrapper(
-    py: Python,
-    dmap: DirstateMap,
-    matcher: PyObject,
-    root_dir: PyObject,
-    ignore_files: PyList,
-    check_exec: bool,
-    list_clean: bool,
-    list_ignored: bool,
-    list_unknown: bool,
-    collect_traversed_dirs: bool,
-) -> PyResult<PyTuple> {
-    let bytes = root_dir.extract::<PyBytes>(py)?;
-    let root_dir = get_path_from_bytes(bytes.data(py));
-
-    let dmap: DirstateMap = dmap.to_py_object(py);
-    let mut dmap = dmap.get_inner_mut(py);
-
-    let ignore_files: PyResult<Vec<_>> = ignore_files
-        .iter(py)
-        .map(|b| {
-            let file = b.extract::<PyBytes>(py)?;
-            Ok(get_path_from_bytes(file.data(py)).to_owned())
-        })
-        .collect();
-    let ignore_files = ignore_files?;
-    // The caller may call `copymap.items()` separately
-    let list_copies = false;
-
-    let after_status = |res: Result<(DirstateStatus<'_>, _), StatusError>| {
-        let (status_res, warnings) =
-            res.map_err(|e| handle_fallback(py, e))?;
-        build_response(py, status_res, warnings)
-    };
-
-    let matcher = extract_matcher(py, matcher)?;
-    dmap.with_status(
-        &*matcher,
-        root_dir.to_path_buf(),
-        ignore_files,
-        StatusOptions {
-            check_exec,
-            list_clean,
-            list_ignored,
-            list_unknown,
-            list_copies,
-            collect_traversed_dirs,
-        },
-        after_status,
-    )
-}
-
-fn collect_kindpats(
-    py: Python,
-    matcher: PyObject,
-) -> PyResult<Vec<IgnorePattern>> {
-    matcher
-        .getattr(py, "_kindpats")?
-        .iter(py)?
-        .map(|k| {
-            let k = k?;
-            let syntax = parse_pattern_syntax_kind(
-                k.get_item(py, 0)?.extract::<PyBytes>(py)?.data(py),
-            )
-            .map_err(|e| handle_fallback(py, StatusError::Pattern(e)))?;
-            let pattern = k.get_item(py, 1)?.extract::<PyBytes>(py)?;
-            let pattern = pattern.data(py);
-            let source = k.get_item(py, 2)?.extract::<PyBytes>(py)?;
-            let source = get_path_from_bytes(source.data(py));
-            let new = IgnorePattern::new(syntax, pattern, source);
-            Ok(new)
-        })
-        .collect()
-}
-
-/// Transform a Python matcher into a Rust matcher.
-fn extract_matcher(
-    py: Python,
-    matcher: PyObject,
-) -> PyResult<Box<dyn Matcher + Sync>> {
-    let tampered = matcher
-        .call_method(py, "was_tampered_with_nonrec", PyTuple::empty(py), None)?
-        .extract::<PyBool>(py)?
-        .is_true();
-    if tampered {
-        return Err(handle_fallback(
-            py,
-            StatusError::Pattern(PatternError::UnsupportedSyntax(
-                "Pattern matcher was tampered with!".to_string(),
-            )),
-        ));
-    };
-    match matcher.get_type(py).name(py).borrow() {
-        "alwaysmatcher" => Ok(Box::new(AlwaysMatcher)),
-        "nevermatcher" => Ok(Box::new(NeverMatcher)),
-        "exactmatcher" => {
-            let files = matcher.call_method(
-                py,
-                "files",
-                PyTuple::new(py, &[]),
-                None,
-            )?;
-            let files: PyList = files.cast_into(py)?;
-            let files: PyResult<Vec<HgPathBuf>> = files
-                .iter(py)
-                .map(|f| {
-                    Ok(HgPathBuf::from_bytes(
-                        f.extract::<PyBytes>(py)?.data(py),
-                    ))
-                })
-                .collect();
-
-            let files = files?;
-            let file_matcher = FileMatcher::new(files)
-                .map_err(|e| PyErr::new::<ValueError, _>(py, e.to_string()))?;
-            Ok(Box::new(file_matcher))
-        }
-        "includematcher" => {
-            // Get the patterns from Python even though most of them are
-            // redundant with those we will parse later on, as they include
-            // those passed from the command line.
-            let ignore_patterns = collect_kindpats(py, matcher)?;
-
-            let matcher = IncludeMatcher::new(ignore_patterns)
-                .map_err(|e| handle_fallback(py, e.into()))?;
-
-            Ok(Box::new(matcher))
-        }
-        "unionmatcher" => {
-            let matchers: PyResult<Vec<_>> = matcher
-                .getattr(py, "_matchers")?
-                .iter(py)?
-                .map(|py_matcher| extract_matcher(py, py_matcher?))
-                .collect();
-
-            Ok(Box::new(UnionMatcher::new(matchers?)))
-        }
-        "intersectionmatcher" => {
-            let m1 = extract_matcher(py, matcher.getattr(py, "_m1")?)?;
-            let m2 = extract_matcher(py, matcher.getattr(py, "_m2")?)?;
-
-            Ok(Box::new(IntersectionMatcher::new(m1, m2)))
-        }
-        "differencematcher" => {
-            let m1 = extract_matcher(py, matcher.getattr(py, "_m1")?)?;
-            let m2 = extract_matcher(py, matcher.getattr(py, "_m2")?)?;
-
-            Ok(Box::new(DifferenceMatcher::new(m1, m2)))
-        }
-        "patternmatcher" => {
-            let patterns = collect_kindpats(py, matcher)?;
-
-            let matcher = PatternMatcher::new(patterns)
-                .map_err(|e| handle_fallback(py, e.into()))?;
-
-            Ok(Box::new(matcher))
-        }
-        e => Err(PyErr::new::<FallbackError, _>(
-            py,
-            format!("Unsupported matcher {}", e),
-        )),
-    }
-}
-
-fn build_response(
-    py: Python,
-    status_res: DirstateStatus,
-    warnings: Vec<PatternFileWarning>,
-) -> PyResult<PyTuple> {
-    let modified = collect_status_path_list(py, &status_res.modified);
-    let added = collect_status_path_list(py, &status_res.added);
-    let removed = collect_status_path_list(py, &status_res.removed);
-    let deleted = collect_status_path_list(py, &status_res.deleted);
-    let clean = collect_status_path_list(py, &status_res.clean);
-    let ignored = collect_status_path_list(py, &status_res.ignored);
-    let unknown = collect_status_path_list(py, &status_res.unknown);
-    let unsure = collect_status_path_list(py, &status_res.unsure);
-    let bad = collect_bad_matches(py, &status_res.bad)?;
-    let traversed = collect_pybytes_list(py, status_res.traversed.iter());
-    let dirty = status_res.dirty.to_py_object(py);
-    let py_warnings = PyList::new(py, &[]);
-    for warning in warnings.iter() {
-        // We use duck-typing on the Python side for dispatch, good enough for
-        // now.
-        match warning {
-            PatternFileWarning::InvalidSyntax(file, syn) => {
-                py_warnings.append(
-                    py,
-                    (
-                        PyBytes::new(py, &get_bytes_from_path(file)),
-                        PyBytes::new(py, syn),
-                    )
-                        .to_py_object(py)
-                        .into_object(),
-                );
-            }
-            PatternFileWarning::NoSuchFile(file) => py_warnings.append(
-                py,
-                PyBytes::new(py, &get_bytes_from_path(file)).into_object(),
-            ),
-        }
-    }
-
-    Ok(PyTuple::new(
-        py,
-        &[
-            unsure.into_object(),
-            modified.into_object(),
-            added.into_object(),
-            removed.into_object(),
-            deleted.into_object(),
-            clean.into_object(),
-            ignored.into_object(),
-            unknown.into_object(),
-            py_warnings.into_object(),
-            bad.into_object(),
-            traversed.into_object(),
-            dirty.into_object(),
-        ][..],
-    ))
-}
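`extract_matcher` above dispatches on the Python matcher's class name and recurses into `_m1`, `_m2` and `_matchers` to rebuild composite matchers as boxed `Matcher` trait objects. A toy, self-contained sketch of that composition pattern (illustrative trait and types, not hg-core's `Matcher`):

```rust
// Toy sketch of composing matchers as boxed trait objects, mirroring how
// union/intersection/difference matchers wrap other matchers recursively.
trait Matcher {
    fn matches(&self, path: &str) -> bool;
}

struct Always;
impl Matcher for Always {
    fn matches(&self, _path: &str) -> bool {
        true
    }
}

struct Prefix(&'static str);
impl Matcher for Prefix {
    fn matches(&self, path: &str) -> bool {
        path.starts_with(self.0)
    }
}

struct Difference {
    base: Box<dyn Matcher>,
    excluded: Box<dyn Matcher>,
}
impl Matcher for Difference {
    fn matches(&self, path: &str) -> bool {
        self.base.matches(path) && !self.excluded.matches(path)
    }
}

fn main() {
    let m = Difference {
        base: Box::new(Always),
        excluded: Box::new(Prefix("tests/")),
    };
    assert!(m.matches("mercurial/dirstate.py"));
    assert!(!m.matches("tests/test-status.t"));
}
```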
diff --git a/rust/hg-cpython/src/discovery.rs b/rust/hg-cpython/src/discovery.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9kaXNjb3ZlcnkucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/discovery.rs
+++ /dev/null
@@ -1,277 +0,0 @@
-// discovery.rs
-//
-// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for the `hg::discovery` module provided by the
-//! `hg-core` crate. From Python, this will be seen as `rustext.discovery`
-//!
-//! # Classes visible from Python:
-//! - [`PartialDiscovery`] is the Rust implementation of
-//!   `mercurial.setdiscovery.partialdiscovery`.
-
-use crate::PyRevision;
-use crate::{
-    conversion::rev_pyiter_collect, exceptions::GraphError,
-    revlog::PySharedIndex,
-};
-use cpython::{
-    ObjectProtocol, PyClone, PyDict, PyModule, PyObject, PyResult, PyTuple,
-    Python, PythonObject, ToPyObject, UnsafePyLeaked,
-};
-use hg::discovery::PartialDiscovery as CorePartialDiscovery;
-use hg::Revision;
-use std::collections::HashSet;
-
-use std::cell::RefCell;
-
-use crate::revlog::py_rust_index_to_graph;
-
-py_class!(pub class PartialDiscovery |py| {
-    data inner: RefCell<UnsafePyLeaked<CorePartialDiscovery<PySharedIndex>>>;
-    data index: RefCell<UnsafePyLeaked<PySharedIndex>>;
-
-    // `_respectsize` is currently only here to replicate the Python API and
-    // will be used in future patches inside methods that are yet to be
-    // implemented.
-    def __new__(
-        _cls,
-        repo: PyObject,
-        targetheads: PyObject,
-        respectsize: bool,
-        randomize: bool = true
-    ) -> PyResult<PartialDiscovery> {
-        Self::inner_new(py, repo, targetheads, respectsize, randomize)
-    }
-
-    def addcommons(&self, commons: PyObject) -> PyResult<PyObject> {
-        self.inner_addcommons(py, commons)
-    }
-
-    def addmissings(&self, missings: PyObject) -> PyResult<PyObject> {
-        self.inner_addmissings(py, missings)
-    }
-
-    def addinfo(&self, sample: PyObject) -> PyResult<PyObject> {
-        self.inner_addinfo(py, sample)
-    }
-
-    def hasinfo(&self) -> PyResult<bool> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner = unsafe { leaked.try_borrow(py)? };
-        Ok(inner.has_info())
-    }
-
-    def iscomplete(&self) -> PyResult<bool> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner = unsafe { leaked.try_borrow(py)? };
-        Ok(inner.is_complete())
-    }
-
-    def stats(&self) -> PyResult<PyDict> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner = unsafe { leaked.try_borrow(py)? };
-        let stats = inner.stats();
-        let as_dict: PyDict = PyDict::new(py);
-        as_dict.set_item(py, "undecided",
-                         stats.undecided.map(
-                             |l| l.to_py_object(py).into_object())
-                             .unwrap_or_else(|| py.None()))?;
-        Ok(as_dict)
-    }
-
-    def commonheads(&self) -> PyResult<HashSet<PyRevision>> {
-        let leaked = self.inner(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let inner = unsafe { leaked.try_borrow(py)? };
-        let res = inner.common_heads()
-                    .map_err(|e| GraphError::pynew(py, e))?;
-        Ok(res.into_iter().map(Into::into).collect())
-    }
-
-    def takefullsample(&self, headrevs: PyObject,
-                       size: usize) -> PyResult<PyObject> {
-        self.inner_takefullsample(py, headrevs, size)
-    }
-
-    def takequicksample(&self, headrevs: PyObject,
-                        size: usize) -> PyResult<PyObject> {
-        self.inner_takequicksample(py, headrevs, size)
-    }
-
-});
-
-impl PartialDiscovery {
-    fn inner_new(
-        py: Python,
-        repo: PyObject,
-        targetheads: PyObject,
-        respectsize: bool,
-        randomize: bool,
-    ) -> PyResult<Self> {
-        let index = repo.getattr(py, "changelog")?.getattr(py, "index")?;
-        let cloned_index = py_rust_index_to_graph(py, index.clone_ref(py))?;
-        let index = py_rust_index_to_graph(py, index)?;
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let target_heads = {
-            let borrowed_idx = unsafe { index.try_borrow(py)? };
-            rev_pyiter_collect(py, &targetheads, &*borrowed_idx)?
-        };
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let lazy_disco = unsafe {
-            index.map(py, |idx| {
-                CorePartialDiscovery::new(
-                    idx,
-                    target_heads,
-                    respectsize,
-                    randomize,
-                )
-            })
-        };
-        Self::create_instance(
-            py,
-            RefCell::new(lazy_disco),
-            RefCell::new(cloned_index),
-        )
-    }
-
-    /// Convert a Python iterator of revisions into a vector
-    fn pyiter_to_vec(
-        &self,
-        py: Python,
-        iter: &PyObject,
-    ) -> PyResult<Vec<Revision>> {
-        let leaked = self.index(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let index = unsafe { leaked.try_borrow(py)? };
-        rev_pyiter_collect(py, iter, &*index)
-    }
-
-    fn inner_addinfo(
-        &self,
-        py: Python,
-        sample: PyObject,
-    ) -> PyResult<PyObject> {
-        let mut missing: Vec<Revision> = Vec::new();
-        let mut common: Vec<Revision> = Vec::new();
-        for info in sample.iter(py)? {
-            // info is a pair (Revision, bool)
-            let mut revknown = info?.iter(py)?;
-            let rev: PyRevision = revknown.next().unwrap()?.extract(py)?;
-            // This is fine since we're just using revisions as integers
-            // for the purposes of discovery
-            let rev = Revision(rev.0);
-            let known: bool = revknown.next().unwrap()?.extract(py)?;
-            if known {
-                common.push(rev);
-            } else {
-                missing.push(rev);
-            }
-        }
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let mut inner = unsafe { leaked.try_borrow_mut(py)? };
-        inner
-            .add_common_revisions(common)
-            .map_err(|e| GraphError::pynew(py, e))?;
-        inner
-            .add_missing_revisions(missing)
-            .map_err(|e| GraphError::pynew(py, e))?;
-        Ok(py.None())
-    }
-
-    fn inner_addcommons(
-        &self,
-        py: Python,
-        commons: PyObject,
-    ) -> PyResult<PyObject> {
-        let commons_vec = self.pyiter_to_vec(py, &commons)?;
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let mut inner = unsafe { leaked.try_borrow_mut(py)? };
-        inner
-            .add_common_revisions(commons_vec)
-            .map_err(|e| GraphError::pynew(py, e))?;
-        Ok(py.None())
-    }
-
-    fn inner_addmissings(
-        &self,
-        py: Python,
-        missings: PyObject,
-    ) -> PyResult<PyObject> {
-        let missings_vec = self.pyiter_to_vec(py, &missings)?;
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let mut inner = unsafe { leaked.try_borrow_mut(py)? };
-        inner
-            .add_missing_revisions(missings_vec)
-            .map_err(|e| GraphError::pynew(py, e))?;
-        Ok(py.None())
-    }
-
-    fn inner_takefullsample(
-        &self,
-        py: Python,
-        _headrevs: PyObject,
-        size: usize,
-    ) -> PyResult<PyObject> {
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let mut inner = unsafe { leaked.try_borrow_mut(py)? };
-        let sample = inner
-            .take_full_sample(size)
-            .map_err(|e| GraphError::pynew(py, e))?;
-        let as_vec: Vec<PyObject> = sample
-            .iter()
-            .map(|rev| PyRevision(rev.0).to_py_object(py).into_object())
-            .collect();
-        Ok(PyTuple::new(py, as_vec.as_slice()).into_object())
-    }
-
-    fn inner_takequicksample(
-        &self,
-        py: Python,
-        headrevs: PyObject,
-        size: usize,
-    ) -> PyResult<PyObject> {
-        let revsvec = self.pyiter_to_vec(py, &headrevs)?;
-        let mut leaked = self.inner(py).borrow_mut();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let mut inner = unsafe { leaked.try_borrow_mut(py)? };
-        let sample = inner
-            .take_quick_sample(revsvec, size)
-            .map_err(|e| GraphError::pynew(py, e))?;
-        let as_vec: Vec<PyObject> = sample
-            .iter()
-            .map(|rev| PyRevision(rev.0).to_py_object(py).into_object())
-            .collect();
-        Ok(PyTuple::new(py, as_vec.as_slice()).into_object())
-    }
-}
-
-/// Create the module, with __package__ given from parent
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.discovery", package);
-    let m = PyModule::new(py, dotted_name)?;
-    m.add(py, "__package__", package)?;
-    m.add(
-        py,
-        "__doc__",
-        "Discovery of common node sets - Rust implementation",
-    )?;
-    m.add_class::<PartialDiscovery>(py)?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-    // Example C code (see pyexpat.c and import.c) will "give away the
-    // reference", but we won't because it will be consumed once the
-    // Rust PyObject is dropped.
-    Ok(m)
-}
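`inner_addinfo` above walks a Python iterable of `(revision, known)` pairs and splits it into the common and missing revision sets handed to the core discovery object. The same partitioning on already-extracted pairs, as a plain-Rust sketch:

```rust
// Sketch: partition (revision, known) pairs into common and missing revisions,
// as `inner_addinfo` does after extracting the values from Python objects.
fn partition_sample(sample: &[(i32, bool)]) -> (Vec<i32>, Vec<i32>) {
    let mut common = Vec::new();
    let mut missing = Vec::new();
    for &(rev, known) in sample {
        if known {
            common.push(rev);
        } else {
            missing.push(rev);
        }
    }
    (common, missing)
}

fn main() {
    let (common, missing) = partition_sample(&[(0, true), (3, false), (5, true)]);
    assert_eq!(common, vec![0, 5]);
    assert_eq!(missing, vec![3]);
}
```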
diff --git a/rust/hg-cpython/src/exceptions.rs b/rust/hg-cpython/src/exceptions.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9leGNlcHRpb25zLnJz..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/exceptions.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-// exceptions.rs
-//
-// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Bindings for Rust errors
-//!
-//! [`GraphError`] exposes `hg::GraphError` as a subclass of `ValueError`
-//! but some variants of `hg::GraphError` can be converted directly to other
-//! existing Python exceptions if appropriate.
-//!
-//! [`GraphError`]: struct.GraphError.html
-use cpython::{
-    exc::{RuntimeError, ValueError},
-    py_exception, PyErr, Python,
-};
-use hg;
-
-use crate::PyRevision;
-
-py_exception!(rustext, GraphError, ValueError);
-
-impl GraphError {
-    pub fn pynew(py: Python, inner: hg::GraphError) -> PyErr {
-        match inner {
-            hg::GraphError::ParentOutOfRange(r) => {
-                GraphError::new(py, ("ParentOutOfRange", PyRevision(r.0)))
-            }
-            hg::GraphError::ParentOutOfOrder(r) => {
-                GraphError::new(py, ("ParentOutOfOrder", PyRevision(r.0)))
-            }
-        }
-    }
-
-    pub fn pynew_from_vcsgraph(
-        py: Python,
-        inner: vcsgraph::graph::GraphReadError,
-    ) -> PyErr {
-        match inner {
-            vcsgraph::graph::GraphReadError::InconsistentGraphData => {
-                GraphError::new(py, "InconsistentGraphData")
-            }
-            vcsgraph::graph::GraphReadError::InvalidKey => {
-                GraphError::new(py, "ParentOutOfRange")
-            }
-            vcsgraph::graph::GraphReadError::KeyedInvalidKey(r) => {
-                GraphError::new(py, ("ParentOutOfRange", r))
-            }
-            vcsgraph::graph::GraphReadError::WorkingDirectoryUnsupported => {
-                match py
-                    .import("mercurial.error")
-                    .and_then(|m| m.get(py, "WdirUnsupported"))
-                {
-                    Err(e) => e,
-                    Ok(cls) => PyErr::from_instance(py, cls),
-                }
-            }
-        }
-    }
-}
-
-py_exception!(rustext, HgPathPyError, RuntimeError);
-py_exception!(rustext, FallbackError, RuntimeError);
-py_exception!(shared_ref, AlreadyBorrowed, RuntimeError);
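`GraphError::pynew` above turns each `hg::GraphError` variant into a `(label, revision)` payload for the Python-side exception. The variant-to-payload mapping, reduced to a standalone sketch with illustrative types:

```rust
// Sketch of the variant-to-payload mapping behind `GraphError::pynew`:
// each Rust error variant becomes a (label, revision) pair handed to Python.
#[derive(Debug)]
enum GraphError {
    ParentOutOfRange(i32),
    ParentOutOfOrder(i32),
}

fn to_exception_args(err: &GraphError) -> (&'static str, i32) {
    match err {
        GraphError::ParentOutOfRange(rev) => ("ParentOutOfRange", *rev),
        GraphError::ParentOutOfOrder(rev) => ("ParentOutOfOrder", *rev),
    }
}

fn main() {
    assert_eq!(
        to_exception_args(&GraphError::ParentOutOfRange(42)),
        ("ParentOutOfRange", 42)
    );
}
```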
diff --git a/rust/hg-cpython/src/lib.rs b/rust/hg-cpython/src/lib.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9saWIucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/lib.rs
+++ /dev/null
@@ -1,108 +0,0 @@
-// lib.rs
-//
-// Copyright 2018 Georges Racinet <gracinet@anybox.fr>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Python bindings of `hg-core` objects using the `cpython` crate.
-//! Once compiled, the resulting single shared library object can be placed in
-//! the `mercurial` package directly as `rustext.so` or `rustext.dll`.
-//! It holds several modules, so that from the point of view of Python,
-//! it behaves as the `cext` package.
-//!
-//! Example:
-//!
-//! ```text
-//! >>> from mercurial.rustext import ancestor
-//! >>> ancestor.__doc__
-//! 'Generic DAG ancestor algorithms - Rust implementation'
-//! ```
-#![allow(clippy::too_many_arguments)] // rust-cpython macros
-#![allow(clippy::zero_ptr)] // rust-cpython macros
-#![allow(clippy::needless_update)] // rust-cpython macros
-#![allow(clippy::manual_strip)] // rust-cpython macros
-#![allow(clippy::type_complexity)] // rust-cpython macros
-
-use cpython::{FromPyObject, PyInt, Python, ToPyObject};
-use hg::{BaseRevision, Revision};
-
-/// This crate uses nested private macros; `extern crate` is still needed in
-/// the 2018 edition.
-#[macro_use]
-extern crate cpython;
-
-pub mod ancestors;
-mod cindex;
-mod conversion;
-#[macro_use]
-pub mod ref_sharing;
-pub mod copy_tracing;
-pub mod dagops;
-pub mod debug;
-pub mod dirstate;
-pub mod discovery;
-pub mod exceptions;
-mod pybytes_deref;
-pub mod revlog;
-pub mod update;
-pub mod utils;
-pub mod vfs;
-
-/// Revision as exposed to/from the Python layer.
-///
-/// We need this indirection because of the orphan rule, meaning we can't
-/// implement a foreign trait (like [`cpython::ToPyObject`])
-/// for a foreign type (like [`hg::UncheckedRevision`]).
-///
-/// This also acts as a deterrent against blindly trusting Python to send
-/// us valid revision numbers.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct PyRevision(BaseRevision);
-
-impl From<Revision> for PyRevision {
-    fn from(r: Revision) -> Self {
-        PyRevision(r.0)
-    }
-}
-
-impl<'s> FromPyObject<'s> for PyRevision {
-    fn extract(
-        py: Python,
-        obj: &'s cpython::PyObject,
-    ) -> cpython::PyResult<Self> {
-        Ok(Self(obj.extract::<BaseRevision>(py)?))
-    }
-}
-
-impl ToPyObject for PyRevision {
-    type ObjectType = PyInt;
-
-    fn to_py_object(&self, py: Python) -> Self::ObjectType {
-        self.0.to_py_object(py)
-    }
-}
-
-py_module_initializer!(rustext, initrustext, PyInit_rustext, |py, m| {
-    m.add(
-        py,
-        "__doc__",
-        "Mercurial core concepts - Rust implementation",
-    )?;
-
-    let dotted_name: String = m.get(py, "__name__")?.extract(py)?;
-    m.add(py, "ancestor", ancestors::init_module(py, &dotted_name)?)?;
-    m.add(py, "dagop", dagops::init_module(py, &dotted_name)?)?;
-    m.add(py, "debug", debug::init_module(py, &dotted_name)?)?;
-    m.add(
-        py,
-        "copy_tracing",
-        copy_tracing::init_module(py, &dotted_name)?,
-    )?;
-    m.add(py, "discovery", discovery::init_module(py, &dotted_name)?)?;
-    m.add(py, "dirstate", dirstate::init_module(py, &dotted_name)?)?;
-    m.add(py, "revlog", revlog::init_module(py, &dotted_name)?)?;
-    m.add(py, "update", update::init_module(py, &dotted_name)?)?;
-    m.add(py, "GraphError", py.get_type::<exceptions::GraphError>())?;
-    Ok(())
-});
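`PyRevision` above exists because of the orphan rule: a foreign trait (such as `cpython::ToPyObject`) cannot be implemented for a foreign type, but it can be implemented for a local newtype wrapping that type. A minimal sketch of the same pattern with stand-in trait and type (nothing hg-specific):

```rust
// Sketch of the orphan-rule workaround used by `PyRevision`: wrap the foreign
// value in a local newtype so the trait impl lives on a local type.
use std::fmt;

// Imagine `i32` here stands in for a type defined in another crate.
pub struct Wrapped(pub i32);

// `fmt::Display` stands in for a foreign trait like `ToPyObject`: we cannot
// add an impl directly on the foreign type from this crate, but we can
// implement it for our local newtype.
impl fmt::Display for Wrapped {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "revision {}", self.0)
    }
}

impl From<i32> for Wrapped {
    fn from(value: i32) -> Self {
        Wrapped(value)
    }
}

fn main() {
    println!("{}", Wrapped::from(7));
}
```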
diff --git a/rust/hg-cpython/src/pybytes_deref.rs b/rust/hg-cpython/src/pybytes_deref.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9weWJ5dGVzX2RlcmVmLnJz..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/pybytes_deref.rs
+++ /dev/null
@@ -1,125 +0,0 @@
-use crate::cpython::buffer::Element;
-use cpython::{
-    buffer::PyBuffer, exc::ValueError, PyBytes, PyErr, PyResult, Python,
-};
-use stable_deref_trait::StableDeref;
-
-/// Safe abstraction over a `PyBytes` together with the `&[u8]` slice
-/// that borrows it. Implements `Deref<Target = [u8]>`.
-///
-/// Calling `PyBytes::data` requires a GIL marker but we want to access the
-/// data in a thread that (ideally) does not need to acquire the GIL.
-/// This type allows separating the call and the use.
-///
-/// It also enables using a (wrapped) `PyBytes` in GIL-unaware generic code.
-pub struct PyBytesDeref {
-    #[allow(unused)]
-    keep_alive: PyBytes,
-
-    /// Borrows the buffer inside `self.keep_alive`,
-    /// but the borrow-checker cannot express self-referential structs.
-    data: &'static [u8],
-}
-
-impl PyBytesDeref {
-    pub fn new(py: Python, bytes: PyBytes) -> Self {
-        let as_raw: *const [u8] = bytes.data(py);
-        Self {
-            // Safety: the raw pointer is valid as long as the PyBytes is still
-            // alive, and the object owns it.
-            data: unsafe { &*as_raw },
-            keep_alive: bytes,
-        }
-    }
-
-    pub fn unwrap(self) -> PyBytes {
-        self.keep_alive
-    }
-}
-
-impl std::ops::Deref for PyBytesDeref {
-    type Target = [u8];
-
-    fn deref(&self) -> &[u8] {
-        self.data
-    }
-}
-
-unsafe impl StableDeref for PyBytesDeref {}
-
-fn require_send<T: Send>() {}
-
-#[allow(unused)]
-fn static_assert_pybytes_is_send() {
-    #[allow(clippy::no_effect)]
-    require_send::<PyBytes>;
-}
-
-// Safety: PyBytes is Send. Raw pointers are not by default,
-// but here sending one to another thread is fine since we ensure it stays
-// valid.
-unsafe impl Send for PyBytesDeref {}
-
-/// Safe abstraction over a `PyBuffer`, analogous to `PyBytesDeref` above.
-/// It also enables using a (wrapped) `PyBuffer` in GIL-unaware generic code.
-pub struct PyBufferDeref {
-    #[allow(unused)]
-    keep_alive: PyBuffer,
-
-    /// Borrows the buffer inside `self.keep_alive`,
-    /// but the borrow-checker cannot express self-referential structs.
-    data: *const [u8],
-}
-
-fn get_buffer<'a>(py: Python, buf: &'a PyBuffer) -> PyResult<&'a [u8]> {
-    let len = buf.item_count();
-
-    let cbuf = buf.buf_ptr();
-    let has_correct_item_size = std::mem::size_of::<u8>() == buf.item_size();
-    let is_valid_buffer = has_correct_item_size
-        && buf.is_c_contiguous()
-        && u8::is_compatible_format(buf.format())
-        && buf.readonly();
-
-    let bytes = if is_valid_buffer {
-        unsafe { std::slice::from_raw_parts(cbuf as *const u8, len) }
-    } else {
-        return Err(PyErr::new::<ValueError, _>(
-            py,
-            "Buffer has an invalid memory representation",
-        ));
-    };
-    Ok(bytes)
-}
-
-impl PyBufferDeref {
-    pub fn new(py: Python, buf: PyBuffer) -> PyResult<Self> {
-        Ok(Self {
-            data: get_buffer(py, &buf)?,
-            keep_alive: buf,
-        })
-    }
-}
-
-impl std::ops::Deref for PyBufferDeref {
-    type Target = [u8];
-
-    fn deref(&self) -> &[u8] {
-        // Safety: the raw pointer is valid as long as the PyBuffer is still
-        // alive, and the returned slice borrows `self`.
-        unsafe { &*self.data }
-    }
-}
-
-unsafe impl StableDeref for PyBufferDeref {}
-
-#[allow(unused)]
-fn static_assert_pybuffer_is_send() {
-    #[allow(clippy::no_effect)]
-    require_send::<PyBuffer>;
-}
-
-// Safety: PyBuffer is Send. Raw pointers are not by default,
-// but here sending one to another thread is fine since we ensure it stays
-// valid.
-unsafe impl Send for PyBufferDeref {}
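`PyBytesDeref` and `PyBufferDeref` above keep the owning Python object alive next to a raw pointer into its buffer, so the `&[u8]` can be used in code that has no GIL marker at hand. A simplified pure-Rust version of that keep-alive-plus-raw-pointer shape, using a `Vec<u8>` as the owner (a sketch of the idea, not the binding itself):

```rust
// Simplified sketch of the keep-alive pattern in `PyBytesDeref`: own the buffer
// and a raw pointer to its contents, and hand out `&[u8]` through `Deref`.
// The Vec's heap allocation stays put when `OwnedBytes` is moved, so the
// pointer captured in `new` remains valid as long as `keep_alive` is owned.
use std::ops::Deref;

pub struct OwnedBytes {
    #[allow(unused)]
    keep_alive: Vec<u8>,
    data: *const [u8],
}

impl OwnedBytes {
    pub fn new(bytes: Vec<u8>) -> Self {
        let data: *const [u8] = bytes.as_slice();
        Self { keep_alive: bytes, data }
    }
}

impl Deref for OwnedBytes {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        // Safety: `data` points into `self.keep_alive`, which is never
        // mutated or dropped while `self` is alive.
        unsafe { &*self.data }
    }
}

fn main() {
    let owned = OwnedBytes::new(b"dirstate-v2".to_vec());
    assert_eq!(&owned[..8], b"dirstate");
}
```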
diff --git a/rust/hg-cpython/src/ref_sharing.rs b/rust/hg-cpython/src/ref_sharing.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9yZWZfc2hhcmluZy5ycw==..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/ref_sharing.rs
+++ /dev/null
@@ -1,125 +0,0 @@
-// ref_sharing.rs
-//
-// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to
-// deal in the Software without restriction, including without limitation the
-// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-// sell copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-// IN THE SOFTWARE.
-
-//! Macros for use in the `hg-cpython` bridge library.
-
-/// Defines a `py_class!` that acts as a Python iterator over a Rust iterator.
-///
-/// TODO: this is a bit awkward to use, and a better (more complicated)
-///     procedural macro would simplify the interface a lot.
-///
-/// # Parameters
-///
-/// * `$name` is the identifier to give to the resulting Rust struct.
-/// * `$leaked` corresponds to `UnsafePyLeaked` in the matching `@shared data`
-///   declaration.
-/// * `$iterator_type` is the type of the Rust iterator.
-/// * `$success_func` is a function for processing the Rust `(key, value)`
-///   tuple on iteration success, turning it into something Python understands.
-/// * `$success_type` is the return type of `$success_func`
-///
-/// # Safety
-///
-/// `$success_func` may take a reference, but its lifetime may be cheated.
-/// Do not copy it out of the function call.
-///
-/// # Example
-/// ```
-/// use cpython::*;
-/// use std::collections::hash_map::{HashMap, Iter as HashMapIter};
-/// use rusthg::py_shared_iterator;
-///
-/// pub struct MyStruct {
-///     inner: HashMap<Vec<u8>, Vec<u8>>
-/// }
-///
-/// py_class!(pub class MyType |py| {
-///     @shared data inner_shared: MyStruct;
-///
-///     def __iter__(&self) -> PyResult<MyTypeItemsIterator> {
-///         let leaked_ref = self.inner_shared(py).leak_immutable();
-///         MyTypeItemsIterator::from_inner(
-///             py,
-///             unsafe { leaked_ref.map(py, |o| o.inner.iter()) },
-///         )
-///     }
-/// });
-///
-/// impl MyType {
-///     fn translate_key_value(
-///         py: Python,
-///         res: (&Vec<u8>, &Vec<u8>),
-///     ) -> PyResult<Option<(PyBytes, PyBytes)>> {
-///         let (f, entry) = res;
-///         Ok(Some((
-///             PyBytes::new(py, f),
-///             PyBytes::new(py, entry),
-///         )))
-///     }
-/// }
-///
-/// py_shared_iterator!(
-///     MyTypeItemsIterator,
-///     UnsafePyLeaked<HashMapIter<'static, Vec<u8>, Vec<u8>>>,
-///     MyType::translate_key_value,
-///     Option<(PyBytes, PyBytes)>
-/// );
-/// ```
-#[macro_export]
-macro_rules! py_shared_iterator {
-    (
-        $name: ident,
-        $leaked: ty,
-        $success_func: expr,
-        $success_type: ty
-    ) => {
-        py_class!(pub class $name |py| {
-            data inner: std::cell::RefCell<$leaked>;
-
-            def __next__(&self) -> cpython::PyResult<$success_type> {
-                let mut leaked = self.inner(py).borrow_mut();
-                let mut iter = unsafe { leaked.try_borrow_mut(py)? };
-                match iter.next() {
-                    None => Ok(None),
-                    // res may be a reference of cheated 'static lifetime
-                    Some(res) => $success_func(py, res),
-                }
-            }
-
-            def __iter__(&self) -> cpython::PyResult<Self> {
-                Ok(self.clone_ref(py))
-            }
-        });
-
-        impl $name {
-            pub fn from_inner(
-                py: Python,
-                leaked: $leaked,
-            ) -> cpython::PyResult<Self> {
-                Self::create_instance(
-                    py,
-                    std::cell::RefCell::new(leaked),
-                )
-            }
-        }
-    };
-}
diff --git a/rust/hg-cpython/src/revlog.rs b/rust/hg-cpython/src/revlog.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy9yZXZsb2cucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/revlog.rs
+++ /dev/null
@@ -1,2304 +0,0 @@
-// revlog.rs
-//
-// Copyright 2019-2020 Georges Racinet <georges.racinet@octobus.net>
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-#![allow(non_snake_case)]
-
-use crate::{
-    conversion::{rev_pyiter_collect, rev_pyiter_collect_or_else},
-    pybytes_deref::{PyBufferDeref, PyBytesDeref},
-    utils::{node_from_py_bytes, node_from_py_object},
-    PyRevision,
-};
-use cpython::{
-    buffer::{Element, PyBuffer},
-    exc::{IndexError, ValueError},
-    ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyInt, PyList,
-    PyModule, PyObject, PyResult, PySet, PySharedRef, PyTuple, PyType, Python,
-    PythonObject, ToPyObject, UnsafePyLeaked,
-};
-use hg::{
-    errors::HgError,
-    fncache::FnCache,
-    revlog::{
-        compression::CompressionConfig,
-        index::{
-            Index, IndexHeader, Phase, RevisionDataParams, SnapshotsCache,
-            INDEX_ENTRY_SIZE,
-        },
-        inner_revlog::{InnerRevlog as CoreInnerRevlog, RevisionBuffer},
-        nodemap::{Block, NodeMap, NodeMapError, NodeTree as CoreNodeTree},
-        options::{
-            RevlogDataConfig, RevlogDeltaConfig, RevlogFeatureConfig,
-            RevlogOpenOptions,
-        },
-        Graph, NodePrefix, RevlogError, RevlogIndex, RevlogType,
-    },
-    transaction::Transaction,
-    utils::files::{get_bytes_from_path, get_path_from_bytes},
-    vfs::FnCacheVfs,
-    BaseRevision, Node, Revision, UncheckedRevision, NULL_REVISION,
-};
-use std::{
-    cell::{Cell, RefCell},
-    collections::{HashMap, HashSet},
-    sync::atomic::{AtomicBool, AtomicUsize, Ordering},
-    sync::OnceLock,
-};
-use vcsgraph::graph::Graph as VCSGraph;
-
-pub struct PySharedIndex {
-    /// The underlying hg-core index
-    pub inner: &'static Index,
-}
-
-/// Return a struct implementing the `Graph` trait
-pub(crate) fn py_rust_index_to_graph(
-    py: Python,
-    index_proxy: PyObject,
-) -> PyResult<UnsafePyLeaked<PySharedIndex>> {
-    let inner_revlog = index_proxy.getattr(py, "inner")?;
-    let inner_revlog = inner_revlog.extract::<InnerRevlog>(py)?;
-    let leaked = inner_revlog.inner(py).leak_immutable();
-    // Safety: we don't leak the "faked" reference out of the `UnsafePyLeaked`
-    Ok(unsafe { leaked.map(py, |idx| PySharedIndex { inner: &idx.index }) })
-}
-
-impl Clone for PySharedIndex {
-    fn clone(&self) -> Self {
-        Self { inner: self.inner }
-    }
-}
-
-impl Graph for PySharedIndex {
-    #[inline(always)]
-    fn parents(&self, rev: Revision) -> Result<[Revision; 2], hg::GraphError> {
-        self.inner.parents(rev)
-    }
-}
-
-impl VCSGraph for PySharedIndex {
-    #[inline(always)]
-    fn parents(
-        &self,
-        rev: BaseRevision,
-    ) -> Result<vcsgraph::graph::Parents, vcsgraph::graph::GraphReadError>
-    {
-        // FIXME This trait should be reworked to decide between Revision
-        // and UncheckedRevision, get better error names, etc.
-        match Graph::parents(self, Revision(rev)) {
-            Ok(parents) => {
-                Ok(vcsgraph::graph::Parents([parents[0].0, parents[1].0]))
-            }
-            Err(hg::GraphError::ParentOutOfRange(rev)) => {
-                Err(vcsgraph::graph::GraphReadError::KeyedInvalidKey(rev.0))
-            }
-            Err(hg::GraphError::ParentOutOfOrder(_)) => {
-                Err(vcsgraph::graph::GraphReadError::InconsistentGraphData)
-            }
-        }
-    }
-}
-
-impl RevlogIndex for PySharedIndex {
-    fn len(&self) -> usize {
-        self.inner.len()
-    }
-    fn node(&self, rev: Revision) -> Option<&Node> {
-        self.inner.node(rev)
-    }
-}
-
-/// Take a (potentially) mmap'ed buffer, and return the underlying Python
-/// buffer along with the Rust slice into said buffer. We need to keep the
-/// Python buffer around, otherwise we'd get a dangling pointer once the buffer
-/// is freed from Python's side.
-///
-/// # Safety
-///
-/// The caller must make sure that the buffer is kept around for at least as
-/// long as the slice.
-#[deny(unsafe_op_in_unsafe_fn)]
-unsafe fn mmap_keeparound(
-    py: Python,
-    data: PyObject,
-) -> PyResult<(
-    PyBuffer,
-    Box<dyn std::ops::Deref<Target = [u8]> + Send + Sync + 'static>,
-)> {
-    let buf = PyBuffer::get(py, &data)?;
-    let len = buf.item_count();
-
-    // Build a slice from the mmap'ed buffer data
-    let cbuf = buf.buf_ptr();
-    let bytes = if std::mem::size_of::<u8>() == buf.item_size()
-        && buf.is_c_contiguous()
-        && u8::is_compatible_format(buf.format())
-    {
-        unsafe { std::slice::from_raw_parts(cbuf as *const u8, len) }
-    } else {
-        return Err(PyErr::new::<ValueError, _>(
-            py,
-            "buffer has an invalid memory representation".to_string(),
-        ));
-    };
-
-    Ok((buf, Box::new(bytes)))
-}
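-
-// A minimal usage sketch for `mmap_keeparound` (call site assumed for
-// illustration; it mirrors `inner_update_nodemap_data` further down): the
-// returned `PyBuffer` must be stored somewhere that outlives the slice.
-//
-//     // Safety: `buf` is kept alive (e.g. in `self.nodemap_mmap`) for as
-//     // long as the slice derived from it is in use.
-//     let (buf, bytes) = unsafe { mmap_keeparound(py, nm_data)? };
-//     let len = buf.item_count();
-//     self.nodemap_mmap(py).borrow_mut().replace(buf);
-//     let nt = CoreNodeTree::load_bytes(bytes, len);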
-
-fn py_tuple_to_revision_data_params(
-    py: Python,
-    tuple: PyTuple,
-) -> PyResult<RevisionDataParams> {
-    if tuple.len(py) < 8 {
-        // this is better than the panic promised by tup.get_item()
-        return Err(PyErr::new::<IndexError, _>(
-            py,
-            "tuple index out of range",
-        ));
-    }
-    let offset_or_flags: u64 = tuple.get_item(py, 0).extract(py)?;
-    let node_id = tuple
-        .get_item(py, 7)
-        .extract::<PyBytes>(py)?
-        .data(py)
-        .try_into()
-        .expect("nodeid should be set");
-    let flags = (offset_or_flags & 0xFFFF) as u16;
-    let data_offset = offset_or_flags >> 16;
-    Ok(RevisionDataParams {
-        flags,
-        data_offset,
-        data_compressed_length: tuple.get_item(py, 1).extract(py)?,
-        data_uncompressed_length: tuple.get_item(py, 2).extract(py)?,
-        data_delta_base: tuple.get_item(py, 3).extract(py)?,
-        link_rev: tuple.get_item(py, 4).extract(py)?,
-        parent_rev_1: tuple.get_item(py, 5).extract(py)?,
-        parent_rev_2: tuple.get_item(py, 6).extract(py)?,
-        node_id,
-        ..Default::default()
-    })
-}
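-
-// Worked example of the packing decoded above (values are purely
-// illustrative): the first tuple item stores the flags in its low 16 bits and
-// the data offset in the remaining high bits.
-//
-//     let offset_or_flags: u64 = 0x0001_0002; // 65538
-//     assert_eq!(offset_or_flags & 0xFFFF, 0x0002); // flags
-//     assert_eq!(offset_or_flags >> 16, 0x0001); // data_offset
-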
-fn revision_data_params_to_py_tuple(
-    py: Python,
-    params: RevisionDataParams,
-) -> PyTuple {
-    PyTuple::new(
-        py,
-        &[
-            params.data_offset.into_py_object(py).into_object(),
-            params
-                .data_compressed_length
-                .into_py_object(py)
-                .into_object(),
-            params
-                .data_uncompressed_length
-                .into_py_object(py)
-                .into_object(),
-            params.data_delta_base.into_py_object(py).into_object(),
-            params.link_rev.into_py_object(py).into_object(),
-            params.parent_rev_1.into_py_object(py).into_object(),
-            params.parent_rev_2.into_py_object(py).into_object(),
-            PyBytes::new(py, &params.node_id)
-                .into_py_object(py)
-                .into_object(),
-            params._sidedata_offset.into_py_object(py).into_object(),
-            params
-                ._sidedata_compressed_length
-                .into_py_object(py)
-                .into_object(),
-            params
-                .data_compression_mode
-                .into_py_object(py)
-                .into_object(),
-            params
-                ._sidedata_compression_mode
-                .into_py_object(py)
-                .into_object(),
-            params._rank.into_py_object(py).into_object(),
-        ],
-    )
-}
-
-struct PySnapshotsCache<'p> {
-    py: Python<'p>,
-    dict: PyDict,
-}
-
-impl<'p> SnapshotsCache for PySnapshotsCache<'p> {
-    fn insert_for(
-        &mut self,
-        rev: BaseRevision,
-        value: BaseRevision,
-    ) -> Result<(), RevlogError> {
-        let pyvalue = value.into_py_object(self.py).into_object();
-        match self.dict.get_item(self.py, rev) {
-            Some(obj) => obj
-                .extract::<PySet>(self.py)
-                .and_then(|set| set.add(self.py, pyvalue)),
-            None => PySet::new(self.py, vec![pyvalue])
-                .and_then(|set| self.dict.set_item(self.py, rev, set)),
-        }
-        .map_err(|_| {
-            RevlogError::Other(HgError::unsupported(
-                "Error in Python caches handling",
-            ))
-        })
-    }
-}
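-
-// Note: `insert_for` above is roughly the Rust-side equivalent of the Python
-// expression `cache.setdefault(rev, set()).add(value)` (illustrative analogy,
-// not an exact API mapping).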
-
-// Rust has no generic statics (their implementation is hard, presumably
-// because of the different compilation stages involved), so we manually
-// generate all three caches and use them in `with_filelog_cache`.
-static DELTA_CONFIG_CACHE: OnceLock<(PyObject, RevlogDeltaConfig)> =
-    OnceLock::new();
-static DATA_CONFIG_CACHE: OnceLock<(PyObject, RevlogDataConfig)> =
-    OnceLock::new();
-static FEATURE_CONFIG_CACHE: OnceLock<(PyObject, RevlogFeatureConfig)> =
-    OnceLock::new();
-
-/// Cache the first conversion from Python -> Rust config for all filelogs to
-/// save on conversion time when called in a loop.
-fn with_filelog_cache<T: Copy>(
-    py: Python,
-    py_config: &PyObject,
-    revlog_type: RevlogType,
-    cache: &OnceLock<(PyObject, T)>,
-    callback: impl Fn() -> PyResult<T>,
-) -> PyResult<T> {
-    let mut was_cached = false;
-    if revlog_type == RevlogType::Filelog {
-        if let Some((cached_py_config, rust_config)) = cache.get() {
-            was_cached = true;
-            // All filelogs in a given repository *most likely* have the
-            // exact same config, but it's not impossible that some extensions
-            // do some magic with configs or that this code will be used
-            // for longer-running processes. So compare the source `PyObject`
-            // in case the source changed, at the cost of some overhead.
-            // We can't use `py_config.eq(cached_py_config)` because all config
-            // objects are different in Python and `a is b` is false.
-            if py_config.compare(py, cached_py_config)?.is_eq() {
-                return Ok(*rust_config);
-            }
-        }
-    }
-    let config = callback()?;
-    // Do not call the lock unnecessarily if it's already been set.
-    if !was_cached && revlog_type == RevlogType::Filelog {
-        cache.set((py_config.clone_ref(py), config)).ok();
-    }
-    Ok(config)
-}
-
-fn extract_delta_config(
-    py: Python,
-    py_config: PyObject,
-    revlog_type: RevlogType,
-) -> PyResult<RevlogDeltaConfig> {
-    let get_delta_config = || {
-        let max_deltachain_span = py_config
-            .getattr(py, "max_deltachain_span")?
-            .extract::<i64>(py)?;
-
-        let revlog_delta_config = RevlogDeltaConfig {
-            general_delta: py_config
-                .getattr(py, "general_delta")?
-                .extract(py)?,
-            sparse_revlog: py_config
-                .getattr(py, "sparse_revlog")?
-                .extract(py)?,
-            max_chain_len: py_config
-                .getattr(py, "max_chain_len")?
-                .extract(py)?,
-            max_deltachain_span: if max_deltachain_span < 0 {
-                None
-            } else {
-                Some(max_deltachain_span as u64)
-            },
-            upper_bound_comp: py_config
-                .getattr(py, "upper_bound_comp")?
-                .extract(py)?,
-            delta_both_parents: py_config
-                .getattr(py, "delta_both_parents")?
-                .extract(py)?,
-            candidate_group_chunk_size: py_config
-                .getattr(py, "candidate_group_chunk_size")?
-                .extract(py)?,
-            debug_delta: py_config.getattr(py, "debug_delta")?.extract(py)?,
-            lazy_delta: py_config.getattr(py, "lazy_delta")?.extract(py)?,
-            lazy_delta_base: py_config
-                .getattr(py, "lazy_delta_base")?
-                .extract(py)?,
-        };
-        Ok(revlog_delta_config)
-    };
-    with_filelog_cache(
-        py,
-        &py_config,
-        revlog_type,
-        &DELTA_CONFIG_CACHE,
-        get_delta_config,
-    )
-}
-
-fn extract_data_config(
-    py: Python,
-    py_config: PyObject,
-    revlog_type: RevlogType,
-) -> PyResult<RevlogDataConfig> {
-    let get_data_config = || {
-        Ok(RevlogDataConfig {
-            try_pending: py_config.getattr(py, "try_pending")?.extract(py)?,
-            try_split: py_config.getattr(py, "try_split")?.extract(py)?,
-            check_ambig: py_config.getattr(py, "check_ambig")?.extract(py)?,
-            mmap_large_index: py_config
-                .getattr(py, "mmap_large_index")?
-                .extract(py)?,
-            mmap_index_threshold: py_config
-                .getattr(py, "mmap_index_threshold")?
-                .extract(py)?,
-            chunk_cache_size: py_config
-                .getattr(py, "chunk_cache_size")?
-                .extract(py)?,
-            uncompressed_cache_factor: py_config
-                .getattr(py, "uncompressed_cache_factor")?
-                .extract(py)?,
-            uncompressed_cache_count: py_config
-                .getattr(py, "uncompressed_cache_count")?
-                .extract(py)?,
-            with_sparse_read: py_config
-                .getattr(py, "with_sparse_read")?
-                .extract(py)?,
-            sr_density_threshold: py_config
-                .getattr(py, "sr_density_threshold")?
-                .extract(py)?,
-            sr_min_gap_size: py_config
-                .getattr(py, "sr_min_gap_size")?
-                .extract(py)?,
-            general_delta: py_config
-                .getattr(py, "generaldelta")?
-                .extract(py)?,
-        })
-    };
-
-    with_filelog_cache(
-        py,
-        &py_config,
-        revlog_type,
-        &DATA_CONFIG_CACHE,
-        get_data_config,
-    )
-}
-
-fn extract_feature_config(
-    py: Python,
-    py_config: PyObject,
-    revlog_type: RevlogType,
-) -> PyResult<RevlogFeatureConfig> {
-    let get_feature_config = || {
-        let engine_bytes = &py_config
-            .getattr(py, "compression_engine")?
-            .extract::<PyBytes>(py)?;
-        let compression_engine = engine_bytes.data(py);
-        let compression_engine = match compression_engine {
-            b"zlib" => {
-                let compression_options = &py_config
-                    .getattr(py, "compression_engine_options")?
-                    .extract::<PyDict>(py)?;
-                let zlib_level = compression_options
-                    .get_item(py, PyBytes::new(py, &b"zlib.level"[..]));
-                let level = if let Some(level) = zlib_level {
-                    if level.is_none(py) {
-                        None
-                    } else {
-                        Some(level.extract(py)?)
-                    }
-                } else {
-                    None
-                };
-                let mut engine = CompressionConfig::default();
-                if let Some(level) = level {
-                    engine
-                        .set_level(level)
-                        .expect("invalid compression level from Python");
-                }
-                engine
-            }
-            b"zstd" => {
-                let compression_options = &py_config
-                    .getattr(py, "compression_engine_options")?
-                    .extract::<PyDict>(py)?;
-                let zstd_level = compression_options
-                    .get_item(py, PyBytes::new(py, &b"zstd.level"[..]));
-                let level = if let Some(level) = zstd_level {
-                    if level.is_none(py) {
-                        None
-                    } else {
-                        Some(level.extract(py)?)
-                    }
-                } else {
-                    let level = compression_options
-                        .get_item(py, PyBytes::new(py, &b"level"[..]));
-                    if let Some(level) = level {
-                        if level.is_none(py) {
-                            None
-                        } else {
-                            Some(level.extract(py)?)
-                        }
-                    } else {
-                        None
-                    }
-                };
-                CompressionConfig::zstd(level)
-                    .expect("invalid compression level from Python")
-            }
-            b"none" => CompressionConfig::None,
-            e => {
-                return Err(PyErr::new::<ValueError, _>(
-                    py,
-                    format!(
-                        "invalid compression engine {}",
-                        String::from_utf8_lossy(e)
-                    ),
-                ))
-            }
-        };
-        let revlog_feature_config = RevlogFeatureConfig {
-            compression_engine,
-            censorable: py_config.getattr(py, "censorable")?.extract(py)?,
-            has_side_data: py_config
-                .getattr(py, "has_side_data")?
-                .extract(py)?,
-            compute_rank: py_config
-                .getattr(py, "compute_rank")?
-                .extract(py)?,
-            canonical_parent_order: py_config
-                .getattr(py, "canonical_parent_order")?
-                .extract(py)?,
-            enable_ellipsis: py_config
-                .getattr(py, "enable_ellipsis")?
-                .extract(py)?,
-        };
-        Ok(revlog_feature_config)
-    };
-    with_filelog_cache(
-        py,
-        &py_config,
-        revlog_type,
-        &FEATURE_CONFIG_CACHE,
-        get_feature_config,
-    )
-}
-
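-/// Build a Python `mercurial.error.RevlogError` from a Rust error message,
-/// falling back to the error raised by the import/lookup itself if
-/// `mercurial.error.RevlogError` cannot be resolved.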
-fn revlog_error_from_msg(py: Python, e: impl ToString) -> PyErr {
-    let msg = e.to_string();
-
-    match py
-        .import("mercurial.error")
-        .and_then(|m| m.get(py, "RevlogError"))
-    {
-        Err(e) => e,
-        Ok(cls) => {
-            let msg = PyBytes::new(py, msg.as_bytes());
-            PyErr::from_instance(
-                py,
-                cls.call(py, (msg,), None).ok().into_py_object(py),
-            )
-        }
-    }
-}
-
-py_class!(pub class ReadingContextManager |py| {
-    data inner_revlog: RefCell<InnerRevlog>;
-
-    def __enter__(&self) -> PyResult<PyObject> {
-        let res = self.inner_revlog(py)
-            .borrow()
-            .inner(py)
-            .borrow()
-            .enter_reading_context()
-            .map_err(|e| revlog_error_from_msg(py, e));
-        if let Err(e) = res {
-            // `__exit__` is not called from Python if `__enter__` fails
-            self.inner_revlog(py)
-                .borrow()
-                .inner(py)
-                .borrow()
-                .exit_reading_context();
-            return Err(e)
-        }
-        Ok(py.None())
-    }
-
-    def __exit__(
-        &self,
-        ty: Option<PyType>,
-        value: PyObject,
-        traceback: PyObject
-    ) -> PyResult<PyObject> {
-        // unused arguments, keep clippy from complaining without adding
-        // a general rule
-        let _ = ty;
-        let _ = value;
-        let _ = traceback;
-
-        self.inner_revlog(py)
-            .borrow()
-            .inner(py)
-            .borrow()
-            .exit_reading_context();
-        Ok(py.None())
-    }
-});
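-
-// Note: the `__enter__`/`__exit__` pair above backs the Python
-// context-manager protocol; the assumed call pattern (for illustration) is
-// `with inner_revlog.reading(): ...`, going through `InnerRevlog.reading()`
-// defined further down.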
-
-// Only used from Python *tests*
-py_class!(pub class PyFileHandle |py| {
-    data inner_file: RefCell<std::os::fd::RawFd>;
-
-    def tell(&self) -> PyResult<PyObject> {
-        let locals = PyDict::new(py);
-        locals.set_item(py, "os", py.import("os")?)?;
-        locals.set_item(py, "fd", *self.inner_file(py).borrow())?;
-        let f = py.eval("os.fdopen(fd)", None, Some(&locals))?;
-
-        // Prevent Python from closing the file after garbage collecting.
-        // This is fine since Rust is still holding on to the actual File.
-        // (and also because it's only used in tests).
-        std::mem::forget(f.clone_ref(py));
-
-        locals.set_item(py, "f", f)?;
-        let res = py.eval("f.tell()", None, Some(&locals))?;
-        Ok(res)
-    }
-});
-
-/// Wrapper around a Python transaction object, to keep `hg-core` oblivious
-/// of the fact it's being called from Python.
-pub struct PyTransaction {
-    inner: PyObject,
-}
-
-impl PyTransaction {
-    pub fn new(inner: PyObject) -> Self {
-        Self { inner }
-    }
-}
-
-impl Clone for PyTransaction {
-    fn clone(&self) -> Self {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        Self {
-            inner: self.inner.clone_ref(py),
-        }
-    }
-}
-
-impl Transaction for PyTransaction {
-    fn add(&mut self, file: impl AsRef<std::path::Path>, offset: usize) {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        let file = PyBytes::new(py, &get_bytes_from_path(file.as_ref()));
-        self.inner
-            .call_method(py, "add", (file, offset), None)
-            .expect("transaction add failed");
-    }
-}
-
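-/// Writing counterpart to `ReadingContextManager`: wraps
-/// `enter_writing_context` / `exit_writing_context` on the core inner revlog,
-/// passing along the transaction and the optional data end offset.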
-py_class!(pub class WritingContextManager |py| {
-    data inner_revlog: RefCell<InnerRevlog>;
-    data transaction: RefCell<PyTransaction>;
-    data data_end: Cell<Option<usize>>;
-
-    def __enter__(&self) -> PyResult<PyObject> {
-        let res = self.inner_revlog(py)
-            .borrow_mut()
-            .inner(py)
-            .borrow_mut()
-            .enter_writing_context(
-                self.data_end(py).get(),
-                &mut *self.transaction(py).borrow_mut()
-            ).map_err(|e| revlog_error_from_msg(py, e));
-        if let Err(e) = res {
-            // `__exit__` is not called from Python if `__enter__` fails
-            self.inner_revlog(py)
-                .borrow_mut()
-                .inner(py)
-                .borrow_mut()
-                .exit_writing_context();
-            return Err(e)
-        }
-        Ok(py.None())
-    }
-
-    def __exit__(
-        &self,
-        ty: Option<PyType>,
-        value: PyObject,
-        traceback: PyObject
-    ) -> PyResult<PyObject> {
-        // unused arguments, keep clippy from complaining without adding
-        // a general rule
-        let _ = ty;
-        let _ = value;
-        let _ = traceback;
-
-        self.inner_revlog(py)
-            .borrow_mut()
-            .inner(py)
-            .borrow_mut()
-            .exit_writing_context();
-        Ok(py.None())
-    }
-});
-
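-/// Wrapper around the Python `fncache` object, implementing the hg-core
-/// `FnCache` trait by calling back into Python for `is_loaded` and `add`.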
-struct PyFnCache {
-    fncache: PyObject,
-}
-impl PyFnCache {
-    fn new(fncache: PyObject) -> Self {
-        Self { fncache }
-    }
-}
-
-impl Clone for PyFnCache {
-    fn clone(&self) -> Self {
-        let gil = Python::acquire_gil();
-        let py = gil.python();
-        Self {
-            fncache: self.fncache.clone_ref(py),
-        }
-    }
-}
-
-/// Cache whether the fncache is loaded to avoid a Python round-trip every
-/// time. Once the fncache is loaded, it stays loaded unless we're in a very
-/// long-running process, which we don't really support for now.
-static FN_CACHE_IS_LOADED: AtomicBool = AtomicBool::new(false);
-
-impl FnCache for PyFnCache {
-    fn is_loaded(&self) -> bool {
-        if FN_CACHE_IS_LOADED.load(Ordering::Relaxed) {
-            return true;
-        }
-        let gil = Python::acquire_gil();
-        let py = gil.python();
-        // TODO raise in case of error?
-        let is_loaded = self
-            .fncache
-            .getattr(py, "is_loaded")
-            .ok()
-            .map(|o| {
-                o.extract::<bool>(py)
-                    .expect("is_loaded returned something other than a bool")
-            })
-            .unwrap_or(false);
-        if is_loaded {
-            FN_CACHE_IS_LOADED.store(true, Ordering::Relaxed);
-        }
-        is_loaded
-    }
-    fn add(&self, path: &std::path::Path) {
-        let gil = Python::acquire_gil();
-        let py = gil.python();
-        // TODO raise in case of error?
-        self.fncache
-            .call_method(
-                py,
-                "add",
-                (PyBytes::new(py, &get_bytes_from_path(path)),),
-                None,
-            )
-            .ok();
-    }
-}
-
-py_class!(pub class InnerRevlog |py| {
-    @shared data inner: CoreInnerRevlog;
-    data nt: RefCell<Option<CoreNodeTree>>;
-    data docket: RefCell<Option<PyObject>>;
-    // Holds a reference to the mmap'ed persistent nodemap data
-    data nodemap_mmap: RefCell<Option<PyBuffer>>;
-    // Holds a reference to the mmap'ed persistent index data
-    data index_mmap: RefCell<PyBuffer>;
-    data head_revs_py_list: RefCell<Option<PyList>>;
-    data head_node_ids_py_list: RefCell<Option<PyList>>;
-    data revision_cache: RefCell<Option<PyObject>>;
-    data use_persistent_nodemap: bool;
-    data nodemap_queries: AtomicUsize;
-
-    def __new__(
-        _cls,
-        vfs_base: PyObject,
-        fncache: PyObject,
-        vfs_is_readonly: bool,
-        index_data: PyObject,
-        index_file: PyObject,
-        data_file: PyObject,
-        sidedata_file: PyObject,
-        inline: bool,
-        data_config: PyObject,
-        delta_config: PyObject,
-        feature_config: PyObject,
-        chunk_cache: PyObject,
-        default_compression_header: PyObject,
-        revlog_type: usize,
-        use_persistent_nodemap: bool,
-    ) -> PyResult<Self> {
-        Self::inner_new(
-            py,
-            vfs_base,
-            fncache,
-            vfs_is_readonly,
-            index_data,
-            index_file,
-            data_file,
-            sidedata_file,
-            inline,
-            data_config,
-            delta_config,
-            feature_config,
-            chunk_cache,
-            default_compression_header,
-            revlog_type,
-            use_persistent_nodemap
-        )
-    }
-
-    def clear_cache(&self) -> PyResult<PyObject> {
-        assert!(!self.is_delaying(py)?);
-        self.revision_cache(py).borrow_mut().take();
-        self.inner(py).borrow_mut().clear_cache();
-        self.nodemap_queries(py).store(0, Ordering::Relaxed);
-        Ok(py.None())
-    }
-
-    @property def canonical_index_file(&self) -> PyResult<PyBytes> {
-        let path = self.inner(py).borrow().canonical_index_file();
-        Ok(PyBytes::new(py, &get_bytes_from_path(path)))
-    }
-
-    @property def is_delaying(&self) -> PyResult<bool> {
-        Ok(self.inner(py).borrow().is_delaying())
-    }
-
-    @property def _revisioncache(&self) -> PyResult<PyObject> {
-        let cache = &*self.revision_cache(py).borrow();
-        match cache {
-            None => Ok(py.None()),
-            Some(cache) => {
-                Ok(cache.clone_ref(py))
-            }
-        }
-
-    }
-
-    @property def _writinghandles(&self) -> PyResult<PyObject> {
-        use std::os::fd::AsRawFd;
-
-        let inner = self.inner(py).borrow();
-        let handles = inner.python_writing_handles();
-
-        match handles.as_ref() {
-            None => Ok(py.None()),
-            Some(handles) => {
-                let d_handle = if let Some(d_handle) = &handles.data_handle {
-                    let handle = RefCell::new(d_handle.file.as_raw_fd());
-                    Some(PyFileHandle::create_instance(py, handle)?)
-                } else {
-                    None
-                };
-                let handle =
-                    RefCell::new(handles.index_handle.file.as_raw_fd());
-                Ok(
-                    (
-                        PyFileHandle::create_instance(py, handle)?,
-                        d_handle,
-                        py.None(),  // Sidedata handle
-
-                    ).to_py_object(py).into_object()
-                )
-            }
-        }
-
-    }
-
-    @_revisioncache.setter def set_revision_cache(
-        &self,
-        value: Option<PyObject>
-    ) -> PyResult<()> {
-        *self.revision_cache(py).borrow_mut() = value.clone_ref(py);
-        match value {
-            None => {
-                // This means the property has been deleted, *not* that the
-                // property has been set to `None`. Whatever happens is up
-                // to the implementation. Here we just set it to `None`.
-                self
-                    .inner(py)
-                    .borrow()
-                    .last_revision_cache
-                    .lock()
-                    .expect("lock should not be held")
-                    .take();
-            },
-            Some(tuple) => {
-                if tuple.is_none(py) {
-                    self
-                        .inner(py)
-                        .borrow()
-                        .last_revision_cache
-                        .lock()
-                        .expect("lock should not be held")
-                        .take();
-                    return Ok(())
-                }
-                let node = tuple.get_item(py, 0)?.extract::<PyBytes>(py)?;
-                let node = node_from_py_bytes(py, &node)?;
-                let rev = tuple.get_item(py, 1)?.extract::<BaseRevision>(py)?;
-                // Ok because Python only sets this if the revision has been
-                // checked
-                let rev = Revision(rev);
-                let data = tuple.get_item(py, 2)?.extract::<PyBytes>(py)?;
-                let inner = self.inner(py).borrow();
-                let mut last_revision_cache = inner
-                    .last_revision_cache
-                    .lock()
-                    .expect("lock should not be held");
-                *last_revision_cache =
-                    Some((node, rev, Box::new(PyBytesDeref::new(py, data))));
-            }
-        }
-        Ok(())
-    }
-
-    @property def inline(&self) -> PyResult<bool> {
-        Ok(self.inner(py).borrow().is_inline())
-    }
-
-    @inline.setter def set_inline(
-        &self,
-        value: Option<PyObject>
-    ) -> PyResult<()> {
-        if let Some(v) = value {
-            self.inner(py).borrow_mut().inline = v.extract(py)?;
-        };
-        Ok(())
-    }
-
-    @property def index_file(&self) -> PyResult<PyBytes> {
-        Ok(
-            PyBytes::new(
-                py,
-                &get_bytes_from_path(&self.inner(py).borrow().index_file)
-            )
-        )
-    }
-
-    @index_file.setter def set_index_file(
-        &self,
-        value: Option<PyObject>
-    ) -> PyResult<()> {
-        let path = get_path_from_bytes(
-            value
-                .expect("don't delete the index path")
-                .extract::<PyBytes>(py)?
-                .data(py)
-        ).to_owned();
-        self.inner(py).borrow_mut().index_file = path;
-        Ok(())
-    }
-
-    @property def is_writing(&self) -> PyResult<bool> {
-        Ok(self.inner(py).borrow().is_writing())
-    }
-
-    @property def is_open(&self) -> PyResult<bool> {
-        Ok(self.inner(py).borrow().is_open())
-    }
-
-    def issnapshot(&self, rev: PyRevision) -> PyResult<bool> {
-        self.inner_issnapshot(py, UncheckedRevision(rev.0))
-    }
-
-    def _deltachain(&self, *args, **kw) -> PyResult<PyObject> {
-        let args = PyTuple::new(
-            py,
-            &[
-                args.get_item(py, 0),
-                kw.and_then(|d| d.get_item(py, "stoprev")).to_py_object(py),
-            ]
-        );
-        self._index_deltachain(py, &args, kw)
-    }
-
-    def compress(&self, data: PyObject) -> PyResult<PyTuple> {
-        let inner = self.inner(py).borrow();
-        let py_buffer = PyBuffer::get(py, &data)?;
-        let deref = PyBufferDeref::new(py, py_buffer)?;
-        let compressed = inner
-            .compress(&deref)
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        let compressed = compressed.as_deref();
-        let header = if compressed.is_some() {
-            PyBytes::new(py, &b""[..])
-        } else {
-            PyBytes::new(py, &b"u"[..])
-        };
-        Ok(
-            (
-                header,
-                PyBytes::new(py, compressed.unwrap_or(&deref))
-            ).to_py_object(py)
-        )
-    }
-
-    def reading(&self) -> PyResult<ReadingContextManager> {
-        ReadingContextManager::create_instance(
-            py,
-            RefCell::new(self.clone_ref(py)),
-        )
-    }
-
-    def writing(
-        &self,
-        transaction: PyObject,
-        data_end: Option<usize>,
-        sidedata_end: Option<usize>,
-    ) -> PyResult<WritingContextManager> {
-        // Silence unused argument (only relevant for changelog v2)
-        let _ = sidedata_end;
-        WritingContextManager::create_instance(
-            py,
-            RefCell::new(self.clone_ref(py)),
-            RefCell::new(PyTransaction::new(transaction)),
-            Cell::new(data_end)
-        )
-    }
-
-    def split_inline(
-        &self,
-        _tr: PyObject,
-        header: i32,
-        new_index_file_path: Option<PyObject>
-    ) -> PyResult<PyBytes> {
-        let mut inner = self.inner(py).borrow_mut();
-        let new_index_file_path = match new_index_file_path {
-            Some(path) => {
-                let path = path.extract::<PyBytes>(py)?;
-                Some(get_path_from_bytes(path.data(py)).to_owned())
-            },
-            None => None,
-        };
-        let header = IndexHeader::parse(&header.to_be_bytes());
-        let header = header.expect("invalid header bytes");
-        let path = inner
-            .split_inline(header, new_index_file_path)
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        Ok(PyBytes::new(py, &get_bytes_from_path(path)))
-    }
-
-    def get_segment_for_revs(
-        &self,
-        startrev: PyRevision,
-        endrev: PyRevision,
-    ) -> PyResult<PyTuple> {
-        let inner = self.inner(py).borrow();
-        let (offset, data) = inner
-            .get_segment_for_revs(Revision(startrev.0), Revision(endrev.0))
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        let data = PyBytes::new(py, &data);
-        Ok((offset, data).to_py_object(py))
-    }
-
-    def raw_text(
-        &self,
-        _node: PyObject,
-        rev: PyRevision
-    ) -> PyResult<PyBytes> {
-        let inner = self.inner(py).borrow();
-        let mut py_bytes = PyBytes::new(py, &[]);
-        inner
-            .raw_text(Revision(rev.0), |size, f| {
-                py_bytes = with_pybytes_buffer(py, size, f)?;
-                Ok(())
-            }).map_err(|e| revlog_error_from_msg(py, e))?;
-        Ok(py_bytes)
-    }
-
-    def _chunk(
-        &self,
-        rev: PyRevision,
-    ) -> PyResult<PyBytes> {
-        let inner = self.inner(py).borrow();
-        let chunk = inner
-            .chunk_for_rev(Revision(rev.0))
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        let chunk = PyBytes::new(py, &chunk);
-        Ok(chunk)
-    }
-
-    def write_entry(
-        &self,
-        transaction: PyObject,
-        entry: PyObject,
-        data: PyTuple,
-        _link: PyObject,
-        offset: usize,
-        _sidedata: PyObject,
-        _sidedata_offset: PyInt,
-        index_end: Option<u64>,
-        data_end: Option<u64>,
-        _sidedata_end: Option<PyInt>,
-    ) -> PyResult<PyTuple> {
-        let mut inner = self.inner(py).borrow_mut();
-        let transaction = PyTransaction::new(transaction);
-        let py_bytes = entry.extract(py)?;
-        let entry = PyBytesDeref::new(py, py_bytes);
-        let header = data.get_item(py, 0).extract::<PyBytes>(py)?;
-        let header = header.data(py);
-        let data = data.get_item(py, 1);
-        let py_bytes = data.extract(py)?;
-        let data = PyBytesDeref::new(py, py_bytes);
-        Ok(
-            inner.write_entry(
-                transaction,
-                &entry,
-                (header, &data),
-                offset,
-                index_end,
-                data_end
-            ).map_err(|e| revlog_error_from_msg(py, e))?
-             .to_py_object(py)
-        )
-    }
-
-    def delay(&self) -> PyResult<Option<PyBytes>> {
-        let path = self.inner(py)
-            .borrow_mut()
-            .delay()
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        Ok(path.map(|p| PyBytes::new(py, &get_bytes_from_path(p))))
-    }
-
-    def write_pending(&self) -> PyResult<PyTuple> {
-        let (path, any_pending) = self.inner(py)
-            .borrow_mut()
-            .write_pending()
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        let maybe_path = match path {
-            Some(path) => {
-                PyBytes::new(py, &get_bytes_from_path(path)).into_object()
-            },
-            None => {
-                py.None()
-            }
-        };
-        Ok(
-            (
-                maybe_path,
-                any_pending
-            ).to_py_object(py)
-        )
-    }
-
-    def finalize_pending(&self) -> PyResult<PyBytes> {
-        let path = self.inner(py)
-            .borrow_mut()
-            .finalize_pending()
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        Ok(PyBytes::new(py, &get_bytes_from_path(path)))
-    }
-
-    // -- forwarded index methods --
-
-    def _index_get_rev(&self, node: PyBytes) -> PyResult<Option<PyRevision>> {
-        let node = node_from_py_bytes(py, &node)?;
-        // Filelogs have no persistent nodemaps and are often small, use a
-        // brute force lookup from the end backwards. If there is a very large
-        // filelog (automation file that changes every commit etc.), it also
-        // seems to work quite well for all measured purposes so far.
-        let mut nodemap_queries =
-            self.nodemap_queries(py).fetch_add(1, Ordering::Relaxed);
-        // Still need to add since `fetch_add` returns the old value
-        nodemap_queries += 1;
-        if !*self.use_persistent_nodemap(py) && nodemap_queries <= 4 {
-            let idx = &self.inner(py).borrow().index;
-            let res =
-                idx.rev_from_node_no_persistent_nodemap(node.into()).ok();
-            return Ok(res.map(Into::into))
-        }
-        let opt = self.get_nodetree(py)?.borrow();
-        let nt = opt.as_ref().expect("nodetree should be set");
-        let ridx = &self.inner(py).borrow().index;
-        let rust_rev =
-            nt.find_bin(ridx, node.into()).map_err(|e| nodemap_error(py, e))?;
-        Ok(rust_rev.map(Into::into))
-    }
-
-    /// Same as `_index_get_rev()` but raises a bare `error.RevlogError` if
-    /// the node is not found.
-    ///
-    /// No need to repeat `node` in the exception: `mercurial/revlog.py`
-    /// will catch it and rewrap it with the node.
-    def _index_rev(&self, node: PyBytes) -> PyResult<PyRevision> {
-        self._index_get_rev(py, node)?.ok_or_else(|| revlog_error(py))
-    }
-
-    /// return True if the node exists in the index
-    def _index_has_node(&self, node: PyBytes) -> PyResult<bool> {
-        // TODO OPTIM we could avoid a needless conversion here,
-        // to do when scaffolding for pure Rust switch is removed,
-        // as `_index_get_rev()` currently does the necessary assertions
-        self._index_get_rev(py, node).map(|opt| opt.is_some())
-    }
-
-    /// find length of shortest hex nodeid of a binary ID
-    def _index_shortest(&self, node: PyBytes) -> PyResult<usize> {
-        let opt = self.get_nodetree(py)?.borrow();
-        let nt = opt.as_ref().expect("nodetree should be set");
-        let idx = &self.inner(py).borrow().index;
-        match nt.unique_prefix_len_node(idx, &node_from_py_bytes(py, &node)?)
-        {
-            Ok(Some(l)) => Ok(l),
-            Ok(None) => Err(revlog_error(py)),
-            Err(e) => Err(nodemap_error(py, e)),
-        }
-    }
-
-    def _index_partialmatch(
-        &self,
-        node: PyObject
-    ) -> PyResult<Option<PyBytes>> {
-        let opt = self.get_nodetree(py)?.borrow();
-        let nt = opt.as_ref().expect("nodetree should be set");
-        let idx = &self.inner(py).borrow().index;
-
-        let node = node.extract::<PyBytes>(py)?;
-        let node_as_string = String::from_utf8_lossy(node.data(py));
-
-        let prefix = NodePrefix::from_hex(node_as_string.to_string())
-            .map_err(|_| PyErr::new::<ValueError, _>(
-                py, format!("Invalid node or prefix '{}'", node_as_string))
-            )?;
-
-        nt.find_bin(idx, prefix)
-            // TODO make an inner API returning the node directly
-            .map(|opt| opt.map(|rev| {
-                    PyBytes::new(
-                        py,
-                        idx.node(rev).expect("node should exist").as_bytes()
-                    )
-            }))
-            .map_err(|e| nodemap_error(py, e))
-
-    }
-
-    /// append an index entry
-    def _index_append(&self, tup: PyTuple) -> PyResult<PyObject> {
-        if tup.len(py) < 8 {
-            // this is better than the panic promised by tup.get_item()
-            return Err(
-                PyErr::new::<IndexError, _>(py, "tuple index out of range"))
-        }
-        let node_bytes = tup.get_item(py, 7).extract(py)?;
-        let node = node_from_py_object(py, &node_bytes)?;
-
-        let rev = self.len(py)? as BaseRevision;
-
-        // This is ok since we will just add the revision to the index
-        let rev = Revision(rev);
-        self.inner(py)
-            .borrow_mut()
-            .index
-            .append(py_tuple_to_revision_data_params(py, tup)?)
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        let idx = &self.inner(py).borrow().index;
-        self.get_nodetree(py)?
-            .borrow_mut()
-            .as_mut()
-            .expect("nodetree should be set")
-            .insert(idx, &node, rev)
-            .map_err(|e| nodemap_error(py, e))?;
-        Ok(py.None())
-    }
-
-    def _index___delitem__(&self, key: PyObject) -> PyResult<PyObject> {
-        // __delitem__ is both for `del idx[r]` and `del idx[r1:r2]`
-        let start = if let Ok(rev) = key.extract(py) {
-            UncheckedRevision(rev)
-        } else {
-            let start = key.getattr(py, "start")?;
-            UncheckedRevision(start.extract(py)?)
-        };
-        let mut borrow = self.inner(py).borrow_mut();
-        let start = borrow
-            .index
-            .check_revision(start)
-            .ok_or_else(|| {
-                nodemap_error(py, NodeMapError::RevisionNotInIndex(start))
-            })?;
-        borrow.index
-            .remove(start)
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        drop(borrow);
-        let mut opt = self.get_nodetree(py)?.borrow_mut();
-        let nt = opt.as_mut().expect("nodetree should be set");
-        nt.invalidate_all();
-        self.fill_nodemap(py, nt)?;
-        Ok(py.None())
-    }
-
-    /// return the gca set of the given revs
-    def _index_ancestors(&self, *args, **_kw) -> PyResult<PyObject> {
-        let rust_res = self.inner_ancestors(py, args)?;
-        Ok(rust_res)
-    }
-
-    /// return the heads of the common ancestors of the given revs
-    def _index_commonancestorsheads(
-        &self,
-        *args,
-        **_kw
-    ) -> PyResult<PyObject> {
-        let rust_res = self.inner_commonancestorsheads(py, args)?;
-        Ok(rust_res)
-    }
-
-    /// Clear the index caches and inner py_class data.
-    /// It is Python's responsibility to call `update_nodemap_data` again.
-    def _index_clearcaches(&self) -> PyResult<PyObject> {
-        self.nt(py).borrow_mut().take();
-        self.docket(py).borrow_mut().take();
-        self.nodemap_mmap(py).borrow_mut().take();
-        self.head_revs_py_list(py).borrow_mut().take();
-        self.head_node_ids_py_list(py).borrow_mut().take();
-        self.inner(py).borrow_mut().index.clear_caches();
-        Ok(py.None())
-    }
-
-    /// return the raw binary string representing a revision
-    def _index_entry_binary(&self, *args, **_kw) -> PyResult<PyObject> {
-        let rindex = &self.inner(py).borrow().index;
-        let rev = UncheckedRevision(args.get_item(py, 0).extract(py)?);
-        let rust_bytes = rindex
-            .check_revision(rev)
-            .and_then(|r| rindex.entry_binary(r))
-            .ok_or_else(|| rev_not_in_index(py, rev))?;
-        let rust_res = PyBytes::new(py, rust_bytes).into_object();
-        Ok(rust_res)
-    }
-
-
-    /// return a binary packed version of the header
-    def _index_pack_header(&self, *args, **_kw) -> PyResult<PyObject> {
-        let rindex = &self.inner(py).borrow().index;
-        let packed = rindex.pack_header(args.get_item(py, 0).extract(py)?);
-        let rust_res = PyBytes::new(py, &packed).into_object();
-        Ok(rust_res)
-    }
-
-    /// compute phases
-    def _index_computephasesmapsets(
-        &self,
-        *args,
-        **_kw
-    ) -> PyResult<PyObject> {
-        let py_roots = args.get_item(py, 0).extract::<PyDict>(py)?;
-        let rust_res = self.inner_computephasesmapsets(py, py_roots)?;
-        Ok(rust_res)
-    }
-
-    /// reachableroots
-    def _index_reachableroots2(&self, *args, **_kw) -> PyResult<PyObject> {
-        let rust_res = self.inner_reachableroots2(
-            py,
-            UncheckedRevision(args.get_item(py, 0).extract(py)?),
-            args.get_item(py, 1),
-            args.get_item(py, 2),
-            args.get_item(py, 3).extract(py)?,
-        )?;
-        Ok(rust_res)
-    }
-
-    /// get head revisions
-    def _index_headrevs(&self, *args, **_kw) -> PyResult<PyObject> {
-        let (filtered_revs, stop_rev) = match &args.len(py) {
-            0 => Ok((py.None(), py.None())),
-            1 => Ok((args.get_item(py, 0), py.None())),
-            2 => Ok((args.get_item(py, 0), args.get_item(py, 1))),
-            _ => Err(PyErr::new::<cpython::exc::TypeError, _>(
-                py,
-                "too many arguments",
-            )),
-        }?;
-        self.inner_headrevs(py, &filtered_revs, &stop_rev)
-    }
-
-    /// get head nodeids
-    def _index_head_node_ids(&self) -> PyResult<PyObject> {
-        let rust_res = self.inner_head_node_ids(py)?;
-        Ok(rust_res)
-    }
-
-    /// get diff in head revisions
-    def _index_headrevsdiff(&self, *args, **_kw) -> PyResult<PyObject> {
-        let rust_res = self.inner_headrevsdiff(
-          py,
-          &args.get_item(py, 0),
-          &args.get_item(py, 1))?;
-        Ok(rust_res)
-    }
-
-    /// True if the object is a snapshot
-    def _index_issnapshot(&self, *args, **_kw) -> PyResult<bool> {
-        let rev = UncheckedRevision(args.get_item(py, 0).extract(py)?);
-        self.inner_issnapshot(py, rev)
-    }
-
-    /// Gather snapshot data in a cache dict
-    def _index_findsnapshots(&self, *args, **_kw) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let cache: PyDict = args.get_item(py, 0).extract(py)?;
-        // this method operates by setting new values in the cache,
-        // hence we compare results by letting the C implementation
-        // operate over a deep copy of the cache, and finally compare
-        // both caches.
-        let c_cache = PyDict::new(py);
-        for (k, v) in cache.items(py) {
-            c_cache.set_item(py, k, PySet::new(py, v)?)?;
-        }
-
-        let start_rev = UncheckedRevision(args.get_item(py, 1).extract(py)?);
-        let end_rev = UncheckedRevision(args.get_item(py, 2).extract(py)?);
-        let mut cache_wrapper = PySnapshotsCache{ py, dict: cache };
-        index.find_snapshots(
-            start_rev,
-            end_rev,
-            &mut cache_wrapper,
-        ).map_err(|_| revlog_error(py))?;
-        Ok(py.None())
-    }
-
-    /// determine revisions with deltas to reconstruct fulltext
-    def _index_deltachain(&self, *args, **_kw) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let rev = args.get_item(py, 0).extract::<BaseRevision>(py)?.into();
-        let stop_rev =
-            args.get_item(py, 1).extract::<Option<BaseRevision>>(py)?;
-        let rev = index.check_revision(rev).ok_or_else(|| {
-            nodemap_error(py, NodeMapError::RevisionNotInIndex(rev))
-        })?;
-        let stop_rev = if let Some(stop_rev) = stop_rev {
-            let stop_rev = UncheckedRevision(stop_rev);
-            Some(index.check_revision(stop_rev).ok_or_else(|| {
-                nodemap_error(py, NodeMapError::RevisionNotInIndex(stop_rev))
-            })?)
-        } else {None};
-        let (chain, stopped) = index.delta_chain(
-            rev, stop_rev
-        ).map_err(|e| {
-            PyErr::new::<cpython::exc::ValueError, _>(py, e.to_string())
-        })?;
-
-        let chain: Vec<_> = chain.into_iter().map(|r| r.0).collect();
-        Ok(
-            PyTuple::new(
-                py,
-                &[
-                    chain.into_py_object(py).into_object(),
-                    stopped.into_py_object(py).into_object()
-                ]
-            ).into_object()
-        )
-    }
-
-    /// slice planned chunk read to reach a density threshold
-    def _index_slicechunktodensity(&self, *args, **_kw) -> PyResult<PyObject> {
-        let rust_res = self.inner_slicechunktodensity(
-            py,
-            args.get_item(py, 0),
-            args.get_item(py, 1).extract(py)?,
-            args.get_item(py, 2).extract(py)?
-        )?;
-        Ok(rust_res)
-    }
-
-    def _index___len__(&self) -> PyResult<usize> {
-        self.len(py)
-    }
-
-    def _index___getitem__(&self, key: PyObject) -> PyResult<PyObject> {
-        let rust_res = self.inner_getitem(py, key.clone_ref(py))?;
-        Ok(rust_res)
-    }
-
-    def _index___contains__(&self, item: PyObject) -> PyResult<bool> {
-        // ObjectProtocol does not seem to provide contains(), so
-        // this is an equivalent implementation of the index_contains()
-        // defined in revlog.c
-        match item.extract::<i32>(py) {
-            Ok(rev) => {
-                Ok(rev >= -1 && rev < self.len(py)? as BaseRevision)
-            }
-            Err(_) => {
-                let item_bytes: PyBytes = item.extract(py)?;
-                let rust_res = self._index_has_node(py, item_bytes)?;
-                Ok(rust_res)
-            }
-        }
-    }
-
-    def _index_nodemap_data_all(&self) -> PyResult<PyBytes> {
-        self.inner_nodemap_data_all(py)
-    }
-
-    def _index_nodemap_data_incremental(&self) -> PyResult<PyObject> {
-        self.inner_nodemap_data_incremental(py)
-    }
-
-    def _index_update_nodemap_data(
-        &self,
-        docket: PyObject,
-        nm_data: PyObject
-    ) -> PyResult<PyObject> {
-        self.inner_update_nodemap_data(py, docket, nm_data)
-    }
-
-    @property
-    def _index_entry_size(&self) -> PyResult<PyInt> {
-        let rust_res: PyInt = INDEX_ENTRY_SIZE.to_py_object(py);
-        Ok(rust_res)
-    }
-
-    @property
-    def _index_rust_ext_compat(&self) -> PyResult<PyInt> {
-        // will be entirely removed once the Rust index is used everywhere;
-        // yet useful to implement in Rust to detangle things when removing
-        // `self.cindex`
-        let rust_res: PyInt = 1.to_py_object(py);
-        Ok(rust_res)
-    }
-
-    @property
-    def _index_is_rust(&self) -> PyResult<PyBool> {
-        Ok(false.to_py_object(py))
-    }
-
-
-});
-
-/// Forwarded index methods
-impl InnerRevlog {
-    pub fn pub_inner<'p, 'a: 'p>(
-        &'a self,
-        py: Python<'p>,
-    ) -> PySharedRef<'p, CoreInnerRevlog> {
-        self.inner(py)
-    }
-
-    fn len(&self, py: Python) -> PyResult<usize> {
-        let rust_index_len = self.inner(py).borrow().index.len();
-        Ok(rust_index_len)
-    }
-    /// This is scaffolding at this point, but it could also become
-    /// a way to start a persistent nodemap or perform a
-    /// vacuum / repack operation
-    fn fill_nodemap(
-        &self,
-        py: Python,
-        nt: &mut CoreNodeTree,
-    ) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        for r in 0..self.len(py)? {
-            let rev = Revision(r as BaseRevision);
-            // in this case node() won't ever return None
-            nt.insert(index, index.node(rev).expect("node should exist"), rev)
-                .map_err(|e| nodemap_error(py, e))?
-        }
-        Ok(py.None())
-    }
-
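-    /// Lazily build the in-memory nodetree the first time it is needed,
-    /// filling it from the index, and return the cached cell afterwards.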
-    fn get_nodetree<'a>(
-        &'a self,
-        py: Python<'a>,
-    ) -> PyResult<&'a RefCell<Option<CoreNodeTree>>> {
-        if self.nt(py).borrow().is_none() {
-            let readonly = Box::<Vec<_>>::default();
-            let mut nt = CoreNodeTree::load_bytes(readonly, 0);
-            self.fill_nodemap(py, &mut nt)?;
-            self.nt(py).borrow_mut().replace(nt);
-        }
-        Ok(self.nt(py))
-    }
-
-    /// Returns the full nodemap bytes to be written as-is to disk
-    fn inner_nodemap_data_all(&self, py: Python) -> PyResult<PyBytes> {
-        let nodemap = self
-            .get_nodetree(py)?
-            .borrow_mut()
-            .take()
-            .expect("nodetree should exist");
-        let (readonly, bytes) = nodemap.into_readonly_and_added_bytes();
-
-        // If there's anything readonly, we need to build the data again from
-        // scratch
-        let bytes = if readonly.len() > 0 {
-            let mut nt = CoreNodeTree::load_bytes(Box::<Vec<_>>::default(), 0);
-            self.fill_nodemap(py, &mut nt)?;
-
-            let (readonly, bytes) = nt.into_readonly_and_added_bytes();
-            assert_eq!(readonly.len(), 0);
-
-            bytes
-        } else {
-            bytes
-        };
-
-        let bytes = PyBytes::new(py, &bytes);
-        Ok(bytes)
-    }
-
-    /// Returns the last saved docket along with the size of any changed data
-    /// (in number of blocks), and said data as bytes.
-    fn inner_nodemap_data_incremental(
-        &self,
-        py: Python,
-    ) -> PyResult<PyObject> {
-        let docket = self.docket(py).borrow();
-        let docket = match docket.as_ref() {
-            Some(d) => d,
-            None => return Ok(py.None()),
-        };
-
-        let node_tree = self
-            .get_nodetree(py)?
-            .borrow_mut()
-            .take()
-            .expect("nodetree should exist");
-        let masked_blocks = node_tree.masked_readonly_blocks();
-        let (_, data) = node_tree.into_readonly_and_added_bytes();
-        let changed = masked_blocks * std::mem::size_of::<Block>();
-
-        Ok((docket, changed, PyBytes::new(py, &data))
-            .to_py_object(py)
-            .into_object())
-    }
-
-    /// Update the nodemap from the new (mmaped) data.
-    /// The docket is kept as a reference for later incremental calls.
-    fn inner_update_nodemap_data(
-        &self,
-        py: Python,
-        docket: PyObject,
-        nm_data: PyObject,
-    ) -> PyResult<PyObject> {
-        // Safety: we keep the buffer around inside the class as `nodemap_mmap`
-        let (buf, bytes) = unsafe { mmap_keeparound(py, nm_data)? };
-        let len = buf.item_count();
-        self.nodemap_mmap(py).borrow_mut().replace(buf);
-
-        let mut nt = CoreNodeTree::load_bytes(bytes, len);
-
-        let data_tip = docket
-            .getattr(py, "tip_rev")?
-            .extract::<BaseRevision>(py)?
-            .into();
-        self.docket(py).borrow_mut().replace(docket.clone_ref(py));
-        let idx = &self.inner(py).borrow().index;
-        let data_tip = idx.check_revision(data_tip).ok_or_else(|| {
-            nodemap_error(py, NodeMapError::RevisionNotInIndex(data_tip))
-        })?;
-        let current_tip = idx.len();
-
-        for r in (data_tip.0 + 1)..current_tip as BaseRevision {
-            let rev = Revision(r);
-            // in this case node() won't ever return None
-            nt.insert(idx, idx.node(rev).expect("node should exist"), rev)
-                .map_err(|e| nodemap_error(py, e))?
-        }
-
-        *self.nt(py).borrow_mut() = Some(nt);
-
-        Ok(py.None())
-    }
-
-    fn inner_getitem(&self, py: Python, key: PyObject) -> PyResult<PyObject> {
-        let idx = &self.inner(py).borrow().index;
-        Ok(match key.extract::<BaseRevision>(py) {
-            Ok(key_as_int) => {
-                let entry_params = if key_as_int == NULL_REVISION.0 {
-                    RevisionDataParams::default()
-                } else {
-                    let rev = UncheckedRevision(key_as_int);
-                    match idx.entry_as_params(rev) {
-                        Some(e) => e,
-                        None => {
-                            return Err(PyErr::new::<IndexError, _>(
-                                py,
-                                "revlog index out of range",
-                            ));
-                        }
-                    }
-                };
-                revision_data_params_to_py_tuple(py, entry_params)
-                    .into_object()
-            }
-            _ => self
-                ._index_get_rev(py, key.extract::<PyBytes>(py)?)?
-                .map_or_else(
-                    || py.None(),
-                    |py_rev| py_rev.into_py_object(py).into_object(),
-                ),
-        })
-    }
-
-    fn inner_head_node_ids(&self, py: Python) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-
-        // We don't use the shortcut here, as it's actually slower to loop
-        // through the cached `PyList` than to re-do the whole computation for
-        // large lists, which are the performance sensitive ones anyway.
-        let head_revs = index.head_revs().map_err(|e| graph_error(py, e))?;
-        let res: Vec<_> = head_revs
-            .iter()
-            .map(|r| {
-                PyBytes::new(
-                    py,
-                    index
-                        .node(*r)
-                        .expect("rev should have been in the index")
-                        .as_bytes(),
-                )
-                .into_object()
-            })
-            .collect();
-
-        self.cache_new_heads_py_list(&head_revs, py);
-        self.cache_new_heads_node_ids_py_list(&head_revs, py);
-
-        Ok(PyList::new(py, &res).into_object())
-    }
-
-    fn inner_headrevs(
-        &self,
-        py: Python,
-        filtered_revs: &PyObject,
-        stop_rev: &PyObject,
-    ) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let stop_rev = if stop_rev.is_none(py) {
-            None
-        } else {
-            let rev = stop_rev.extract::<i32>(py)?;
-            if 0 <= rev && rev < index.len() as BaseRevision {
-                Some(Revision(rev))
-            } else {
-                None
-            }
-        };
-        let from_core = match (filtered_revs.is_none(py), stop_rev.is_none()) {
-            (true, true) => index.head_revs_shortcut(),
-            (true, false) => {
-                index.head_revs_advanced(&HashSet::new(), stop_rev, false)
-            }
-            _ => {
-                let filtered_revs =
-                    rev_pyiter_collect(py, filtered_revs, index)?;
-                index.head_revs_advanced(
-                    &filtered_revs,
-                    stop_rev,
-                    stop_rev.is_none(),
-                )
-            }
-        };
-
-        if stop_rev.is_some() {
-            // we don't cache result for now
-            let new_heads = from_core
-                .map_err(|e| graph_error(py, e))?
-                .expect("this case should not be cached yet");
-
-            let as_vec: Vec<PyObject> = new_heads
-                .iter()
-                .map(|r| PyRevision::from(*r).into_py_object(py).into_object())
-                .collect();
-            Ok(PyList::new(py, &as_vec).into_object())
-        } else {
-            if let Some(new_heads) =
-                from_core.map_err(|e| graph_error(py, e))?
-            {
-                self.cache_new_heads_py_list(&new_heads, py);
-            }
-
-            Ok(self
-                .head_revs_py_list(py)
-                .borrow()
-                .as_ref()
-                .expect("head revs should be cached")
-                .clone_ref(py)
-                .into_object())
-        }
-    }
-
-    fn check_revision(
-        index: &Index,
-        rev: UncheckedRevision,
-        py: Python,
-    ) -> PyResult<Revision> {
-        index
-            .check_revision(rev)
-            .ok_or_else(|| rev_not_in_index(py, rev))
-    }
-
-    fn inner_headrevsdiff(
-        &self,
-        py: Python,
-        begin: &PyObject,
-        end: &PyObject,
-    ) -> PyResult<PyObject> {
-        let begin = begin.extract::<BaseRevision>(py)?;
-        let end = end.extract::<BaseRevision>(py)?;
-        let index = &self.inner(py).borrow().index;
-        let begin =
-            Self::check_revision(index, UncheckedRevision(begin - 1), py)?;
-        let end = Self::check_revision(index, UncheckedRevision(end - 1), py)?;
-        let (removed, added) = index
-            .head_revs_diff(begin, end)
-            .map_err(|e| graph_error(py, e))?;
-        let removed: Vec<_> =
-            removed.into_iter().map(PyRevision::from).collect();
-        let added: Vec<_> = added.into_iter().map(PyRevision::from).collect();
-        let res = (removed, added).to_py_object(py).into_object();
-        Ok(res)
-    }
-
-    fn cache_new_heads_node_ids_py_list(
-        &self,
-        new_heads: &[Revision],
-        py: Python<'_>,
-    ) -> PyList {
-        let index = &self.inner(py).borrow().index;
-        let as_vec: Vec<PyObject> = new_heads
-            .iter()
-            .map(|r| {
-                PyBytes::new(
-                    py,
-                    index
-                        .node(*r)
-                        .expect("rev should have been in the index")
-                        .as_bytes(),
-                )
-                .into_object()
-            })
-            .collect();
-        let new_heads_py_list = PyList::new(py, &as_vec);
-        *self.head_node_ids_py_list(py).borrow_mut() =
-            Some(new_heads_py_list.clone_ref(py));
-        new_heads_py_list
-    }
-
-    fn cache_new_heads_py_list(
-        &self,
-        new_heads: &[Revision],
-        py: Python<'_>,
-    ) -> PyList {
-        let as_vec: Vec<PyObject> = new_heads
-            .iter()
-            .map(|r| PyRevision::from(*r).into_py_object(py).into_object())
-            .collect();
-        let new_heads_py_list = PyList::new(py, &as_vec);
-        *self.head_revs_py_list(py).borrow_mut() =
-            Some(new_heads_py_list.clone_ref(py));
-        new_heads_py_list
-    }
-
-    fn inner_ancestors(
-        &self,
-        py: Python,
-        py_revs: &PyTuple,
-    ) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let revs: Vec<_> = rev_pyiter_collect(py, py_revs.as_object(), index)?;
-        let as_vec: Vec<_> = index
-            .ancestors(&revs)
-            .map_err(|e| graph_error(py, e))?
-            .iter()
-            .map(|r| PyRevision::from(*r).into_py_object(py).into_object())
-            .collect();
-        Ok(PyList::new(py, &as_vec).into_object())
-    }
-
-    fn inner_commonancestorsheads(
-        &self,
-        py: Python,
-        py_revs: &PyTuple,
-    ) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let revs: Vec<_> = rev_pyiter_collect(py, py_revs.as_object(), index)?;
-        let as_vec: Vec<_> = index
-            .common_ancestor_heads(&revs)
-            .map_err(|e| graph_error(py, e))?
-            .iter()
-            .map(|r| PyRevision::from(*r).into_py_object(py).into_object())
-            .collect();
-        Ok(PyList::new(py, &as_vec).into_object())
-    }
-
-    fn inner_computephasesmapsets(
-        &self,
-        py: Python,
-        py_roots: PyDict,
-    ) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let roots: Result<HashMap<Phase, Vec<Revision>>, PyErr> = py_roots
-            .items_list(py)
-            .iter(py)
-            .map(|r| {
-                let phase = r.get_item(py, 0)?;
-                let revs: Vec<_> =
-                    rev_pyiter_collect(py, &r.get_item(py, 1)?, index)?;
-                let phase = Phase::try_from(phase.extract::<usize>(py)?)
-                    .map_err(|_| revlog_error(py));
-                Ok((phase?, revs))
-            })
-            .collect();
-        let (len, phase_maps) = index
-            .compute_phases_map_sets(roots?)
-            .map_err(|e| graph_error(py, e))?;
-
-        // Ugly hack, but temporary
-        const IDX_TO_PHASE_NUM: [usize; 4] = [1, 2, 32, 96];
-        let py_phase_maps = PyDict::new(py);
-        for (idx, roots) in phase_maps.into_iter().enumerate() {
-            let phase_num = IDX_TO_PHASE_NUM[idx].into_py_object(py);
-            // This is a bit faster than collecting into a `Vec` and passing
-            // it to `PySet::new`.
-            let set = PySet::empty(py)?;
-            for rev in roots {
-                set.add(py, PyRevision::from(rev).into_py_object(py))?;
-            }
-            py_phase_maps.set_item(py, phase_num, set)?;
-        }
-        Ok(PyTuple::new(
-            py,
-            &[
-                len.into_py_object(py).into_object(),
-                py_phase_maps.into_object(),
-            ],
-        )
-        .into_object())
-    }
-
-    fn inner_slicechunktodensity(
-        &self,
-        py: Python,
-        revs: PyObject,
-        target_density: f64,
-        min_gap_size: usize,
-    ) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let revs: Vec<_> = rev_pyiter_collect(py, &revs, index)?;
-        let as_nested_vec =
-            index.slice_chunk_to_density(&revs, target_density, min_gap_size);
-        let mut res = Vec::with_capacity(as_nested_vec.len());
-        let mut py_chunk = Vec::new();
-        for chunk in as_nested_vec {
-            py_chunk.clear();
-            py_chunk.reserve_exact(chunk.len());
-            for rev in chunk {
-                py_chunk.push(
-                    PyRevision::from(rev).into_py_object(py).into_object(),
-                );
-            }
-            res.push(PyList::new(py, &py_chunk).into_object());
-        }
-        // This is just to do the same as C, not sure why it does this
-        if res.len() == 1 {
-            Ok(PyTuple::new(py, &res).into_object())
-        } else {
-            Ok(PyList::new(py, &res).into_object())
-        }
-    }
-
-    fn inner_reachableroots2(
-        &self,
-        py: Python,
-        min_root: UncheckedRevision,
-        heads: PyObject,
-        roots: PyObject,
-        include_path: bool,
-    ) -> PyResult<PyObject> {
-        let index = &self.inner(py).borrow().index;
-        let heads = rev_pyiter_collect_or_else(py, &heads, index, |_rev| {
-            PyErr::new::<IndexError, _>(py, "head out of range")
-        })?;
-        let roots: Result<_, _> = roots
-            .iter(py)?
-            .map(|r| {
-                r.and_then(|o| match o.extract::<PyRevision>(py) {
-                    Ok(r) => Ok(UncheckedRevision(r.0)),
-                    Err(e) => Err(e),
-                })
-            })
-            .collect();
-        let as_set = index
-            .reachable_roots(min_root, heads, roots?, include_path)
-            .map_err(|e| graph_error(py, e))?;
-        let as_vec: Vec<PyObject> = as_set
-            .iter()
-            .map(|r| PyRevision::from(*r).into_py_object(py).into_object())
-            .collect();
-        Ok(PyList::new(py, &as_vec).into_object())
-    }
-    fn inner_issnapshot(
-        &self,
-        py: Python,
-        rev: UncheckedRevision,
-    ) -> PyResult<bool> {
-        let inner = &self.inner(py).borrow();
-        let index = &self.inner(py).borrow().index;
-        let rev = index
-            .check_revision(rev)
-            .ok_or_else(|| rev_not_in_index(py, rev))?;
-        let result = inner.is_snapshot(rev).map_err(|e| {
-            PyErr::new::<cpython::exc::ValueError, _>(py, e.to_string())
-        })?;
-        Ok(result)
-    }
-}
-
-impl InnerRevlog {
-    pub fn inner_new(
-        py: Python,
-        vfs_base: PyObject,
-        fncache: PyObject,
-        vfs_is_readonly: bool,
-        index_data: PyObject,
-        index_file: PyObject,
-        data_file: PyObject,
-        _sidedata_file: PyObject,
-        inline: bool,
-        data_config: PyObject,
-        delta_config: PyObject,
-        feature_config: PyObject,
-        _chunk_cache: PyObject,
-        _default_compression_header: PyObject,
-        revlog_type: usize,
-        use_persistent_nodemap: bool,
-    ) -> PyResult<Self> {
-        let index_file =
-            get_path_from_bytes(index_file.extract::<PyBytes>(py)?.data(py))
-                .to_owned();
-        let data_file =
-            get_path_from_bytes(data_file.extract::<PyBytes>(py)?.data(py))
-                .to_owned();
-        let revlog_type = RevlogType::try_from(revlog_type)
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-        let data_config = extract_data_config(py, data_config, revlog_type)?;
-        let delta_config =
-            extract_delta_config(py, delta_config, revlog_type)?;
-        let feature_config =
-            extract_feature_config(py, feature_config, revlog_type)?;
-        let options = RevlogOpenOptions::new(
-            inline,
-            data_config,
-            delta_config,
-            feature_config,
-        );
-
-        // Safety: we keep the buffer around inside the class as `index_mmap`
-        let (buf, bytes) = unsafe { mmap_keeparound(py, index_data)? };
-        let index = Index::new(bytes, options.index_header())
-            .map_err(|e| revlog_error_from_msg(py, e))?;
-
-        let base = &vfs_base.extract::<PyBytes>(py)?;
-        let base = get_path_from_bytes(base.data(py)).to_owned();
-        let core = CoreInnerRevlog::new(
-            Box::new(FnCacheVfs::new(
-                base,
-                vfs_is_readonly,
-                Box::new(PyFnCache::new(fncache)),
-            )),
-            index,
-            index_file,
-            data_file,
-            data_config,
-            delta_config,
-            feature_config,
-        );
-        Self::create_instance(
-            py,
-            core,
-            RefCell::new(None),
-            RefCell::new(None),
-            RefCell::new(None),
-            RefCell::new(buf),
-            RefCell::new(None),
-            RefCell::new(None),
-            RefCell::new(None),
-            use_persistent_nodemap,
-            AtomicUsize::new(0),
-        )
-    }
-}
-
-py_class!(pub class NodeTree |py| {
-    data nt: RefCell<CoreNodeTree>;
-    data index: RefCell<UnsafePyLeaked<PySharedIndex>>;
-
-    def __new__(_cls, index: PyObject) -> PyResult<NodeTree> {
-        let index = py_rust_index_to_graph(py, index)?;
-        let nt = CoreNodeTree::default();  // in-RAM, fully mutable
-        Self::create_instance(py, RefCell::new(nt), RefCell::new(index))
-    }
-
-    /// Tell whether the NodeTree is still valid
-    ///
-    /// In case of mutation of the index, the given results are not
-    /// guaranteed to be correct, and in fact, the methods borrowing
-    /// the inner index would fail because of `PySharedRef` poisoning
-    /// (generation-based guard), same as iterating on a `dict` that has
-    /// been meanwhile mutated.
-    def is_invalidated(&self) -> PyResult<bool> {
-        let leaked = &self.index(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let result = unsafe { leaked.try_borrow(py) };
-        // two cases for result to be an error:
-        // - the index has previously been mutably borrowed
-        // - there is currently a mutable borrow
-        // in both cases this amounts for previous results related to
-        // the index to still be valid.
-        Ok(result.is_err())
-    }
-
-    def insert(&self, rev: PyRevision) -> PyResult<PyObject> {
-        let leaked = &self.index(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let index = &*unsafe { leaked.try_borrow(py)? };
-
-        let rev = UncheckedRevision(rev.0);
-        let rev = index
-            .check_revision(rev)
-            .ok_or_else(|| rev_not_in_index(py, rev))?;
-        if rev == NULL_REVISION {
-            return Err(rev_not_in_index(py, rev.into()))
-        }
-
-        let entry = index.inner.get_entry(rev).expect("entry should exist");
-        let mut nt = self.nt(py).borrow_mut();
-        nt.insert(index, entry.hash(), rev).map_err(|e| nodemap_error(py, e))?;
-
-        Ok(py.None())
-    }
-
-    /// Lookup by node hex prefix in the NodeTree, returning revision number.
-    ///
-    /// This is not part of the classical NodeTree API, but is good enough
-    /// for unit testing, as in `test-rust-revlog.py`.
-    def prefix_rev_lookup(
-        &self,
-        node_prefix: PyBytes
-    ) -> PyResult<Option<PyRevision>> {
-        let prefix = NodePrefix::from_hex(node_prefix.data(py))
-            .map_err(|_| PyErr::new::<ValueError, _>(
-                py,
-                format!("Invalid node or prefix {:?}",
-                        node_prefix.as_object()))
-            )?;
-
-        let nt = self.nt(py).borrow();
-        let leaked = &self.index(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let index = &*unsafe { leaked.try_borrow(py)? };
-
-        Ok(nt.find_bin(index, prefix)
-               .map_err(|e| nodemap_error(py, e))?
-               .map(|r| r.into())
-        )
-    }
-
-    def shortest(&self, node: PyBytes) -> PyResult<usize> {
-        let nt = self.nt(py).borrow();
-        let leaked = &self.index(py).borrow();
-        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
-        let idx = &*unsafe { leaked.try_borrow(py)? };
-        match nt.unique_prefix_len_node(idx, &node_from_py_bytes(py, &node)?)
-        {
-            Ok(Some(l)) => Ok(l),
-            Ok(None) => Err(revlog_error(py)),
-            Err(e) => Err(nodemap_error(py, e)),
-        }
-    }
-});
-
-fn panic_after_error(_py: Python) -> ! {
-    unsafe {
-        python3_sys::PyErr_Print();
-    }
-    panic!("Python API called failed");
-}
-
-/// # Safety
-///
-/// Don't call this. Its only caller is taken from `PyO3`.
-unsafe fn cast_from_owned_ptr_or_panic<T>(
-    py: Python,
-    p: *mut python3_sys::PyObject,
-) -> T
-where
-    T: cpython::PythonObjectWithCheckedDowncast,
-{
-    if p.is_null() {
-        panic_after_error(py);
-    } else {
-        PyObject::from_owned_ptr(py, p).cast_into(py).unwrap()
-    }
-}
-
-fn with_pybytes_buffer<F>(
-    py: Python,
-    len: usize,
-    init: F,
-) -> Result<PyBytes, RevlogError>
-where
-    F: FnOnce(
-        &mut dyn RevisionBuffer<Target = PyBytes>,
-    ) -> Result<(), RevlogError>,
-{
-    // Largely inspired by code in PyO3
-    // https://pyo3.rs/main/doc/pyo3/types/struct.pybytes#method.new_bound_with
-    unsafe {
-        let pyptr = python3_sys::PyBytes_FromStringAndSize(
-            std::ptr::null(),
-            len as python3_sys::Py_ssize_t,
-        );
-        let pybytes = cast_from_owned_ptr_or_panic::<PyBytes>(py, pyptr);
-        let buffer: *mut u8 = python3_sys::PyBytes_AsString(pyptr).cast();
-        debug_assert!(!buffer.is_null());
-        let mut rev_buf = PyRevisionBuffer::new(pybytes, buffer, len);
-        // Initialise the bytestring in init
-        // If init returns an Err, the buffer is deallocated by `pybytes`
-        init(&mut rev_buf).map(|_| rev_buf.finish())
-    }
-}
-
-/// Wrapper around a Python-provided buffer into which the revision contents
-/// will be written. Done for speed in order to save a large allocation + copy.
-struct PyRevisionBuffer {
-    py_bytes: PyBytes,
-    _buf: *mut u8,
-    len: usize,
-    current_buf: *mut u8,
-    current_len: usize,
-}
-
-impl PyRevisionBuffer {
-    /// # Safety
-    ///
-    /// `buf` should be the start of the allocated bytes of `bytes`, and `len`
-    /// exactly the length of said allocated bytes.
-    #[inline]
-    unsafe fn new(bytes: PyBytes, buf: *mut u8, len: usize) -> Self {
-        Self {
-            py_bytes: bytes,
-            _buf: buf,
-            len,
-            current_len: 0,
-            current_buf: buf,
-        }
-    }
-
-    /// Number of bytes that have been copied to. Will be different to the
-    /// total allocated length of the buffer unless the revision is done being
-    /// written.
-    #[inline]
-    fn current_len(&self) -> usize {
-        self.current_len
-    }
-}
-
-impl RevisionBuffer for PyRevisionBuffer {
-    type Target = PyBytes;
-
-    #[inline]
-    fn extend_from_slice(&mut self, slice: &[u8]) {
-        assert!(self.current_len + slice.len() <= self.len);
-        unsafe {
-            // We cannot use `copy_from_nonoverlapping` since it's *possible*
-            // to create a slice from the same Python memory region using
-            // [`PyBytesDeref`]. Probable that LLVM has an optimization anyway?
-            self.current_buf.copy_from(slice.as_ptr(), slice.len());
-            self.current_buf = self.current_buf.add(slice.len());
-        }
-        self.current_len += slice.len()
-    }
-
-    #[inline]
-    fn finish(self) -> Self::Target {
-        // catch unzeroed bytes before it becomes undefined behavior
-        assert_eq!(
-            self.current_len(),
-            self.len,
-            "not enough bytes read for revision"
-        );
-        self.py_bytes
-    }
-}
-
-fn revlog_error(py: Python) -> PyErr {
-    match py
-        .import("mercurial.error")
-        .and_then(|m| m.get(py, "RevlogError"))
-    {
-        Err(e) => e,
-        Ok(cls) => PyErr::from_instance(
-            py,
-            cls.call(py, (py.None(),), None).ok().into_py_object(py),
-        ),
-    }
-}
-
-fn graph_error(py: Python, _err: hg::GraphError) -> PyErr {
-    // ParentOutOfRange is currently the only alternative
-    // in `hg::GraphError`. The C index always raises this simple ValueError.
-    PyErr::new::<ValueError, _>(py, "parent out of range")
-}
-
-fn nodemap_rev_not_in_index(py: Python, rev: UncheckedRevision) -> PyErr {
-    PyErr::new::<ValueError, _>(
-        py,
-        format!(
-            "Inconsistency: Revision {} found in nodemap \
-             is not in revlog index",
-            rev
-        ),
-    )
-}
-
-fn rev_not_in_index(py: Python, rev: UncheckedRevision) -> PyErr {
-    PyErr::new::<ValueError, _>(
-        py,
-        format!("revlog index out of range: {}", rev),
-    )
-}
-
-/// Standard treatment of NodeMapError
-fn nodemap_error(py: Python, err: NodeMapError) -> PyErr {
-    match err {
-        NodeMapError::MultipleResults => revlog_error(py),
-        NodeMapError::RevisionNotInIndex(r) => nodemap_rev_not_in_index(py, r),
-    }
-}
-
-/// Create the module, with __package__ given from parent
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.revlog", package);
-    let m = PyModule::new(py, dotted_name)?;
-    m.add(py, "__package__", package)?;
-    m.add(py, "__doc__", "RevLog - Rust implementations")?;
-
-    m.add_class::<NodeTree>(py)?;
-    m.add_class::<InnerRevlog>(py)?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-
-    Ok(m)
-}
diff --git a/rust/hg-cpython/src/update.rs b/rust/hg-cpython/src/update.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy91cGRhdGUucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/update.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-// debug.rs
-//
-// Copyright 2024 Mercurial developers
-//
-// This software may be used and distributed according to the terms of the
-// GNU General Public License version 2 or any later version.
-
-//! Module for updating a repository.
-use cpython::{PyDict, PyModule, PyObject, PyResult, Python};
-use hg::{
-    progress::{HgProgressBar, Progress},
-    update::update_from_null,
-    BaseRevision,
-};
-
-use crate::{
-    exceptions::FallbackError,
-    utils::{hgerror_to_pyerr, repo_from_path, with_sigint_wrapper},
-};
-
-pub fn update_from_null_fast_path(
-    py: Python,
-    repo_path: PyObject,
-    to: BaseRevision,
-    num_cpus: Option<usize>,
-) -> PyResult<usize> {
-    log::trace!("Using update from null fastpath");
-    let repo = repo_from_path(py, repo_path)?;
-    let progress: &dyn Progress = &HgProgressBar::new("updating");
-
-    let res = with_sigint_wrapper(py, || {
-        update_from_null(&repo, to.into(), progress, num_cpus)
-    })?;
-
-    hgerror_to_pyerr(py, res)
-}
-
-pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
-    let dotted_name = &format!("{}.update", package);
-    let m = PyModule::new(py, dotted_name)?;
-
-    m.add(py, "__package__", package)?;
-    m.add(py, "__doc__", "Rust module for updating a repository")?;
-    m.add(py, "FallbackError", py.get_type::<FallbackError>())?;
-    m.add(
-        py,
-        "update_from_null",
-        py_fn!(
-            py,
-            update_from_null_fast_path(
-                repo_path: PyObject,
-                to: BaseRevision,
-                num_cpus: Option<usize>
-            )
-        ),
-    )?;
-
-    let sys = PyModule::import(py, "sys")?;
-    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
-    sys_modules.set_item(py, dotted_name, &m)?;
-
-    Ok(m)
-}
diff --git a/rust/hg-cpython/src/utils.rs b/rust/hg-cpython/src/utils.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy91dGlscy5ycw==..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/utils.rs
+++ /dev/null
@@ -1,144 +0,0 @@
-use cpython::exc::{KeyboardInterrupt, ValueError};
-use cpython::{
-    ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyObject, PyResult,
-    PyTuple, Python, ToPyObject,
-};
-use hg::config::Config;
-use hg::errors::HgError;
-use hg::repo::{Repo, RepoError};
-use hg::revlog::Node;
-use hg::utils::files::get_path_from_bytes;
-
-use crate::exceptions::FallbackError;
-
-#[allow(unused)]
-pub fn print_python_trace(py: Python) -> PyResult<PyObject> {
-    eprintln!("===============================");
-    eprintln!("Printing Python stack from Rust");
-    eprintln!("===============================");
-    let traceback = py.import("traceback")?;
-    let sys = py.import("sys")?;
-    let kwargs = PyDict::new(py);
-    kwargs.set_item(py, "file", sys.get(py, "stderr")?)?;
-    traceback.call(py, "print_stack", PyTuple::new(py, &[]), Some(&kwargs))
-}
-
-pub fn hgerror_to_pyerr<T>(
-    py: Python,
-    error: Result<T, HgError>,
-) -> PyResult<T> {
-    error.map_err(|e| match e {
-        HgError::IoError { .. } => {
-            PyErr::new::<cpython::exc::IOError, _>(py, e.to_string())
-        }
-        HgError::UnsupportedFeature(e) => {
-            let as_string = e.to_string();
-            log::trace!("Update from null fallback: {}", as_string);
-            PyErr::new::<FallbackError, _>(py, &as_string)
-        }
-        HgError::RaceDetected(_) => {
-            unreachable!("must not surface to the user")
-        }
-        HgError::Path(path_error) => {
-            let msg = PyBytes::new(py, path_error.to_string().as_bytes());
-            let cls = py
-                .import("mercurial.error")
-                .and_then(|m| m.get(py, "InputError"))
-                .unwrap();
-            PyErr::from_instance(
-                py,
-                cls.call(py, (msg,), None).ok().into_py_object(py),
-            )
-        }
-        HgError::InterruptReceived => {
-            PyErr::new::<KeyboardInterrupt, _>(py, "")
-        }
-        e => PyErr::new::<cpython::exc::RuntimeError, _>(py, e.to_string()),
-    })
-}
-
-pub fn repo_error_to_pyerr<T>(
-    py: Python,
-    error: Result<T, RepoError>,
-) -> PyResult<T> {
-    hgerror_to_pyerr(py, error.map_err(HgError::from))
-}
-
-/// Get a repository from a given [`PyObject`] path, and bubble up any error
-/// that comes up.
-pub fn repo_from_path(py: Python, repo_path: PyObject) -> Result<Repo, PyErr> {
-    // TODO make the Config a Python class and downcast it here, otherwise we
-    // lose CLI args and runtime overrides done in Python.
-    let config =
-        hgerror_to_pyerr(py, Config::load_non_repo().map_err(HgError::from))?;
-    let py_bytes = &repo_path.extract::<PyBytes>(py)?;
-    let repo_path = py_bytes.data(py);
-    let repo = repo_error_to_pyerr(
-        py,
-        Repo::find(&config, Some(get_path_from_bytes(repo_path).to_owned())),
-    )?;
-    Ok(repo)
-}
-
-// Necessary evil for the time being, could maybe be moved to
-// a TryFrom in Node itself
-const NODE_BYTES_LENGTH: usize = 20;
-type NodeData = [u8; NODE_BYTES_LENGTH];
-
-/// Copy incoming Python bytes given as `PyObject` into `Node`,
-/// doing the necessary checks
-pub fn node_from_py_object<'a>(
-    py: Python,
-    bytes: &'a PyObject,
-) -> PyResult<Node> {
-    let as_py_bytes: &'a PyBytes = bytes.extract(py)?;
-    node_from_py_bytes(py, as_py_bytes)
-}
-
-/// Clone incoming Python bytes given as `PyBytes` as a `Node`,
-/// doing the necessary checks.
-pub fn node_from_py_bytes(py: Python, bytes: &PyBytes) -> PyResult<Node> {
-    <NodeData>::try_from(bytes.data(py))
-        .map_err(|_| {
-            PyErr::new::<ValueError, _>(
-                py,
-                format!("{}-byte hash required", NODE_BYTES_LENGTH),
-            )
-        })
-        .map(Into::into)
-}
-
-/// Wrap a call to `func` so that Python's `SIGINT` handler is first stored,
-/// then restored after the call to `func` and finally raised if
-/// `func` returns a [`HgError::InterruptReceived`]
-pub fn with_sigint_wrapper<R>(
-    py: Python,
-    func: impl Fn() -> Result<R, HgError>,
-) -> PyResult<Result<R, HgError>> {
-    let signal_py_mod = py.import("signal")?;
-    let sigint_py_const = signal_py_mod.get(py, "SIGINT")?;
-    let old_handler = signal_py_mod.call(
-        py,
-        "getsignal",
-        PyTuple::new(py, &[sigint_py_const.clone_ref(py)]),
-        None,
-    )?;
-    let res = func();
-    // Reset the old signal handler in Python because we've may have changed it
-    signal_py_mod.call(
-        py,
-        "signal",
-        PyTuple::new(py, &[sigint_py_const.clone_ref(py), old_handler]),
-        None,
-    )?;
-    if let Err(HgError::InterruptReceived) = res {
-        // Trigger the signal in Python
-        signal_py_mod.call(
-            py,
-            "raise_signal",
-            PyTuple::new(py, &[sigint_py_const]),
-            None,
-        )?;
-    }
-    Ok(res)
-}
diff --git a/rust/hg-cpython/src/vfs.rs b/rust/hg-cpython/src/vfs.rs
deleted file mode 100644
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1jcHl0aG9uL3NyYy92ZnMucnM=..0000000000000000000000000000000000000000
--- a/rust/hg-cpython/src/vfs.rs
+++ /dev/null
@@ -1,304 +0,0 @@
-use std::{
-    cell::Cell,
-    fs::File,
-    io::Error,
-    os::fd::{AsRawFd, FromRawFd},
-    path::{Path, PathBuf},
-};
-
-use cpython::{
-    ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyInt, PyObject,
-    PyResult, PyTuple, Python, PythonObject, ToPyObject,
-};
-use hg::{
-    errors::{HgError, IoResultExt},
-    exit_codes,
-    utils::files::{get_bytes_from_path, get_path_from_bytes},
-    vfs::{Vfs, VfsFile},
-};
-
-/// Wrapper around a Python VFS object to call back into Python from `hg-core`.
-pub struct PyVfs {
-    inner: PyObject,
-    base: PathBuf,
-}
-
-impl Clone for PyVfs {
-    fn clone(&self) -> Self {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        Self {
-            inner: self.inner.clone_ref(py),
-            base: self.base.clone(),
-        }
-    }
-}
-
-impl PyVfs {
-    pub fn new(
-        _py: Python,
-        py_vfs: PyObject,
-        base: PathBuf,
-    ) -> PyResult<Self> {
-        Ok(Self {
-            inner: py_vfs,
-            base,
-        })
-    }
-
-    fn inner_open(
-        &self,
-        filename: &Path,
-        create: bool,
-        check_ambig: bool,
-        atomic_temp: bool,
-        write: bool,
-    ) -> Result<(File, Option<PathBuf>), HgError> {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        let mode = if atomic_temp {
-            PyBytes::new(py, b"w")
-        } else if create {
-            PyBytes::new(py, b"w+")
-        } else if write {
-            PyBytes::new(py, b"r+")
-        } else {
-            PyBytes::new(py, b"rb")
-        };
-        let res = self.inner.call(
-            py,
-            (
-                PyBytes::new(py, &get_bytes_from_path(filename)),
-                mode,
-                atomic_temp,
-                check_ambig,
-            ),
-            None,
-        );
-        match res {
-            Ok(tup) => {
-                let tup = tup
-                    .extract::<PyTuple>(py)
-                    .map_err(|e| vfs_error("vfs did not return a tuple", e))?;
-                let fileno = tup.get_item(py, 0).extract(py).map_err(|e| {
-                    vfs_error("vfs did not return a valid fileno", e)
-                })?;
-                let temp_name = tup.get_item(py, 1);
-                // Safety: this must be a valid owned file descriptor, and
-                // Python has just given it to us, it will only exist here now
-                let file = unsafe { File::from_raw_fd(fileno) };
-                let temp_name = if atomic_temp {
-                    Some(
-                        get_path_from_bytes(
-                            temp_name
-                                .extract::<PyBytes>(py)
-                                .map_err(|e| vfs_error("invalid tempname", e))?
-                                .data(py),
-                        )
-                        .to_owned(),
-                    )
-                } else {
-                    None
-                };
-                Ok((file, temp_name))
-            }
-            Err(mut e) => {
-                // TODO surely there is a better way of comparing
-                if e.instance(py).get_type(py).name(py) == "FileNotFoundError"
-                {
-                    return Err(HgError::IoError {
-                        error: Error::new(
-                            std::io::ErrorKind::NotFound,
-                            e.instance(py).to_string(),
-                        ),
-                        context: hg::errors::IoErrorContext::ReadingFile(
-                            filename.to_owned(),
-                        ),
-                    });
-                }
-                Err(vfs_error("failed to call opener", e))
-            }
-        }
-    }
-}
-
-fn vfs_error(reason: impl Into<String>, mut error: PyErr) -> HgError {
-    let gil = &Python::acquire_gil();
-    let py = gil.python();
-    HgError::abort(
-        format!("{}: {}", reason.into(), error.instance(py)),
-        exit_codes::ABORT,
-        None,
-    )
-}
-
-py_class!(pub class PyFile |py| {
-    data number: Cell<i32>;
-
-    def fileno(&self) -> PyResult<PyInt> {
-        Ok(self.number(py).get().to_py_object(py))
-    }
-});
-
-impl Vfs for PyVfs {
-    fn open(&self, filename: &Path) -> Result<VfsFile, HgError> {
-        self.inner_open(filename, false, false, false, false)
-            .map(|(f, _)| VfsFile::normal(f, filename.to_owned()))
-    }
-
-    fn open_write(&self, filename: &Path) -> Result<VfsFile, HgError> {
-        self.inner_open(filename, false, false, false, true)
-            .map(|(f, _)| VfsFile::normal(f, filename.to_owned()))
-    }
-
-    fn open_check_ambig(&self, filename: &Path) -> Result<VfsFile, HgError> {
-        self.inner_open(filename, false, true, false, true)
-            .map(|(f, _)| VfsFile::normal(f, filename.to_owned()))
-    }
-
-    fn create(
-        &self,
-        filename: &Path,
-        check_ambig: bool,
-    ) -> Result<VfsFile, HgError> {
-        self.inner_open(filename, true, check_ambig, false, true)
-            .map(|(f, _)| VfsFile::normal(f, filename.to_owned()))
-    }
-
-    fn create_atomic(
-        &self,
-        filename: &Path,
-        check_ambig: bool,
-    ) -> Result<VfsFile, HgError> {
-        self.inner_open(filename, true, false, true, true).map(
-            |(fp, temp_name)| {
-                VfsFile::Atomic(hg::vfs::AtomicFile::from_file(
-                    fp,
-                    check_ambig,
-                    temp_name.expect("temp name should exist"),
-                    filename.to_owned(),
-                ))
-            },
-        )
-    }
-
-    fn file_size(&self, file: &VfsFile) -> Result<u64, HgError> {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        let raw_fd = file.as_raw_fd();
-        let py_fd = PyFile::create_instance(py, Cell::new(raw_fd))
-            .expect("create_instance cannot fail");
-        let fstat = self
-            .inner
-            .call_method(py, "fstat", (py_fd,), None)
-            .map_err(|e| {
-                vfs_error(format!("failed to fstat fd '{}'", raw_fd), e)
-            })?;
-        fstat
-            .getattr(py, "st_size")
-            .map(|v| {
-                v.extract(py).map_err(|e| {
-                    vfs_error(format!("invalid size for fd '{}'", raw_fd), e)
-                })
-            })
-            .map_err(|e| {
-                vfs_error(format!("failed to get size of fd '{}'", raw_fd), e)
-            })?
-    }
-
-    fn exists(&self, filename: &Path) -> bool {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        self.inner
-            .call_method(
-                py,
-                "exists",
-                (PyBytes::new(py, &get_bytes_from_path(filename)),),
-                None,
-            )
-            .unwrap_or_else(|_| false.into_py_object(py).into_object())
-            .extract(py)
-            .unwrap()
-    }
-
-    fn unlink(&self, filename: &Path) -> Result<(), HgError> {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        if let Err(e) = self.inner.call_method(
-            py,
-            "unlink",
-            (PyBytes::new(py, &get_bytes_from_path(filename)),),
-            None,
-        ) {
-            return Err(vfs_error(
-                format!("failed to unlink '{}'", filename.display()),
-                e,
-            ));
-        }
-        Ok(())
-    }
-
-    fn rename(
-        &self,
-        from: &Path,
-        to: &Path,
-        check_ambig: bool,
-    ) -> Result<(), HgError> {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        let kwargs = PyDict::new(py);
-        kwargs
-            .set_item(py, "checkambig", check_ambig)
-            .map_err(|e| vfs_error("dict setitem failed", e))?;
-        if let Err(e) = self.inner.call_method(
-            py,
-            "rename",
-            (
-                PyBytes::new(py, &get_bytes_from_path(from)),
-                PyBytes::new(py, &get_bytes_from_path(to)),
-            ),
-            Some(&kwargs),
-        ) {
-            let msg = format!(
-                "failed to rename '{}' to '{}'",
-                from.display(),
-                to.display()
-            );
-            return Err(vfs_error(msg, e));
-        }
-        Ok(())
-    }
-
-    fn copy(&self, from: &Path, to: &Path) -> Result<(), HgError> {
-        let gil = &Python::acquire_gil();
-        let py = gil.python();
-        let from = self
-            .inner
-            .call_method(
-                py,
-                "join",
-                (PyBytes::new(py, &get_bytes_from_path(from)),),
-                None,
-            )
-            .unwrap();
-        let from = from.extract::<PyBytes>(py).unwrap();
-        let from = get_path_from_bytes(from.data(py));
-        let to = self
-            .inner
-            .call_method(
-                py,
-                "join",
-                (PyBytes::new(py, &get_bytes_from_path(to)),),
-                None,
-            )
-            .unwrap();
-        let to = to.extract::<PyBytes>(py).unwrap();
-        let to = get_path_from_bytes(to.data(py));
-        std::fs::copy(from, to).when_writing_file(to)?;
-        Ok(())
-    }
-
-    fn base(&self) -> &Path {
-        &self.base
-    }
-}
diff --git a/rust/hg-pyo3/Cargo.toml b/rust/hg-pyo3/Cargo.toml
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL0NhcmdvLnRvbWw=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL0NhcmdvLnRvbWw= 100644
--- a/rust/hg-pyo3/Cargo.toml
+++ b/rust/hg-pyo3/Cargo.toml
@@ -12,8 +12,9 @@
 
 [features]
 extension-module = ["pyo3/extension-module"]
+full-tracing = ["hg-core/full-tracing", "dep:tracing-chrome"]
 default = ["extension-module"]
 
 [dependencies]
 pyo3 = { version = "0.23.1" }
 pyo3-sharedref = { path = "../pyo3-sharedref" }
@@ -15,7 +16,7 @@
 default = ["extension-module"]
 
 [dependencies]
 pyo3 = { version = "0.23.1" }
 pyo3-sharedref = { path = "../pyo3-sharedref" }
-hg-core = { path = "../hg-core"}
+hg-core = { path = "../hg-core", default-features = false }
 stable_deref_trait = "1.2.0"
@@ -21,4 +22,2 @@
 stable_deref_trait = "1.2.0"
-log = "0.4.17"
-logging_timer = "1.1.0"
 derive_more = "0.99.17"
@@ -24,4 +23,3 @@
 derive_more = "0.99.17"
-env_logger = "0.9.3"
 vcsgraph = "0.2.0"
 crossbeam-channel = "0.5.14"
@@ -26,2 +24,6 @@
 vcsgraph = "0.2.0"
 crossbeam-channel = "0.5.14"
+tracing = { version = "0.1.41", features = ["attributes"] }
+tracing-chrome = { version = "0.7.2", optional = true }
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
+dashmap = "6.1.0"
diff --git a/rust/hg-pyo3/src/ancestors.rs b/rust/hg-pyo3/src/ancestors.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL3NyYy9hbmNlc3RvcnMucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy9hbmNlc3RvcnMucnM= 100644
--- a/rust/hg-pyo3/src/ancestors.rs
+++ b/rust/hg-pyo3/src/ancestors.rs
@@ -228,7 +228,7 @@
         mut slf: PyRefMut<'_, Self>,
         revs: &Bound<'_, PyAny>,
     ) -> PyResult<()> {
-        // Original comment from hg-cpython:
+        // Original comment from the now-extinct hg-cpython:
         //   this is very lame: we convert to a Rust set, update it in place
         //   and then convert back to Python, only to have Python remove the
         //   excess (thankfully, Python is happy with a list or even an
diff --git a/rust/hg-pyo3/src/dirstate/status.rs b/rust/hg-pyo3/src/dirstate/status.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL3NyYy9kaXJzdGF0ZS9zdGF0dXMucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy9kaXJzdGF0ZS9zdGF0dXMucnM= 100644
--- a/rust/hg-pyo3/src/dirstate/status.rs
+++ b/rust/hg-pyo3/src/dirstate/status.rs
@@ -49,7 +49,7 @@
     collection: &[(impl AsRef<HgPath>, BadMatch)],
 ) -> PyResult<Py<PyList>> {
     let get_error_message = |code: i32| -> String {
-        // hg-cpython here calling the Python interpreter
+        // hg-cpython used to call the Python interpreter here
         // using `os.strerror`. This seems to be equivalent and infallible
         std::io::Error::from_raw_os_error(code).to_string()
     };
@@ -165,7 +165,8 @@
     match err {
         StatusError::Pattern(e) => {
             let as_string = e.to_string();
-            log::trace!("Rust status fallback, `{}`", &as_string);
+            tracing::debug!("Rust status fallback, see trace-level logs");
+            tracing::trace!("{}", as_string);
             FallbackError::new_err(as_string)
         }
         e => to_string_value_error(e),
diff --git a/rust/hg-pyo3/src/lib.rs b/rust/hg-pyo3/src/lib.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL3NyYy9saWIucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy9saWIucnM= 100644
--- a/rust/hg-pyo3/src/lib.rs
+++ b/rust/hg-pyo3/src/lib.rs
@@ -8,6 +8,7 @@
 mod exceptions;
 mod node;
 mod path;
+mod pytracing;
 mod repo;
 mod revision;
 mod revlog;
@@ -24,7 +25,6 @@
         "Mercurial core concepts - Rust implementation exposed via PyO3",
     )?;
     let dotted_name: String = m.getattr("__name__")?.extract()?;
-    env_logger::init();
 
     m.add_submodule(&ancestors::init_module(py, &dotted_name)?)?;
     m.add_submodule(&copy_tracing::init_module(py, &dotted_name)?)?;
@@ -33,6 +33,7 @@
     m.add_submodule(&discovery::init_module(py, &dotted_name)?)?;
     m.add_submodule(&revlog::init_module(py, &dotted_name)?)?;
     m.add_submodule(&update::init_module(py, &dotted_name)?)?;
+    m.add_submodule(&pytracing::init_module(py, &dotted_name)?)?;
     m.add("GraphError", py.get_type::<exceptions::GraphError>())?;
     Ok(())
 }
diff --git a/rust/hg-pyo3/src/pytracing.rs b/rust/hg-pyo3/src/pytracing.rs
new file mode 100644
index 0000000000000000000000000000000000000000..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy9weXRyYWNpbmcucnM=
--- /dev/null
+++ b/rust/hg-pyo3/src/pytracing.rs
@@ -0,0 +1,257 @@
+use crate::utils::new_submodule;
+#[cfg(feature = "full-tracing")]
+use full_tracing::{setup_tracing_guard, PyTracing};
+
+use pyo3::prelude::*;
+#[cfg(not(feature = "full-tracing"))]
+use tracing_subscriber::{fmt::format::FmtSpan, prelude::*, EnvFilter};
+
+#[cfg(not(feature = "full-tracing"))]
+/// Enable an env-filtered logger to stderr
+fn setup_tracing() {
+    let registry = tracing_subscriber::registry()
+        .with(tracing_subscriber::fmt::layer())
+        .with(EnvFilter::from_default_env());
+    let fmt_layer = tracing_subscriber::fmt::layer()
+        .with_writer(std::io::stderr)
+        .with_span_events(FmtSpan::CLOSE);
+    registry.with(fmt_layer).init()
+}
+
+#[cfg(feature = "full-tracing")]
+mod full_tracing {
+    use dashmap::DashMap;
+    use pyo3::{prelude::*, types::PyTuple};
+    use std::sync::{Mutex, OnceLock};
+    use tracing::{
+        field::{Field, Visit},
+        span::{Attributes, Id},
+        Subscriber,
+    };
+    use tracing_chrome::{ChromeLayerBuilder, EventOrSpan, FlushGuard};
+    use tracing_subscriber::{
+        layer::{Context, SubscriberExt as _},
+        registry::LookupSpan,
+        util::SubscriberInitExt as _,
+        EnvFilter, Layer,
+    };
+
+    /// A span target name used to mark spans coming from Python
+    pub const PYTHON_TARGET_NAME: &str = "from_python";
+
+    /// A mapping of span ids to user-facing trace names.
+    /// See [`PyTracingLayer`] for more info.
+    static ID_TO_NAMES: OnceLock<DashMap<u64, String>> = OnceLock::new();
+
+    /// A Python front-end singleton to hook into the Rust tracing system,
+    /// so that all traces are collected in a single place.
+    #[pyclass]
+    pub struct PyTracing;
+
+    #[pymethods]
+    impl PyTracing {
+        /// Returns a context manager that will correspond to a span `name`
+        /// with level `debug`
+        pub fn span(&self, name: &str) -> PyTracingSpan {
+            let span = tracing::debug_span!(
+                target: PYTHON_TARGET_NAME,
+                "", // The name is useless in this context anyway
+                py_actual_name = name
+            );
+            PyTracingSpan::new(TracingSpanState::Created(span))
+        }
+    }
+
+    /// A simple context manager that corresponds to a tracing span created
+    /// from a Python context.
+    #[pyclass(unsendable)]
+    pub struct PyTracingSpan {
+        inner: Option<TracingSpanState>,
+    }
+
+    impl PyTracingSpan {
+        pub fn new(inner: TracingSpanState) -> Self {
+            Self { inner: Some(inner) }
+        }
+    }
+
+    #[pymethods]
+    impl PyTracingSpan {
+        fn __enter__(&mut self) -> PyResult<()> {
+            if let Some(TracingSpanState::Created(s)) =
+                std::mem::take(&mut self.inner)
+            {
+                let entered = TracingSpanState::Entered(s.entered());
+                self.inner = Some(entered);
+            }
+            Ok(())
+        }
+
+        #[pyo3(signature = (*_args))]
+        fn __exit__(&mut self, _args: &Bound<'_, PyTuple>) {
+            if let Some(TracingSpanState::Entered(s)) =
+                std::mem::take(&mut self.inner)
+            {
+                s.exit();
+            }
+        }
+    }
+
+    /// See [`ChromeTracingGuard`].
+    /// Enable an env-filtered chrome-trace logger writing to a file.
+    /// Defaults to `./trace-{unix epoch in micros}.json`, but the path can
+    /// be overridden via the `HG_TRACE_PATH` environment variable.
+    pub fn setup_tracing_guard() -> ChromeTracingGuard {
+        // Expect that if any events are recorded, we probably are going to
+        // record a few. 16 is pretty arbitrary, but seems like a good
+        // balance between not re-sizing for most cases and not
+        // over-provisioning.
+        ID_TO_NAMES.get_or_init(|| DashMap::with_capacity(16));
+        let mut chrome_layer_builder =
+            ChromeLayerBuilder::new().name_fn(Box::new(|event_or_span| {
+                match event_or_span {
+                    EventOrSpan::Event(ev) => ev.metadata().name().into(),
+                    EventOrSpan::Span(span_ref) => {
+                        if span_ref.metadata().target() != PYTHON_TARGET_NAME {
+                            // Not a Python span; it already has the correct name
+                            span_ref.name().into()
+                        } else {
+                            let id = span_ref.id();
+                            ID_TO_NAMES
+                                .get()
+                                .expect("ID_TO_NAMES should exist")
+                                .get(&id.into_u64())
+                                .map(|r| r.value().to_string())
+                                .unwrap_or_else(|| String::from("unknown"))
+                        }
+                    }
+                }
+            }));
+
+        // /!\ Keep in sync with rhg
+        if let Ok(path) = std::env::var("HG_TRACE_PATH") {
+            chrome_layer_builder = chrome_layer_builder.file(path);
+        }
+        let (chrome_layer, chrome_layer_guard) = chrome_layer_builder.build();
+        let registry = tracing_subscriber::registry()
+            .with(PyTracingLayer)
+            .with(EnvFilter::from_default_env());
+        let registry = registry.with(chrome_layer);
+        registry.init();
+        // Send this event as soon as possible to get a reference for how much
+        // time elapses between the start of tracing and the first actual
+        // tracing point.
+        tracing::info!(name: "tracing setup", "pyo3 chrome tracing setup done");
+        ChromeTracingGuard::new(chrome_layer_guard)
+    }
+
+    /// A [`Layer`] implementation that intercepts each new [`Span`] and
+    /// remembers dynamic information for those created from Python.
+    ///
+    /// [`tracing`] events and spans have a static name (and location, etc.)
+    /// which allows them to be very low overhead. We are however tracing from
+    /// Python, which (of course) has no way of statically communicating
+    /// its names when compiling the Rust code. Thus, we keep track of all
+    /// spans and their dynamic names to map each span id to its name upon
+    /// writing to the trace.
+    pub struct PyTracingLayer;
+
+    impl<S> Layer<S> for PyTracingLayer
+    where
+        S: Subscriber,
+        S: for<'lookup> LookupSpan<'lookup>,
+    {
+        fn on_new_span(
+            &self,
+            attrs: &Attributes<'_>,
+            id: &Id,
+            _ctx: Context<'_, S>,
+        ) {
+            let mut visitor = PyTracingVisitor::new(id.into_u64());
+            attrs.values().record(&mut visitor);
+        }
+    }
+
+    /// A simple [`Visit`] implementation that records the names of
+    /// Python-created spans in [`ID_TO_NAMES`].
+    struct PyTracingVisitor {
+        id: u64,
+    }
+
+    impl PyTracingVisitor {
+        pub fn new(id: u64) -> Self {
+            Self { id }
+        }
+    }
+
+    impl Visit for PyTracingVisitor {
+        fn record_debug(
+            &mut self,
+            _field: &Field,
+            _value: &dyn std::fmt::Debug,
+        ) {
+            // Do nothing
+        }
+        fn record_str(&mut self, field: &Field, value: &str) {
+            if field.name() == "py_actual_name" {
+                // This will only be called from the main thread because we
+                // filter out non-Python events
+                ID_TO_NAMES
+                    .get()
+                    .expect("ID_TO_NAMES should exist")
+                    .insert(self.id, value.to_string());
+            }
+        }
+    }
+
+    /// Keeps track of the current state of the span.
+    /// There is no state for "exited" because that simply means
+    /// it's been dropped.
+    pub enum TracingSpanState {
+    /// The span has only been created, but not yet entered
+        Created(tracing::Span),
+        /// The span has been entered
+        Entered(tracing::span::EnteredSpan),
+    }
+
+    /// A Python object whose entire purpose is to keep the [`FlushGuard`]
+    /// used for tracing, flushing to the trace file when it is dropped as
+    /// the Python process finishes.
+    #[pyclass]
+    pub struct ChromeTracingGuard {
+        guard: Mutex<FlushGuard>,
+    }
+
+    impl ChromeTracingGuard {
+        fn new(guard: FlushGuard) -> Self {
+            Self {
+                guard: Mutex::new(guard),
+            }
+        }
+    }
+
+    impl Drop for ChromeTracingGuard {
+        fn drop(&mut self) {
+            self.guard
+                .try_lock()
+                .map(|guard| {
+                    guard.flush();
+                })
+                .ok();
+        }
+    }
+}
+
+pub fn init_module<'py>(
+    py: Python<'py>,
+    package: &str,
+) -> PyResult<Bound<'py, PyModule>> {
+    let m = new_submodule(py, package, "tracing")?;
+    #[cfg(feature = "full-tracing")]
+    m.add("tracer", PyTracing)?;
+    #[cfg(feature = "full-tracing")]
+    m.add("_chrome_tracing_guard", setup_tracing_guard())?;
+    #[cfg(not(feature = "full-tracing"))]
+    setup_tracing();
+    Ok(m)
+}
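For reference, a minimal sketch (not part of this change; the function and field names below are made up) of how the subscribers configured in pytracing.rs get fed: code in hg-core and rhg emits spans and events through the standard `tracing` 0.1 macros, and `EnvFilter::from_default_env()` selects them via the `RUST_LOG` environment variable.

    use tracing::{debug_span, info};

    fn copy_some_files() {
        // Selected by the EnvFilter (e.g. RUST_LOG=debug). With the stderr
        // fmt layer, FmtSpan::CLOSE also logs the span's duration when it
        // ends; under `full-tracing`, the same span becomes a chrome-trace
        // slice instead.
        let _span = debug_span!("copy_some_files", file_count = 3).entered();
        info!("starting copy");
        // ... actual work would go here ...
    }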
diff --git a/rust/hg-pyo3/src/revision.rs b/rust/hg-pyo3/src/revision.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL3NyYy9yZXZpc2lvbi5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy9yZXZpc2lvbi5ycw== 100644
--- a/rust/hg-pyo3/src/revision.rs
+++ b/rust/hg-pyo3/src/revision.rs
@@ -10,7 +10,7 @@
 /// Revision as exposed to/from the Python layer.
 ///
 /// We need this indirection because of the orphan rule, meaning we can't
-/// implement a foreign trait (like [`cpython::ToPyObject`])
+/// implement a foreign trait (like [`pyo3::ToPyObject`])
 /// for a foreign type (like [`hg::UncheckedRevision`]).
 ///
 /// This also acts as a deterrent against blindly trusting Python to send
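To illustrate the orphan-rule point made above, a hedged sketch (the `MyRevision` name and its inner type are illustrative, not the actual `PyRevision` definition): a PyO3 conversion trait cannot be implemented directly on a foreign type such as `hg::UncheckedRevision`, but it can be implemented on a crate-local newtype wrapping it.

    use pyo3::prelude::*;
    use pyo3::ToPyObject;

    /// Crate-local newtype; imagine the inner value being the foreign
    /// `hg::UncheckedRevision`. Implementing `pyo3::ToPyObject` on that
    /// foreign type directly would violate the orphan rule, while this impl
    /// on the local wrapper is allowed (the newer `IntoPyObject` trait
    /// follows the same logic).
    struct MyRevision(i32);

    impl ToPyObject for MyRevision {
        fn to_object(&self, py: Python<'_>) -> PyObject {
            self.0.to_object(py)
        }
    }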
diff --git a/rust/hg-pyo3/src/revlog/mod.rs b/rust/hg-pyo3/src/revlog/mod.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL3NyYy9yZXZsb2cvbW9kLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy9yZXZsb2cvbW9kLnJz 100644
--- a/rust/hg-pyo3/src/revlog/mod.rs
+++ b/rust/hg-pyo3/src/revlog/mod.rs
@@ -814,7 +814,7 @@
             UncheckedRevision(rev)
         } else {
             // here we could downcast to `PySlice` and use `indices()`, *but*
-            // the rust-cpython based version could not do that, and
+            // the now-removed rust-cpython version could not do that, and
             // `indices()` does some resolving that makes it not equivalent,
             // e.g., `idx[-1::]` has `start=0`. As we are currently in
             // transition, we keep it the old way (hoping it was consistent
@@ -1068,11 +1068,6 @@
         })?;
 
         Self::cache_new_heads_py_list(slf, head_revs)?;
-        // TODO discussion with Alphare: in hg-cpython,
-        // `cache_new_heads_node_ids_py_list` reconverts `head_nodes`,
-        // to store it in the cache attr that is **not actually used**.
-        // Should we drop the idea of this cache definition or actually
-        // use it? Perhaps in a later move for perf assessment?
         Ok(head_nodes)
     }
 
diff --git a/rust/hg-pyo3/src/update.rs b/rust/hg-pyo3/src/update.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL3NyYy91cGRhdGUucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy91cGRhdGUucnM= 100644
--- a/rust/hg-pyo3/src/update.rs
+++ b/rust/hg-pyo3/src/update.rs
@@ -28,7 +28,7 @@
     to: BaseRevision,
     num_cpus: Option<usize>,
 ) -> PyResult<usize> {
-    log::trace!("Using update from null fastpath");
+    tracing::debug!("Using update from null fastpath");
     let repo = repo_from_path(repo_path)?;
     let progress: &dyn Progress = &HgProgressBar::new("updating");
 
diff --git a/rust/hg-pyo3/src/utils.rs b/rust/hg-pyo3/src/utils.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9oZy1weW8zL3NyYy91dGlscy5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9oZy1weW8zL3NyYy91dGlscy5ycw== 100644
--- a/rust/hg-pyo3/src/utils.rs
+++ b/rust/hg-pyo3/src/utils.rs
@@ -16,8 +16,8 @@
 /// Create the module, with `__package__` given from parent
 ///
 /// According to PyO3 documentation, which links to
-/// <https://github.com/PyO3/pyo3/issues/1517>, the same convoluted
-/// write to sys.modules has to be made as with the `cpython` crate.
+/// <https://github.com/PyO3/pyo3/issues/1517>, the convoluted write to
+/// `sys.modules` has to be made.
 pub(crate) fn new_submodule<'py>(
     py: Python<'py>,
     package_name: &str,
diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvQ2FyZ28udG9tbA==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvQ2FyZ28udG9tbA== 100644
--- a/rust/rhg/Cargo.toml
+++ b/rust/rhg/Cargo.toml
@@ -10,4 +10,7 @@
 [lints]
 workspace = true
 
+[features]
+full-tracing = ["hg-core/full-tracing", "dep:tracing-chrome"]
+
 [dependencies]
@@ -13,7 +16,7 @@
 [dependencies]
-hg-core = { path = "../hg-core"}
+hg-core = { path = "../hg-core", default-features = false }
 chrono = "0.4.23"
 clap = { version = "4", features = ["cargo"] }
 derive_more = "0.99.17"
 home = "0.5.4"
 lazy_static = "1.4.0"
@@ -15,8 +18,6 @@
 chrono = "0.4.23"
 clap = { version = "4", features = ["cargo"] }
 derive_more = "0.99.17"
 home = "0.5.4"
 lazy_static = "1.4.0"
-log = "0.4.17"
-logging_timer = "1.1.0"
 regex = "1.7.0"
@@ -22,8 +23,7 @@
 regex = "1.7.0"
-env_logger = "0.11"
 format-bytes = "0.3.0"
 shellexpand = { version = "3.1", features = ["full"]}
 whoami = "1.4"
 which = "4.3.0"
 rayon = "1.7.0"
 libc = "0.2.155"
@@ -24,6 +24,9 @@
 format-bytes = "0.3.0"
 shellexpand = { version = "3.1", features = ["full"]}
 whoami = "1.4"
 which = "4.3.0"
 rayon = "1.7.0"
 libc = "0.2.155"
+tracing = { version = "0.1.41", features = ["attributes"] }
+tracing-chrome = { version = "0.7.2", optional = true }
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"]}
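
These Cargo.toml hunks replace `log`, `env_logger` and `logging_timer` with the `tracing` ecosystem, pulling in `tracing-chrome` only as an optional dependency behind `full-tracing`. A rough, self-contained sketch of what the migration looks like at the call sites changed in the following files; the function and argument names here are made up:

```rust
// Formerly #[logging_timer::time("trace")]: #[tracing::instrument] opens a
// span for the whole call, `skip_all` keeps arguments out of the span
// fields, and `name` overrides the default span name.
#[tracing::instrument(level = "debug", skip_all, name = "rhg example")]
fn run_example(data: &[u8]) {
    // Formerly log::info!(...); the macro call has the same shape in tracing.
    tracing::info!("processing {} bytes", data.len());
}

fn main() {
    // Formerly env_logger::init(): filter via the RUST_LOG-style variable
    // and write to stderr instead of stdout.
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .with_writer(std::io::stderr)
        .init();
    run_example(b"hello");
}
```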
diff --git a/rust/rhg/src/commands/annotate.rs b/rust/rhg/src/commands/annotate.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2Fubm90YXRlLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2Fubm90YXRlLnJz 100644
--- a/rust/rhg/src/commands/annotate.rs
+++ b/rust/rhg/src/commands/annotate.rs
@@ -138,6 +138,7 @@
         .about(HELP_TEXT)
 }
 
+#[tracing::instrument(level = "debug", skip_all, name = "rhg annotate")]
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     let config = invocation.config;
     if config.has_non_empty_section(b"annotate") {
diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2NhdC5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2NhdC5ycw== 100644
--- a/rust/rhg/src/commands/cat.rs
+++ b/rust/rhg/src/commands/cat.rs
@@ -29,7 +29,7 @@
         .about(HELP_TEXT)
 }
 
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all, name = "rhg cat")]
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     let cat_enabled = invocation.config.get_bool(b"rhg", b"cat")?;
     if !cat_enabled {
diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2RlYnVnZGF0YS5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2RlYnVnZGF0YS5ycw== 100644
--- a/rust/rhg/src/commands/debugdata.rs
+++ b/rust/rhg/src/commands/debugdata.rs
@@ -36,7 +36,7 @@
         .about(HELP_TEXT)
 }
 
-#[logging_timer::time("trace")]
+#[tracing::instrument(level = "debug", skip_all, name = "rhg debugdata")]
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     let args = invocation.subcommand_args;
     let rev = args
diff --git a/rust/rhg/src/commands/debugignorerhg.rs b/rust/rhg/src/commands/debugignorerhg.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2RlYnVnaWdub3JlcmhnLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2RlYnVnaWdub3JlcmhnLnJz 100644
--- a/rust/rhg/src/commands/debugignorerhg.rs
+++ b/rust/rhg/src/commands/debugignorerhg.rs
@@ -5,7 +5,7 @@
 use hg::filepatterns::RegexCompleteness;
 use hg::matchers::{get_ignore_matcher_pre, ReSyntax};
 use hg::repo::Repo;
-use log::warn;
+use tracing::warn;
 
 pub const HELP_TEXT: &str = "
 Show effective hgignore patterns used by rhg.
diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2ZpbGVzLnJz..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL2ZpbGVzLnJz 100644
--- a/rust/rhg/src/commands/files.rs
+++ b/rust/rhg/src/commands/files.rs
@@ -38,6 +38,7 @@
         .about(HELP_TEXT)
 }
 
+#[tracing::instrument(level = "debug", skip_all, name = "rhg files")]
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     let relative_paths = match relative_paths(invocation.config)? {
         RelativePaths::Legacy => true,
diff --git a/rust/rhg/src/commands/status.rs b/rust/rhg/src/commands/status.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL3N0YXR1cy5ycw==..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvc3JjL2NvbW1hbmRzL3N0YXR1cy5ycw== 100644
--- a/rust/rhg/src/commands/status.rs
+++ b/rust/rhg/src/commands/status.rs
@@ -33,8 +33,7 @@
 use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
 use hg::Revision;
 use hg::{self, narrow, sparse};
-use log::info;
 use rayon::prelude::*;
 use std::io;
 use std::mem::take;
 use std::path::PathBuf;
@@ -37,7 +36,8 @@
 use rayon::prelude::*;
 use std::io;
 use std::mem::take;
 use std::path::PathBuf;
+use tracing::info;
 
 pub const HELP_TEXT: &str = "
 Show changed files in the working directory
@@ -252,6 +252,7 @@
     Ok(false)
 }
 
+#[tracing::instrument(level = "debug", skip_all, name = "rhg status")]
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     // TODO: lift these limitations
     if invocation
@@ -634,7 +635,7 @@
             // Not updating the dirstate is not ideal but not critical:
             // don’t keep our caller waiting until some other Mercurial
             // process releases the lock.
-            log::info!("not writing dirstate from `status`: lock is held")
+            tracing::info!("not writing dirstate from `status`: lock is held")
         }
         Err(LockError::Other(HgError::IoError { error, .. }))
             if error.kind() == io::ErrorKind::PermissionDenied
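
The main.rs changes that follow also add explicit spans around the setup phases (`span!(...).entered()` / `.exit()`), so CLI parsing, config loading and repo discovery each show up as their own region in the trace. A small sketch of that entered-span pattern, with placeholder function names:

```rust
use tracing::{span, Level};

fn load_config() { /* placeholder for the real setup work */ }
fn run_command() { /* placeholder for the actual command */ }

fn main() {
    // Show debug-level spans on stderr for the purpose of this sketch.
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_max_level(Level::DEBUG)
        .init();

    // `entered()` makes the span current until `exit()` is called, exactly
    // like the "CLI and command setup" span added to main_with_result below.
    let setup_span = span!(Level::DEBUG, "CLI and command setup").entered();
    load_config();
    setup_span.exit();

    run_command();
}
```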
diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_cnVzdC9yaGcvc3JjL21haW4ucnM=..48dfba3df18ce7311ebc2eb22932589da8d0b253_cnVzdC9yaGcvc3JjL21haW4ucnM= 100644
--- a/rust/rhg/src/main.rs
+++ b/rust/rhg/src/main.rs
@@ -1,4 +1,3 @@
-extern crate log;
 use crate::error::CommandError;
 use crate::ui::Ui;
 use clap::{command, Arg, ArgMatches};
@@ -14,6 +13,12 @@
 use std::os::unix::prelude::CommandExt;
 use std::path::PathBuf;
 use std::process::Command;
+use tracing::{span, Level};
+#[cfg(feature = "full-tracing")]
+use tracing_chrome::{ChromeLayerBuilder, FlushGuard};
+#[cfg(not(feature = "full-tracing"))]
+use tracing_subscriber::fmt::format::FmtSpan;
+use tracing_subscriber::{prelude::*, EnvFilter};
 
 mod blackbox;
 mod color;
@@ -23,6 +28,7 @@
     pub mod path_utils;
 }
 
+#[tracing::instrument(level = "debug", skip_all)]
 fn main_with_result(
     argv: Vec<OsString>,
     process_start_time: &blackbox::ProcessStartTime,
@@ -30,6 +36,7 @@
     repo: Result<&Repo, &NoRepoInCwdError>,
     config: &Config,
 ) -> Result<(), CommandError> {
+    let setup_span = span!(Level::DEBUG, "CLI and command setup").entered();
     check_unsupported(config, repo)?;
 
     let app = command!()
@@ -118,6 +125,7 @@
     }
 
     if config.is_extension_enabled(b"blackbox") {
+        let blackbox_span = span!(Level::DEBUG, "blackbox").entered();
         let blackbox =
             blackbox::Blackbox::new(&invocation, process_start_time)?;
         blackbox.log_command_start(argv.iter());
@@ -121,4 +129,6 @@
         let blackbox =
             blackbox::Blackbox::new(&invocation, process_start_time)?;
         blackbox.log_command_start(argv.iter());
+        blackbox_span.exit();
+        setup_span.exit();
         let result = run(&invocation);
@@ -124,4 +134,5 @@
         let result = run(&invocation);
+        let blackbox_span = span!(Level::DEBUG, "blackbox").entered();
         blackbox.log_command_end(
             argv.iter(),
             exit_code(
@@ -133,5 +144,6 @@
                     .unwrap_or(false),
             ),
         );
+        blackbox_span.exit();
         result
     } else {
@@ -136,5 +148,6 @@
         result
     } else {
+        setup_span.exit();
         run(&invocation)
     }
 }
@@ -145,7 +158,10 @@
     // measurements. Reading config files can be slow if they’re on NFS.
     let process_start_time = blackbox::ProcessStartTime::now();
 
-    env_logger::init();
+    #[cfg(feature = "full-tracing")]
+    let chrome_layer_guard = setup_tracing();
+    #[cfg(not(feature = "full-tracing"))]
+    setup_tracing();
 
     // Make sure nothing in a future version of `rhg` sets the global
     // threadpool before we can cap default threads. (This is also called
@@ -156,8 +172,8 @@
 
     let early_args = EarlyArgs::parse(&argv);
 
-    let initial_current_dir = early_args.cwd.map(|cwd| {
-        let cwd = get_path_from_bytes(&cwd);
+    let initial_current_dir = early_args.cwd.as_ref().map(|cwd| {
+        let cwd = get_path_from_bytes(cwd);
         std::env::current_dir()
             .and_then(|initial| {
                 std::env::set_current_dir(cwd)?;
@@ -179,6 +195,128 @@
             })
     });
 
+    let (non_repo_config, repo_path) =
+        config_setup(&argv, early_args, &initial_current_dir);
+
+    let repo_span = span!(Level::DEBUG, "repo setup").entered();
+
+    let simple_exit =
+        |ui: &Ui, config: &Config, result: Result<(), CommandError>| -> ! {
+            exit(
+                &argv,
+                &initial_current_dir,
+                ui,
+                OnUnsupported::from_config(config),
+                result,
+                // TODO: show a warning or combine with original error if
+                // `get_bool` returns an error
+                non_repo_config
+                    .get_bool(b"ui", b"detailed-exit-code")
+                    .unwrap_or(false),
+            )
+        };
+    let early_exit = |config: &Config, error: CommandError| -> ! {
+        simple_exit(&Ui::new_infallible(config), config, Err(error))
+    };
+    let repo_result = match Repo::find(&non_repo_config, repo_path.to_owned())
+    {
+        Ok(repo) => Ok(repo),
+        Err(RepoError::NotFound { at }) if repo_path.is_none() => {
+            // Not finding a repo is not fatal yet, if `-R` was not given
+            Err(NoRepoInCwdError { cwd: at })
+        }
+        Err(error) => early_exit(&non_repo_config, error.into()),
+    };
+
+    let config = if let Ok(repo) = &repo_result {
+        repo.config()
+    } else {
+        &non_repo_config
+    };
+
+    let mut config_cow = Cow::Borrowed(config);
+    config_cow.to_mut().apply_plain(PlainInfo::from_env());
+    if !ui::plain(Some("tweakdefaults"))
+        && config_cow
+            .as_ref()
+            .get_bool(b"ui", b"tweakdefaults")
+            .unwrap_or_else(|error| early_exit(config, error.into()))
+    {
+        config_cow.to_mut().tweakdefaults()
+    };
+    let config = config_cow.as_ref();
+    let ui = Ui::new(config)
+        .unwrap_or_else(|error| early_exit(config, error.into()));
+
+    if let Ok(true) = config.get_bool(b"rhg", b"fallback-immediately") {
+        exit(
+            &argv,
+            &initial_current_dir,
+            &ui,
+            OnUnsupported::fallback(config),
+            Err(CommandError::unsupported(
+                "`rhg.fallback-immediately is true`",
+            )),
+            false,
+        )
+    }
+
+    repo_span.exit();
+    let result = main_with_result(
+        argv.iter().map(|s| s.to_owned()).collect(),
+        &process_start_time,
+        &ui,
+        repo_result.as_ref(),
+        config,
+    );
+
+    #[cfg(feature = "full-tracing")]
+    // The `Drop` implementation doesn't flush, probably because it would be
+    // too expensive in the general case? Not sure, but we want it.
+    chrome_layer_guard.flush();
+    #[cfg(feature = "full-tracing")]
+    // Explicitly run `drop` here to wait for the writing thread to join
+    // because `drop` may not be called when `std::process::exit` is called.
+    drop(chrome_layer_guard);
+    simple_exit(&ui, config, result)
+}
+
+#[cfg(feature = "full-tracing")]
+/// Enable an env-filtered chrome-trace logger to a file.
+/// Defaults to writing to `./trace-{unix epoch in micros}.json`, but can
+/// be overridden via the `HG_TRACE_PATH` environment variable.
+fn setup_tracing() -> FlushGuard {
+    let mut chrome_layer_builder = ChromeLayerBuilder::new();
+    // /!\ Keep in sync with hg-pyo3
+    if let Ok(path) = std::env::var("HG_TRACE_PATH") {
+        chrome_layer_builder = chrome_layer_builder.file(path);
+    }
+    let (chrome_layer, chrome_layer_guard) = chrome_layer_builder.build();
+    tracing_subscriber::registry()
+        .with(EnvFilter::from_default_env())
+        .with(chrome_layer)
+        .init();
+    chrome_layer_guard
+}
+
+#[cfg(not(feature = "full-tracing"))]
+/// Enable an env-filtered logger to stderr
+fn setup_tracing() {
+    let registry = tracing_subscriber::registry()
+        .with(EnvFilter::from_default_env());
+    // A single `fmt` layer below writes to stderr, keeping log output off stdout.
+    let fmt_layer = tracing_subscriber::fmt::layer()
+        .with_writer(std::io::stderr)
+        .with_span_events(FmtSpan::CLOSE);
+    registry.with(fmt_layer).init()
+}
+
+#[tracing::instrument(level = "debug", skip_all)]
+fn config_setup(
+    argv: &[OsString],
+    early_args: EarlyArgs,
+    initial_current_dir: &Option<PathBuf>,
+) -> (Config, Option<PathBuf>) {
     let mut non_repo_config =
         Config::load_non_repo().unwrap_or_else(|error| {
             // Normally this is decided based on config, but we don’t have that
@@ -187,8 +325,8 @@
             let on_unsupported = OnUnsupported::Abort;
 
             exit(
-                &argv,
-                &initial_current_dir,
+                argv,
+                initial_current_dir,
                 &Ui::new_infallible(&Config::empty()),
                 on_unsupported,
                 Err(error.into()),
@@ -200,8 +338,8 @@
         .load_cli_args(early_args.config, early_args.color)
         .unwrap_or_else(|error| {
             exit(
-                &argv,
-                &initial_current_dir,
+                argv,
+                initial_current_dir,
                 &Ui::new_infallible(&non_repo_config),
                 OnUnsupported::from_config(&non_repo_config),
                 Err(error.into()),
@@ -219,8 +357,8 @@
         }
         if SCHEME_RE.is_match(repo_path_bytes) {
             exit(
-                &argv,
-                &initial_current_dir,
+                argv,
+                initial_current_dir,
                 &Ui::new_infallible(&non_repo_config),
                 OnUnsupported::from_config(&non_repo_config),
                 Err(CommandError::UnsupportedFeature {
@@ -302,76 +440,7 @@
                 .or_else(|| Some(get_path_from_bytes(&repo_arg).to_path_buf()))
         }
     };
-
-    let simple_exit =
-        |ui: &Ui, config: &Config, result: Result<(), CommandError>| -> ! {
-            exit(
-                &argv,
-                &initial_current_dir,
-                ui,
-                OnUnsupported::from_config(config),
-                result,
-                // TODO: show a warning or combine with original error if
-                // `get_bool` returns an error
-                non_repo_config
-                    .get_bool(b"ui", b"detailed-exit-code")
-                    .unwrap_or(false),
-            )
-        };
-    let early_exit = |config: &Config, error: CommandError| -> ! {
-        simple_exit(&Ui::new_infallible(config), config, Err(error))
-    };
-    let repo_result = match Repo::find(&non_repo_config, repo_path.to_owned())
-    {
-        Ok(repo) => Ok(repo),
-        Err(RepoError::NotFound { at }) if repo_path.is_none() => {
-            // Not finding a repo is not fatal yet, if `-R` was not given
-            Err(NoRepoInCwdError { cwd: at })
-        }
-        Err(error) => early_exit(&non_repo_config, error.into()),
-    };
-
-    let config = if let Ok(repo) = &repo_result {
-        repo.config()
-    } else {
-        &non_repo_config
-    };
-
-    let mut config_cow = Cow::Borrowed(config);
-    config_cow.to_mut().apply_plain(PlainInfo::from_env());
-    if !ui::plain(Some("tweakdefaults"))
-        && config_cow
-            .as_ref()
-            .get_bool(b"ui", b"tweakdefaults")
-            .unwrap_or_else(|error| early_exit(config, error.into()))
-    {
-        config_cow.to_mut().tweakdefaults()
-    };
-    let config = config_cow.as_ref();
-    let ui = Ui::new(config)
-        .unwrap_or_else(|error| early_exit(config, error.into()));
-
-    if let Ok(true) = config.get_bool(b"rhg", b"fallback-immediately") {
-        exit(
-            &argv,
-            &initial_current_dir,
-            &ui,
-            OnUnsupported::fallback(config),
-            Err(CommandError::unsupported(
-                "`rhg.fallback-immediately is true`",
-            )),
-            false,
-        )
-    }
-
-    let result = main_with_result(
-        argv.iter().map(|s| s.to_owned()).collect(),
-        &process_start_time,
-        &ui,
-        repo_result.as_ref(),
-        config,
-    );
-    simple_exit(&ui, config, result)
+    (non_repo_config, repo_path)
 }
 
 fn main() -> ! {
@@ -445,8 +514,8 @@
             ));
             on_unsupported = OnUnsupported::Abort
         } else {
-            log::debug!("falling back (see trace-level log)");
-            log::trace!("{}", String::from_utf8_lossy(message));
+            tracing::debug!("falling back (see trace-level log)");
+            tracing::trace!("{}", String::from_utf8_lossy(message));
             if let Err(err) = which::which(executable_path) {
                 exit_no_fallback(
                     ui,
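
With `full-tracing`, main() keeps the `FlushGuard` returned by setup_tracing() and flushes and drops it explicitly before exiting, because `std::process::exit` does not run destructors. A minimal sketch of why that matters, reusing only the tracing-chrome calls that appear in the hunk above (the rest is illustrative):

```rust
use tracing_subscriber::prelude::*;

fn main() {
    // Build the chrome-trace layer; the guard owns the writer thread.
    let (chrome_layer, guard) = tracing_chrome::ChromeLayerBuilder::new().build();
    tracing_subscriber::registry().with(chrome_layer).init();

    tracing::info_span!("work").in_scope(|| {
        tracing::info!("traced event");
    });

    // `std::process::exit` skips Drop, so flush and drop the guard by hand
    // to let the writer thread finish the JSON trace file first.
    guard.flush();
    drop(guard);
    std::process::exit(0);
}
```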
diff --git a/setup.py b/setup.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_c2V0dXAucHk=..48dfba3df18ce7311ebc2eb22932589da8d0b253_c2V0dXAucHk= 100644
--- a/setup.py
+++ b/setup.py
@@ -346,6 +346,11 @@
                     msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
                     print(msg, file=sys.stderr)
                 hgrustext = None
+            if hgrustext == "cpython":
+                logging.warning(
+                    "HGWITHRUSTEXT is deprecated and \
+                    will be removed in Mercurial 7.1"
+                )
             self.rust = hgrustext is not None
             self.no_rust = not self.rust
         return ret
@@ -1160,7 +1165,7 @@
 
             env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
 
-        # Wildy shooting in the dark to make sure rust-cpython use the right
+        # Wildly shooting in the dark to make sure rust-pyo3 uses the right
         # python
         if not sys.executable:
             msg = "Cannot determine which Python to compile Rust for"
@@ -1286,11 +1291,6 @@
 if os.name != 'nt':
     extmodules += [
         RustStandaloneExtension(
-            'mercurial.rustext',
-            'hg-cpython',
-            'librusthg',
-        ),
-        RustStandaloneExtension(
             'mercurial.pyo3_rustext',
             'hg-pyo3',
             'librusthgpyo3',
diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_dGVzdHMvdGVzdC1wZXJzaXN0ZW50LW5vZGVtYXAudA==..48dfba3df18ce7311ebc2eb22932589da8d0b253_dGVzdHMvdGVzdC1wZXJzaXN0ZW50LW5vZGVtYXAudA== 100644
--- a/tests/test-persistent-nodemap.t
+++ b/tests/test-persistent-nodemap.t
@@ -73,24 +73,6 @@
 
 #endif
 
-#if rust
-
-Regression test for a previous bug in Rust/C FFI for the `Revlog_CAPI` capsule:
-in places where `mercurial/cext/revlog.c` function signatures use `Py_ssize_t`
-(64 bits on Linux x86_64), corresponding declarations in `rust/hg-cpython/src/cindex.rs`
-incorrectly used `libc::c_int` (32 bits).
-As a result, -1 passed from Rust for the null revision became 4294967295 in C.
-
-  $ hg log -r 00000000
-  changeset:   -1:000000000000
-  tag:         tip
-  user:        
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  
-
-#endif
-
-
   $ hg debugformat
   format-variant     repo
   fncache:            yes
diff --git a/tests/test-rust-ancestor.py b/tests/test-rust-ancestor.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_dGVzdHMvdGVzdC1ydXN0LWFuY2VzdG9yLnB5..48dfba3df18ce7311ebc2eb22932589da8d0b253_dGVzdHMvdGVzdC1ydXN0LWFuY2VzdG9yLnB5 100644
--- a/tests/test-rust-ancestor.py
+++ b/tests/test-rust-ancestor.py
@@ -28,16 +28,6 @@
     good enough.
 
     Algorithmic correctness is asserted by the Rust unit tests.
-
-    At this point, we have two sets of bindings, in `hg-cpython` and
-    `hg-pyo3`. This class used to be for the first and now contains
-    the tests that are identical in both bindings. As of this writing,
-    there are more implementations in `hg-cpython` than `hg-pyo3`, hence
-    some more tests in the subclass for `hg-cpython`. When the work on PyO3
-    is complete, the subclasses for `hg-cpython` should have no specific
-    test left. Later on, when we remove the dead code in `hg-cpython`, the tests
-    should migrate from the mixin to the class for `hg-pyo3`, until we can
-    simply remove the mixin.
     """
 
     @classmethod
@@ -148,7 +138,6 @@
             AncestorsIterator(idx, [1], -1, False)
         exc = arc.exception
         self.assertIsInstance(exc, ValueError)
-        # rust-cpython issues appropriate str instances for Python 2 and 3
         self.assertEqual(exc.args, ('ParentOutOfRange', 1))
 
     def testwdirunsupported(self):
@@ -162,7 +151,6 @@
 
         exc = arc.exception
         self.assertIsInstance(exc, ValueError)
-        # rust-cpython issues appropriate str instances for Python 2 and 3
         self.assertEqual(exc.args, ('InvalidRevision', wdirrev))
 
     def testheadrevs(self):
diff --git a/tests/test-rust-discovery.py b/tests/test-rust-discovery.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_dGVzdHMvdGVzdC1ydXN0LWRpc2NvdmVyeS5weQ==..48dfba3df18ce7311ebc2eb22932589da8d0b253_dGVzdHMvdGVzdC1ydXN0LWRpc2NvdmVyeS5weQ== 100644
--- a/tests/test-rust-discovery.py
+++ b/tests/test-rust-discovery.py
@@ -3,9 +3,7 @@
 from mercurial import policy
 from mercurial.testing import revlog as revlogtesting
 
-PartialDiscovery = policy.importrust(
-    'discovery', member='PartialDiscovery', pyo3=True
-)
+PartialDiscovery = policy.importrust('discovery', member='PartialDiscovery')
 
 try:
     from mercurial.cext import parsers as cparsers
@@ -50,7 +48,7 @@
     "rustext or the C Extension parsers module "
     "discovery relies on is not available",
 )
-class rustdiscoverytest(revlogtesting.PyO3RevlogBasedTestBase):
+class rustdiscoverytest(revlogtesting.RustRevlogBasedTestBase):
     """Test the correctness of binding to Rust code.
 
     This test is merely for the binding to Rust itself: extraction of
diff --git a/tests/test-rust-revlog.py b/tests/test-rust-revlog.py
index 0471bf04ddf1231456a848d0965b3d05bf00bf8f_dGVzdHMvdGVzdC1ydXN0LXJldmxvZy5weQ==..48dfba3df18ce7311ebc2eb22932589da8d0b253_dGVzdHMvdGVzdC1ydXN0LXJldmxvZy5weQ== 100644
--- a/tests/test-rust-revlog.py
+++ b/tests/test-rust-revlog.py
@@ -7,5 +7,5 @@
 from mercurial import error
 
 try:
-    from mercurial import rustext
+    from mercurial import pyo3_rustext
 
@@ -11,3 +11,3 @@
 
-    rustext.__name__  # trigger immediate actual import
+    pyo3_rustext.__name__  # trigger immediate actual import
 except ImportError:
@@ -13,4 +13,4 @@
 except ImportError:
-    rustext = None
+    pyo3_rustext = None
 else:
     # this would fail already without appropriate ancestor.__package__
@@ -15,9 +15,9 @@
 else:
     # this would fail already without appropriate ancestor.__package__
-    from mercurial.rustext.ancestor import LazyAncestors
+    from mercurial.pyo3_rustext.ancestor import LazyAncestors
 
 from mercurial.testing import revlog as revlogtesting
 
 header = struct.unpack(">I", revlogtesting.data_non_inlined[:4])[0]
 
 
@@ -18,12 +18,11 @@
 
 from mercurial.testing import revlog as revlogtesting
 
 header = struct.unpack(">I", revlogtesting.data_non_inlined[:4])[0]
 
 
-class RustInnerRevlogTestMixin:
-    """Common tests for both Rust Python bindings."""
-
+# Conditional skipping done by the base class
+class RustInnerRevlogTest(revlogtesting.RustRevlogBasedTestBase):
     node_hex0 = b'd1f4bbb0befc13bd8cd39d0fcdd93b8c078c4a2f'
     node0 = node_bin(node_hex0)
     bogus_node_hex = b'cafe' * 10
@@ -170,13 +169,6 @@
             # well our data file does not even exist
             self.assertTrue(b"when reading Just a path/test.d" in exc.args[0])
 
-
-# Conditional skipping done by the base class
-class RustInnerRevlogTest(
-    revlogtesting.RustRevlogBasedTestBase, RustInnerRevlogTestMixin
-):
-    """For reference"""
-
     def test_ancestors(self):
         rustidx = self.parserustindex()
         lazy = LazyAncestors(rustidx, [3], 0, True)
@@ -197,13 +189,6 @@
         self.assertEqual(irl.canonical_index_file, b'test.i')
 
 
-# Conditional skipping done by the base class
-class PyO3InnerRevlogTest(
-    revlogtesting.PyO3RevlogBasedTestBase, RustInnerRevlogTestMixin
-):
-    """Testing new PyO3 bindings, by comparison with rust-cpython bindings."""
-
-
 if __name__ == '__main__':
     import silenttestrunner