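"""Benchmarks timing basic Mercurial commands (run under ASV)."""
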
from __future__ import print_function

import os
import os.path
import time
from .utils import (
    BaseTestSuite,
    benchmark_name,
    median,
    params_as_kwargs,
)


class TestSuite(BaseTestSuite):
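    """Time ``hg commit`` in the benchmark repositories.

    The extra ``mode`` parameter adds two variants: ``create`` (commit a
    brand new file) and ``update`` (commit a change to an existing file).
    """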

    timeout = 300

    params = BaseTestSuite.params + [("create", "update")]
    param_names = BaseTestSuite.param_names + ["mode"]

    @benchmark_name('simple_command.write.track_commit')
    @params_as_kwargs
    def track_commit(self, mode, **kwargs):
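        """Measure the wall-clock time of a single ``hg commit``.

        Each iteration stages either a new file (``create`` mode) or an
        append to an existing tracked file (``update`` mode), commits it,
        then rolls the commit back so the repository is left unchanged.
        Returns the median of the measured commit times.
        """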
        timings = []

        # purge any uncommitted changes
        self.hg('revert', '--all')
        self.hg('--config', 'extensions.purge=', 'purge', '--all')
        # make sure the dirstate is up to date.
        self.hg('status')

        if mode == 'create':
            with open(os.path.join(self.repo_path, 'BABAR'), 'w') as f:
                f.write("BABAR")
        else:
            # pick a filename to update (the first entry in the manifest)
            filename = self.hg('manifest').partition('\n')[0]
            if not filename:
                msg = 'no revision checked out in repo: %s' % self.repo_path
                raise ValueError(msg)
            filename = os.path.join(self.repo_path, filename)

        # Do the commit N times; stop early after 60 seconds once at least
        # 3 samples have been collected (the cap of 100 iterations is an
        # assumed upper bound).
        start = time.time()
        for i in range(100):
            needrollback = False
            elapsed = time.time() - start
            if (elapsed >= 60) and i >= 3:
                break
            try:
                if mode == 'update':
                    with open(filename, 'a') as target:
                        target.write(
                            'The quick brown fox jumps over the lazy dog\n')
                else:
                    self.hg('add', 'BABAR')

                before = time.time()
                self.hg('commit', '-m', 'My commit message',
                        '-u', '<test@octobus.net>')
                after = time.time()
                needrollback = True
                timings.append(after - before)
            finally:
                # Rollback and clean
                if needrollback:
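                    # ui.rollback=true is passed explicitly so the rollback
                    # still works if it is disabled in the host configuration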
                    self.hg('rollback', '--config', 'ui.rollback=true')
                self.hg('update', '-C', '.')

        return median(timings)


# class TimeTestSuite(BaseTestSuite):
# 
#     def time_emptystatus(self, *args, **kwargs):
#         self.hg('status')
# 
#     def time_status_tip(self, *args, **kwargs):
#         self.hg('status', '--change', 'tip')
# 
#     def time_emptydiff(self, *args, **kwargs):
#         self.hg('diff')
# 
#     def time_diff_tip(self, *args, **kwargs):
#         self.hg('diff', '-c', 'tip')
# 
#     def time_log_tip(self, *args, **kwargs):
#         self.hg("log", "-r", "tip")
# 
#     def time_summary(self, *args, **kwargs):
#         self.hg("summary")
# 
#     def time_version(self, *args, **kwargs):
#         self.hg("--version")
# 
#     def time_bookmarks(self, *args, **kwargs):
#         self.hg("bookmarks")
# 
#     def time_id(self, *args, **kwargs):
#         self.hg("id")
# 
#     def time_id_current(self, *args, **kwargs):
#         self.hg("id", "-r", ". ")
# 
#     def time_manifest_all(self, *args, **kwargs):
#         self.hg("manifest", "--all")
# 
#     def time_files(self, *args, **kwargs):
#         self.hg('files', '-r', 'tip')
# 
# 
# class ArchiveTimeTestSuite(BaseTestSuite):
#     # Work around the repeat count: mozilla-central and netbeans are very
#     # slow to archive, while mercurial's own archive takes about a second,
#     # so it would otherwise use more runs.
# 
#     timeout = 300
#     param_names = TimeTestSuite.param_names + ['type']
#     # The "file" type have been disabled. It does not yield very different
#     # result than tar and taks a lot of time. creating and deleting many file
#     # is expensive compared to creating and deleting a single tar
#     #
#     # params = TimeTestSuite.params + [['files', 'tar']]
#     params = TimeTestSuite.params + [['tar']]
# 
#     def setup(self, *args, **kwargs):
#         super(ArchiveTimeTestSuite, self).setup(*args, **kwargs)
#         self.output_dir = tempfile.mkdtemp()
#         self.output = os.path.join(self.output_dir, 'archive')
# 
#     def teardown(self, *args, **kwargs):
#         shutil.rmtree(self.output_dir)
# 
#     @params_as_kwargs
#     def time_archive(self, repo, **kwargs):
#         # asv shares the same temporary directory for all combinations,
#         # so use a unique output name
#         self.hg('archive',
#                 '--type', kwargs["type"],
#                 '--rev', 'tip',
#                 self.output)
# 
# 
# class LogTimeTestSuite(BaseNChangesetsTestSuite):
# 
#     timeout = 300
# 
#     @params_as_kwargs
#     def time_log_history(self, repo, changesets, **kwargs):
#         self.hg("log", "-r", "-%d:" % changesets)
# 
# 
# class UpdateTimeTestSuite(BaseNChangesetsTestSuite):
# 
#     timeout = 500
# 
#     @params_as_kwargs
#     def time_up_tip(self, repo, changesets, **kwargs):
#         self.hg("up", "-r", "tip~%d" % changesets)
#         self.hg("up", "-r", "tip")
# 
# 
# class BundleTimeTestSuite(BaseNChangesetsTestSuite):
# 
#     timeout = 500
# 
#     @params_as_kwargs
#     def time_bundle(self, repo, changesets, **kwargs):
#         self.hg("bundle", "--base", ":(-%d)" % (changesets+1), "/tmp/bundle.bundle")
# 
# class ExchangeTimeSuite(BaseExchangeTimeSuite):
# 
#     def time_incoming(self, *args, **kwargs):
#         self.run(self.clone_path, 'incoming', self.repo_path)
# 
#     def time_outgoing(self, *args, **kwargs):
#         self.run(self.repo_path, 'outgoing', self.clone_path)

# class UnbundleTimeSuite(BaseExchangeTimeSuite):
#     params = BaseTestSuite.params + [
#         STRIP_VARIANTS_LIST]
#     param_names = BaseTestSuite.param_names + ['strip']
# 
#     @params_as_kwargs
#     def setup(self, repo, strip, **kwargs):
#         super(UnbundleTimeSuite, self).setup(repo, repo_type="local", strip=strip, revset=None, **kwargs)
# 
#         partial_sets = REPO_DETAILS[self.repo_name]['reference-repo']['partial-sets']
# 
#         if strip not in partial_sets:
#             # Only test for strip variants specified in the .benchrepo
#             raise NotImplementedError("Strip variant {} not used for repo {}".format(strip, repo))
# 
#         tmpdir = os.path.join(REPOS_DIR, 'runtime-clones')
#         try:
#             os.makedirs(tmpdir)
#         except OSError as exc:
#             if exc.errno != errno.EEXIST:
#                 raise
# 
#         self.tmp_clone_path = os.path.join(tmpdir, 'clone-{}'.format(
#             os.path.basename(self.clone_path)))
#         # XXX: This should be deleted at the end but teardown, like setup, is
#         # called for each repeat...
#         self._rsync(self.clone_path, self.tmp_clone_path)
#         # Wait for everything to be written to disk so that disk I/O wait
#         # does not impact the performance metrics
#         self.check_output('sync')
# 
#         partial_set = partial_sets[strip]
# 
#         if not partial_set:
#             raise NotImplementedError("No strip revset, ignoring.")
# 
#         strip_revset = partial_set['remove']
# 
#         self.hg("bundle", "--base", "not(%s)" % strip_revset, "/tmp/bundle.bundle")
# 
#     def time_debugunbundle(self, *args, **kwargs):
#         self.run(self.tmp_clone_path, 'unbundle', "/tmp/bundle.bundle",
#                  expected_return_code=0)
# 
# 
# class BasePushPullTimeSuite(BaseExchangeTimeSuite):
# 
#     # Force setup to be called between two pushes or pulls
#     warmup_time = 0
# 
#     def setup(self, *args, **kwargs):
#         super(BasePushPullTimeSuite, self).setup(*args, **kwargs)
#         tmpdir = os.path.join(REPOS_DIR, 'runtime-clones')
#         try:
#             os.makedirs(tmpdir)
#         except OSError as exc:
#             if exc.errno != errno.EEXIST:
#                 raise
#         self.tmp_clone_path = os.path.join(tmpdir, 'clone-{}'.format(
#             os.path.basename(self.clone_path)))
#         # XXX: This should be deleted at the end but teardown, like setup, is
#         # called for each repeat...
#         self._rsync(self.clone_path, self.tmp_clone_path)
#         # Wait for everything to be written to disk so that disk I/O wait
#         # does not impact the performance metrics
#         self.check_output('sync')
# 
#     def _time_push(self, *args, **kwargs):
#         self.run(self.repo_path, ['push', '-f'], self.tmp_clone_path)
# 
#     def _time_pull(self, *args, **kwargs):
#         self.run(self.tmp_clone_path, 'pull', self.repo_path,
#                  expected_return_code=0)
# 
# class NoOpPushPullTimeSuite(BasePushPullTimeSuite):
# 
#     params = BaseTestSuite.params + [
#         ['local', 'ssh', 'http'],
#         ["same"], # Force the strip variant to same
#         [None, 'tip']]
# 
#     def time_push(self, *args, **kwargs):
#         self._time_push()
# 
#     def time_pull(self, *args, **kwargs):
#         self._time_pull()
# 
# 
# class SmallPushPullTimeSuite(BasePushPullTimeSuite):
# 
#     params = BaseTestSuite.params + [
#         ['local', 'ssh', 'http'],
#         ["last-ten", "last-hundred"], # Only the usual strip sizes
#         [None, 'tip']]
# 
#     def time_push(self, *args, **kwargs):
#         self._time_push()
# 
#     def time_pull(self, *args, **kwargs):
#         self._time_pull()
# 
# class BigPushPullTimeSuite(BasePushPullTimeSuite):
# 
#     params = BaseTestSuite.params + [
#         ['local', 'ssh', 'http'],
#         ["last-thousand"], # All the big strip variants
#         [None, 'tip']]
# 
#     def time_push(self, *args, **kwargs):
#         self._time_push()
# 
#     def time_pull(self, *args, **kwargs):
#         self._time_pull()
#
#
# class CloneTimeSuite(BaseExchangeMixin, BaseTestSuite):
#     param_names = BaseTestSuite.param_names + ['repo_type', 'revset']
#     params = BaseTestSuite.params + [['local', 'ssh', 'http'], [None, 'tip']]

#     timer = timeit.default_timer
#     number = 1
#     warmup_time = 0
#     timeout = 2700

#     def setup(self, *args, **kwargs):
#         args = list(args)
#         repo_name = args[self.param_names.index('repo')]
#         revset = args.pop(-1)
#         repo_type = args.pop(-1)
#         super(CloneTimeSuite, self).setup(*args, **kwargs)
#         self._cleanup_paths = []
#         self._setup_repo_type(repo_type)
#         self._setup_revset(revset)
#         self.clone_path = os.path.join(REPOS_DIR, repo_name)
#         if repo_name in ('mercurial-2017', 'pypy-2017'):
#             # small repositories
#             # run 20 times or less than 5 minutes
#             self.repeat = 20
#             self.sample_time = (5 * 60) / (self.repeat * 1.3)
#         elif repo_name in ('mozilla-central-2017', 'netbeans-2017'):
#             # big repositories
#             # run 3 times or less than 45 minutes
#             self.repeat = 3
#             self.sample_time = (45 * 60) / (self.repeat * 1.3)
#         else:
#             raise NotImplementedError('unconfigured timeout for repository %s' % repo_name)

#     def teardown(self, *args, **kwargs):
#         self._teardown_repo_type()
#         for path in self._cleanup_paths:
#             shutil.rmtree(path, ignore_errors=True)
#         self._cleanup_paths = []
#
#     def time_clone(self, *args, **kwargs):
#         cmd = ['clone', '--pull']
#         tmp_clone_path = os.path.abspath(tempfile.mkdtemp(dir='.'))
#         self._cleanup_paths.append(tmp_clone_path)
#         cmd.extend(self._remote_path_cmd(self.clone_path))
#         cmd.append(tmp_clone_path)
#         if self.rev:
#             cmd.extend(['-r', self.rev])
#         self.hg(*cmd)

# class CloneStreamTimeSuite(BaseExchangeMixin, BaseTestSuite):
#     param_names = BaseTestSuite.param_names + ['repo_type', 'update']
#     params = BaseTestSuite.params + [['local', 'ssh', 'http'], [False, True]]

#     timer = timeit.default_timer
#     # run 5 times or less than 15 minutes
#     number = 1
#     repeat = 5
#     warmup_time = 0
#     sample_time = (15 * 60) / (repeat * 1.3)
#     timeout = 2700

#     def setup(self, *args, **kwargs):
#         args = list(args)
#         repo_name = args[self.param_names.index('repo')]
#         self.update = args.pop(-1)
#         repo_type = args.pop(-1)
#         super(CloneStreamTimeSuite, self).setup(*args, **kwargs)
#         self._cleanup_paths = []
#         self._setup_repo_type(repo_type)
#         self.clone_path = os.path.join(REPOS_DIR, repo_name)

#     def teardown(self, *args, **kwargs):
#         self._teardown_repo_type()
#         for path in self._cleanup_paths:
#             shutil.rmtree(path, ignore_errors=True)
#         self._cleanup_paths = []

#     def time_clone_stream(self, *args, **kwargs):
#         cmd = ['clone', '--stream']
#         tmp_clone_path = os.path.abspath(tempfile.mkdtemp(dir='.'))
#         self._cleanup_paths.append(tmp_clone_path)
#         cmd.extend(self._remote_path_cmd(self.clone_path))
#         cmd.append(tmp_clone_path)
#         if not self.update:
#             cmd.append('--noupdate')
#         self.hg(*cmd)