from __future__ import print_function

import errno
import os
import pipes
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import threading
import time

from .utils import (
    BaseNChangesetsTestSuite,
    BaseTestSuite,
    params_as_kwargs,
    median,
    REPOS_DIR,
    REPO_DETAILS,
    ROLES,
    STRIP_VARIANTS_LIST,
    not_compatible_with,
)

if sys.version_info[0] == 2:
    import Queue as queue
    from urllib import quote_plus
else:
    import queue
    from urllib.parse import quote_plus
class TestSuite(BaseTestSuite):
params = BaseTestSuite.params + [("create", "update")]
param_names = BaseTestSuite.param_names + ["mode"]
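    # asv reports the value returned by a ``track_*`` method as the metric;
    # here it is the median wall-clock time of ten successive commits.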
@params_as_kwargs
def track_commit(self, mode, **kwargs):
        timings = []
        filename = None
self.hg('revert', '--all')
self.hg('--config', 'extensions.purge=', 'purge', '--all')
self.hg('status')
if mode == 'create':
with open(os.path.join(self.repo_path, 'BABAR'), 'w') as f:
f.write("BABAR")
else:
# pick filename to update
filename = self.hg('manifest').partition('\n')[0]
if not filename:
msg = 'no revision checked out in repo: %s' % self.repo_path
raise ValueError(msg)
filename = os.path.join(self.repo_path, filename)
        for i in range(10):
            needrollback = False
            try:
                if mode == 'update':
                    with open(filename, 'a') as target:
                        target.write(
                            'The quick brown fox jumps over the lazy dog\n')
                else:
                    self.hg('add', 'BABAR')
                before = time.time()
                self.hg('commit', '-m', 'My commit message')
                needrollback = True
                after = time.time()
                timings.append(after - before)
            finally:
                # Rollback and clean up so every iteration starts from the
                # same working-copy state
                if needrollback:
                    self.hg('rollback', '--config', 'ui.rollback=true')
                self.hg('debugupdatecache')
        return median(timings)
class TimeTestSuite(BaseTestSuite):
    def time_manifest_all(self, *args, **kwargs):
        self.hg('manifest', '--all')

    def time_files(self, *args, **kwargs):
        self.hg('files', '-r', 'tip')
class ArchiveTimeTestSuite(BaseTestSuite):
    # Work around asv's repeat heuristic: archiving mozilla-central and
    # netbeans is very slow, while mercurial's own archive takes about a
    # second and would otherwise get many more runs.
    timeout = 300
    param_names = TimeTestSuite.param_names + ['type']
    # The "files" type has been disabled. It does not yield very different
    # results than tar and takes a lot of time: creating and deleting many
    # files is expensive compared to creating and deleting a single tar.
    #
    # params = TimeTestSuite.params + [['files', 'tar']]
    params = TimeTestSuite.params + [['tar']]
def setup(self, *args, **kwargs):
super(ArchiveTimeTestSuite, self).setup(*args, **kwargs)
self.output_dir = tempfile.mkdtemp()
self.output = os.path.join(self.output_dir, 'archive')
def teardown(self, *args, **kwargs):
shutil.rmtree(self.output_dir)
    @params_as_kwargs
    def time_archive(self, repo, **kwargs):
        # asv shares the same temporary directory for all combinations,
        # so use a unique output name
        self.hg('archive', '--type', kwargs['type'], self.output)
class LogTimeTestSuite(BaseNChangesetsTestSuite):
@params_as_kwargs
def time_log_history(self, repo, changesets, **kwargs):
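        # negative revision numbers count back from tip, so "-N:" selects
        # the N most recent changesets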
self.hg("log", "-r", "-%d:" % changesets)
class UpdateTimeTestSuite(BaseNChangesetsTestSuite):
@params_as_kwargs
def time_up_tip(self, repo, changesets, **kwargs):
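        # jump N changesets back, then (below) return to tip, so the
        # measured update cost scales with the distance between revisions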
self.hg("up", "-r", "tip~%d" % changesets)
self.hg("up", "-r", "tip")
class BundleTimeTestSuite(BaseNChangesetsTestSuite):
@params_as_kwargs
def time_bundle(self, repo, changesets, **kwargs):
self.hg("bundle", "--base", ":(-%d)" % (changesets+1), "/tmp/bundle.bundle")
class HgWeb(object):
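    """Serve the benchmark repositories over HTTP via ``hg serve``.

    Typical lifecycle (a sketch; ``hgpath`` and ``environ`` come from the
    calling test suite):

        hgweb = HgWeb()
        port = hgweb.start(hgpath, environ)
        try:
            ...  # benchmark against http://localhost:<port>/<repo>
        finally:
            hgweb.stop()
    """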
def __init__(self):
super(HgWeb, self).__init__()
self.queue = queue.Queue()
self.proc = None
self.thread = None
    def start(self, hgpath, environ):
        config = os.path.abspath(os.path.join(
            os.path.dirname(__file__), os.pardir, 'hgweb.config'))
        hgweb_cmd = [
            hgpath, 'serve', '--cwd', REPOS_DIR,
            # '-p 0' lets the OS pick a free port; the actual port is read
            # back from the status line below
            '-a', 'localhost', '-p', '0',
            '--config', 'web.push_ssl=False',
            '--config', 'web.allow_push=*',
            '--webdir-conf', config]
        self.proc = subprocess.Popen(hgweb_cmd, env=environ,
                                     stdout=subprocess.PIPE)
# we have to read output in a thread to avoid deadlocks
self.thread = threading.Thread(
target=self._enqueue, args=(self.queue, self.proc.stdout))
self.thread.daemon = True
self.thread.start()
        # wait for the server to be started
        statusline = self.queue.get()
        if not statusline:
            self.stop()
            raise RuntimeError('hg serve has crashed')
        return re.search(br':(\d+)', statusline).groups()[0].decode('ascii')
@staticmethod
def _enqueue(queue, fd):
while True:
data = fd.readline()
if not data:
break
queue.put(data)
queue.put(None)
def stop(self):
self.proc.kill()
self.proc.wait()
self.thread.join()
class BaseExchangeMixin(object):
    # this helps recovering from a failure during setup
_hgserve = None
def _remote_path_cmd(self, path):
if self.repo_type == 'local':
return [path]
elif self.repo_type == 'ssh':
            # wrap hg in a script that re-creates our controlled environment
            # on the other side of the ssh hop
            with open('hg_wrapper', 'w') as f:
                f.write('#!/bin/sh\nexec env -i {} {} $*\n'.format(
                    ' '.join(['{}={}'.format(k, pipes.quote(v))
                              for k, v in self.environ.items()]),
                    os.path.abspath(self.hgpath)))
            st = os.stat('hg_wrapper')
            os.chmod('hg_wrapper', st.st_mode | stat.S_IEXEC)
            return [
                '--remotecmd', os.path.abspath('hg_wrapper'),
                'ssh://localhost/{}'.format(os.path.abspath(path))]
elif self.repo_type == 'http':
            path = os.path.abspath(path)
            repo_dir = os.path.abspath(REPOS_DIR)
            assert path.startswith(repo_dir), path
            return ['http://localhost:{}/{}'.format(
                self.hgport, path[len(repo_dir) + 1:])]
else:
raise NotImplementedError
def _setup_repo_type(self, repo_type):
"""setup a hgweb server if we have to"""
self._hgserve = None
if repo_type == 'http' and self.get_asv_rev() in self.get_skip()['hgweb']:
raise NotImplementedError
self.repo_type = repo_type
if repo_type == 'http':
self._hgserve = HgWeb()
self.hgport = self._hgserve.start(self.hgpath, self.environ)
def _teardown_repo_type(self):
if self._hgserve is not None:
self._hgserve.stop()
self._hgserve = None
def _setup_revset(self, revset):
"""If the operation target a specific revision, resolve it beforehand"""
if revset is not None:
self.rev = self.hg('identify', '-i', '-r', revset).strip()
else:
self.rev = None
class classproperty(object):
def __init__(self, f):
self.f = f
def __get__(self, obj, owner):
return self.f(owner)
def setup_role(cls):
"""install the right partial variants for the configured action"""
cls.params = cls.params[:]
cls.params[cls._partials_idx] = list(sorted(cls.role_data))
return cls
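# For example, applied to the discovery suites below, @setup_role replaces
# the strip-variants axis of ``params`` with only the partial variants for
# which ROLES has data for (role_action, role_subtype).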
class BaseExchangeTimeSuite(BaseExchangeMixin, BaseTestSuite):
# the exchange action we measure
role_action = None
# the subtypes of this action we measure
role_subtype = None
param_names = BaseTestSuite.param_names + [
'repo_type', 'strip', 'revset']
params = BaseTestSuite.params + [['local', 'ssh', 'http']]
_partials_idx = len(params)
params += [STRIP_VARIANTS_LIST]
params += [[None, 'tip']]
@classproperty
def role_data(cls):
"""{"partial-key" -> {"repo-key" -> {data}} map for the current role"""
if cls.role_action is None:
return None
if cls.role_subtype is None:
return None
return ROLES.get(cls.role_action, {}).get(cls.role_subtype, {})
def run(self, local_repo, command, remote_repo, expected_return_code=None):
if not isinstance(command, (list, tuple)):
command = [command]
cmd = ['--cwd', pipes.quote(local_repo)] + command
cmd.extend(self._remote_path_cmd(remote_repo))
if self.rev:
cmd.extend(['-r', self.rev])
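        # "hg incoming"/"hg outgoing" exit with status 1 when there is
        # nothing to exchange, which is the expected outcome for the "same"
        # partial variant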
if expected_return_code is None:
expected_return_code = 1 if self.partial_key == "same" else 0
self.hg(*cmd, expected_return_code=expected_return_code)
@params_as_kwargs
def setup(self, repo, repo_type, strip, revset, **kwargs):
self.partial_key = strip
super(BaseExchangeTimeSuite, self).setup(repo, **kwargs)
self._setup_repo_type(repo_type)
self._setup_revset(revset)
@property
def clone_path(self):
return self.repo_path_from_id(self.partial_key)
def repo_path_from_id(self, partial_id):
"""Return the absolute path for a given partial
"reference" is a special value that means the original repository.
"""
if partial_id == 'reference':
return self.repo_path
        suffix = quote_plus(partial_id)
        # We need to use the repo name here because the repo doesn't contain
        # the hash
        partial_name = '{}-partial-{}'.format(self.repo_name, suffix)
return os.path.join(REPOS_DIR, 'partial-references', partial_name)
def teardown(self, *args, **kwargs):
self._teardown_repo_type()
def _rsync(self, src, dst):
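        # Cheaply refresh dst from src: --inplace/--no-whole-file rewrite
        # only the changed blocks, -aH preserves hardlinks and metadata,
        # and --delete drops files that no longer exist in the source.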
cmd = [
'rsync',
'--inplace',
'--no-whole-file',
'-aH',
'--delete',
'{}/'.format(src),
dst,
]
self.check_output(cmd)
class ExchangeTimeSuite(BaseExchangeTimeSuite):
def time_incoming(self, *args, **kwargs):
self.run(self.clone_path, 'incoming', self.repo_path)
def time_outgoing(self, *args, **kwargs):
self.run(self.repo_path, 'outgoing', self.clone_path)
# https://bz.mercurial-scm.org/show_bug.cgi?id=5851
# long timeout and process leak
not_broken_hgweb = not_compatible_with(
revset="f0a851542a05::877185de62^",
filter_fn=lambda kwargs, current_version: kwargs['repo_type'] == 'http'
)
class BaseDiscoveryTimeSuite(BaseExchangeTimeSuite):
    # debugdiscovery does not support a revset argument
    param_names = BaseTestSuite.param_names + ['repo_type', 'strip']
    params = BaseTestSuite.params + [
        ['local', 'ssh', 'http'],
        STRIP_VARIANTS_LIST,
    ]
def _track_discovery(self, *args, **kwargs):
data = self.role_data.get(kwargs['strip'], {})
data = data.get(self.repo_name)
if data is None:
raise NotImplementedError("no roles data for this partials' key")
source = self.repo_path_from_id(data['source'])
target = self.repo_path_from_id(data['target'])
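        # perfdiscovery, provided by mercurial's contrib/perf.py extension,
        # times the set-discovery phase between the two repositories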
return self.perfext('perfdiscovery', '--repository', source, target)
@setup_role
class DiscoveryIdenticalTimeSuite(BaseDiscoveryTimeSuite):
role_action = 'discovery'
role_subtype = 'identical'
@params_as_kwargs
@not_broken_hgweb
def track_identical(self, *args, **kwargs):
        return self._track_discovery(*args, **kwargs)
track_identical.benchmark_name = 'exchange.discovery.changesets.track_identical'
@setup_role
class DiscoverySubsetTimeSuite(BaseDiscoveryTimeSuite):
role_action = 'discovery'
role_subtype = 'subset'
@params_as_kwargs
@not_broken_hgweb
def track_discovery_subset(self, *args, **kwargs):
        return self._track_discovery(*args, **kwargs)
track_discovery_subset.benchmark_name = 'basic_commands.DiscoveryTimeSuite.track_discovery_subset'
@setup_role
class DiscoverySupersetTimeSuite(BaseDiscoveryTimeSuite):
role_action = 'discovery'
role_subtype = 'superset'
@params_as_kwargs
@not_broken_hgweb
def track_discovery_superset(self, *args, **kwargs):
        return self._track_discovery(*args, **kwargs)
track_discovery_superset.benchmark_name = 'basic_commands.DiscoveryTimeSuite.track_discovery_superset'
@setup_role
class DiscoveryStandardTimeSuite(BaseDiscoveryTimeSuite):
role_action = 'discovery'
role_subtype = 'balanced'
@params_as_kwargs
@not_broken_hgweb
def track_balanced(self, *args, **kwargs):
        return self._track_discovery(*args, **kwargs)
track_balanced.benchmark_name = 'exchange.discovery.changesets.track_balanced'
class UnbundleTimeSuite(BaseExchangeTimeSuite):
params = BaseTestSuite.params + [
STRIP_VARIANTS_LIST]
param_names = BaseTestSuite.param_names + ['strip']
@params_as_kwargs
def setup(self, repo, strip, **kwargs):
super(UnbundleTimeSuite, self).setup(repo, repo_type="local", strip=strip, revset=None, **kwargs)
partial_sets = REPO_DETAILS[self.repo_name]['reference-repo']['partial-sets']
if strip not in partial_sets:
# Only test for strip variants specified in the .benchrepo
raise NotImplementedError("Strip variant {} not used for repo {}".format(strip, repo))
tmpdir = os.path.join(REPOS_DIR, 'runtime-clones')
try:
os.makedirs(tmpdir)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
self.tmp_clone_path = os.path.join(tmpdir, 'clone-{}'.format(
os.path.basename(self.clone_path)))
        # XXX: This should be deleted at the end, but teardown, like setup,
        # is called for each repeat...
        self._rsync(self.clone_path, self.tmp_clone_path)
        # Wait for everything to be written to disk so that IO wait does not
        # skew the performance metrics
        self.check_output('sync')
partial_set = partial_sets[strip]
if not partial_set:
raise NotImplementedError("No strip revset, ignoring.")
strip_revset = partial_set['remove']
self.hg("bundle", "--base", "not(%s)" % strip_revset, "/tmp/bundle.bundle")
    def time_debugunbundle(self, *args, **kwargs):
        self.run(self.tmp_clone_path, 'unbundle', "/tmp/bundle.bundle",
                 expected_return_code=0)
class BasePushPullTimeSuite(BaseExchangeTimeSuite):
    # Force setup to be called between two push or pull runs
warmup_time = 0
def setup(self, *args, **kwargs):
super(BasePushPullTimeSuite, self).setup(*args, **kwargs)
tmpdir = os.path.join(REPOS_DIR, 'runtime-clones')
try:
os.makedirs(tmpdir)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
self.tmp_clone_path = os.path.join(tmpdir, 'clone-{}'.format(
os.path.basename(self.clone_path)))
        # XXX: This should be deleted at the end, but teardown, like setup,
        # is called for each repeat...
        self._rsync(self.clone_path, self.tmp_clone_path)
        # Wait for everything to be written to disk so that IO wait does not
        # skew the performance metrics
        self.check_output('sync')
def _time_push(self, *args, **kwargs):
self.run(self.repo_path, ['push', '-f'], self.tmp_clone_path)
def _time_pull(self, *args, **kwargs):
self.run(self.tmp_clone_path, 'pull', self.repo_path,
expected_return_code=0)
class NoOpPushPullTimeSuite(BasePushPullTimeSuite):
params = BaseTestSuite.params + [
['local', 'ssh', 'http'],
["same"], # Force the strip variant to same
[None, 'tip']]
def time_push(self, *args, **kwargs):
self._time_push()
def time_pull(self, *args, **kwargs):
self._time_pull()
class SmallPushPullTimeSuite(BasePushPullTimeSuite):
params = BaseTestSuite.params + [
['local', 'ssh', 'http'],
["last-ten", "last-hundred"], # Only the usual strip sizes
[None, 'tip']]
def time_push(self, *args, **kwargs):
self._time_push()
def time_pull(self, *args, **kwargs):
self._time_pull()
class BigPushPullTimeSuite(BasePushPullTimeSuite):
params = BaseTestSuite.params + [
['local', 'ssh', 'http'],
["last-thousand"], # All the big strip variants
[None, 'tip']]
def time_push(self, *args, **kwargs):
self._time_push()
def time_pull(self, *args, **kwargs):
self._time_pull()
# class CloneTimeSuite(BaseExchangeMixin, BaseTestSuite):
# param_names = BaseTestSuite.param_names + ['repo_type', 'revset']
# params = BaseTestSuite.params + [['local', 'ssh', 'http'], [None, 'tip']]
# timer = timeit.default_timer
# number = 1
# warmup_time = 0
# timeout = 2700
# def setup(self, *args, **kwargs):
# args = list(args)
# repo_name = args[self.param_names.index('repo')]
# revset = args.pop(-1)
# repo_type = args.pop(-1)
# super(CloneTimeSuite, self).setup(*args, **kwargs)
# self._cleanup_paths = []
# self._setup_repo_type(repo_type)
# self._setup_revset(revset)
# self.clone_path = os.path.join(REPOS_DIR, repo_name)
# if repo_name in ('mercurial-2017', 'pypy-2017'):
# # small repositories
# # run 20 times or less than 5 minutes
# self.repeat = 20
# self.sample_time = (5 * 60) / (self.repeat * 1.3)
# elif repo_name in ('mozilla-central-2017', 'netbeans-2017'):
# # big repositories
# # run 3 times or less than 45 minutes
# self.repeat = 3
# self.sample_time = (45 * 60) / (self.repeat * 1.3)
# else:
# raise NotImplementedError('unconfigured timeout for repository %s' % repo_name)
# def teardown(self, *args, **kwargs):
# self._teardown_repo_type()
# for path in self._cleanup_paths:
# shutil.rmtree(path, ignore_errors=True)
# self._cleanup_paths = []
#
# def time_clone(self, *args, **kwargs):
# cmd = ['clone', '--pull']
# tmp_clone_path = os.path.abspath(tempfile.mkdtemp(dir='.'))
# self._cleanup_paths.append(tmp_clone_path)
# cmd.extend(self._remote_path_cmd(self.clone_path))
# cmd.append(tmp_clone_path)
# if self.rev:
# cmd.extend(['-r', self.rev])
# self.hg(*cmd)
# class CloneStreamTimeSuite(BaseExchangeMixin, BaseTestSuite):
# param_names = BaseTestSuite.param_names + ['repo_type', 'update']
# params = BaseTestSuite.params + [['local', 'ssh', 'http'], [False, True]]
# timer = timeit.default_timer
# # run 5 times or less than 15 minutes
# number = 1
# repeat = 5
# warmup_time = 0
# sample_time = (15 * 60) / (repeat * 1.3)
# timeout = 2700
# def setup(self, *args, **kwargs):
# args = list(args)
# repo_name = args[self.param_names.index('repo')]
# self.update = args.pop(-1)
# repo_type = args.pop(-1)
# super(CloneStreamTimeSuite, self).setup(*args, **kwargs)
# self._cleanup_paths = []
# self._setup_repo_type(repo_type)
# self.clone_path = os.path.join(REPOS_DIR, repo_name)
# def teardown(self, *args, **kwargs):
# self._teardown_repo_type()
# for path in self._cleanup_paths:
# shutil.rmtree(path, ignore_errors=True)
# self._cleanup_paths = []
# def time_clone_stream(self, *args, **kwargs):
# cmd = ['clone', '--stream']
# tmp_clone_path = os.path.abspath(tempfile.mkdtemp(dir='.'))
# self._cleanup_paths.append(tmp_clone_path)
# cmd.extend(self._remote_path_cmd(self.clone_path))
# cmd.append(tmp_clone_path)
# if not self.update:
# cmd.append('--noupdate')
# self.hg(*cmd)