from __future__ import print_function

import errno
import os
import pipes
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import threading
import time

from .utils import (
    BaseNChangesetsTestSuite,
    BaseTestSuite,
    params_as_kwargs,
    median,
    REPOS_DIR,
    STRIP_VARIANTS,
)

if sys.version_info[0] == 2:
    import Queue as queue
    from urllib import quote_plus
else:
    import queue
    from urllib.parse import quote_plus
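
# STRIP_VARIANTS (from .utils) maps a variant name to either None ("same":
# nothing is stripped) or a dict whose 'remove' key holds the revset of
# changesets to strip.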
STRIP_REVSETS = {}
for key, value in STRIP_VARIANTS.items():
if value is None:
# not sure why tip is the expected value here
STRIP_REVSETS[key] = 'tip'
else:
STRIP_REVSETS[key] = value['remove']
def get_strip_variants():
return ["same", "last-ten", "last-hundred", "last-thousand"]
def get_strip_revset(strip):
return STRIP_REVSETS[strip]
class TestSuite(BaseTestSuite):
params = BaseTestSuite.params + [("create", "update")]
param_names = BaseTestSuite.param_names + ["mode"]
@params_as_kwargs
def track_commit(self, mode, **kwargs):
filename = None
self.hg('status')
if mode == 'create':
with open(os.path.join(self.repo_path, 'BABAR'), 'w') as f:
f.write("BABAR")
else:
# pick filename to update
filename = self.hg('manifest').partition('\n')[0]
filename = os.path.join(self.repo_path, filename)
        timings = []
        for i in range(10):
            if mode == 'update':
                with open(filename, 'a') as target:
                    target.write(
                        'The quick brown fox jumps over the lazy dog\n')
            else:
                self.hg('add', 'BABAR')
            before = time.time()
            try:
                self.hg('commit', '-m', 'My commit message')
                after = time.time()
                timings.append(after - before)
            finally:
                # Rollback and clean up so each iteration starts from the
                # same repository state
                self.hg('rollback', '--config', 'ui.rollback=true')
                self.hg('debugupdatecache')
        return median(timings)
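
    # asv derives the benchmark kind from the method prefix: 'time_*'
    # methods are timed by asv itself, while 'track_*' methods such as
    # track_commit above must return the value to record, hence the
    # explicit median over hand-measured timings.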
class TimeTestSuite(BaseTestSuite):
    def time_manifest_all(self, *args, **kwargs):
        self.hg('manifest', '--all')
def time_files(self, *args, **kwargs):
self.hg('files', '-r', 'tip')
class ArchiveTimeTestSuite(BaseTestSuite):
    # work around repeat counts: mozilla-central and netbeans are very slow,
    # while archiving mercurial itself takes about a second, so asv would
    # otherwise schedule more runs.
timeout = 300
param_names = TimeTestSuite.param_names + ['type']
# The "file" type have been disabled. It does not yield very different
# result than tar and taks a lot of time. creating and deleting many file
# is expensive compared to creating and deleting a single tar
#
# params = TimeTestSuite.params + [['files', 'tar']]
params = TimeTestSuite.params + [['tar']]
def setup(self, *args, **kwargs):
super(ArchiveTimeTestSuite, self).setup(*args, **kwargs)
self.output_dir = tempfile.mkdtemp()
self.output = os.path.join(self.output_dir, 'archive')
def teardown(self, *args, **kwargs):
shutil.rmtree(self.output_dir)
@params_as_kwargs
    def time_archive(self, repo, **kwargs):
        # asv shares the same temporary directory for all combinations,
        # so use a unique output name
        archive_type = kwargs['type']
        output = '{}-{}-{}'.format(self.output, repo, archive_type)
        self.hg('archive', '--type', archive_type, output)
class LogTimeTestSuite(BaseNChangesetsTestSuite):
@params_as_kwargs
def time_log_history(self, repo, changesets, **kwargs):
self.hg("log", "-r", "-%d:" % changesets)
class UpdateTimeTestSuite(BaseNChangesetsTestSuite):
def time_up_tip(self, repo, n):
self.hg("up", "-r", "tip~%d" % n)
self.hg("up", "-r", "tip")
class BundleTimeTestSuite(BaseNChangesetsTestSuite):
timeout = 500
def time_bundle(self, repo, n):
self.hg("bundle", "--base", ":(-%d)" % (n+1), "/tmp/bundle.bundle")
class HgWeb(object):
def __init__(self):
super(HgWeb, self).__init__()
self.queue = queue.Queue()
self.proc = None
self.thread = None
def start(self, hgpath, environ):
config = os.path.abspath(os.path.join(
os.path.dirname(__file__), os.pardir, 'hgweb.config'))
        hgweb_cmd = [
            hgpath, 'serve', '--cwd', REPOS_DIR,
'-a', 'localhost', '-p', '0',
'--config', 'web.push_ssl=False',
'--config', 'web.allow_push=*',
'--webdir-conf', config]
self.proc = subprocess.Popen(hgweb_cmd, env=environ,
stdout=subprocess.PIPE)
# we have to read output in a thread to avoid deadlocks
self.thread = threading.Thread(
target=self._enqueue, args=(self.queue, self.proc.stdout))
self.thread.daemon = True
self.thread.start()
        # wait for the server to start
statusline = self.queue.get()
if not statusline:
self.stop()
raise RuntimeError('hg serve has crashed')
        return re.search(r':(\d+)', statusline.decode('utf-8')).groups()[0]
@staticmethod
def _enqueue(queue, fd):
while True:
data = fd.readline()
if not data:
break
queue.put(data)
queue.put(None)
def stop(self):
self.proc.kill()
self.proc.wait()
self.thread.join()
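
# HgWeb lifecycle (see _setup_repo_type below): start() spawns 'hg serve'
# on a random port ('-p 0'), reads its status line through a background
# thread to discover the actual port, and stop() kills the daemon in the
# teardown.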
class BaseExchangeMixin(object):
def _remote_path_cmd(self, path):
if self.repo_type == 'local':
return [path]
elif self.repo_type == 'ssh':
            with open('hg_wrapper', 'w') as f:
                f.write('#!/bin/sh\nexec env -i {} {} $*\n'.format(
                    ' '.join(['{}={}'.format(k, pipes.quote(v))
                              for k, v in self.environ.items()]),
                    os.path.abspath(self.hgpath)))
            st = os.stat('hg_wrapper')
            os.chmod('hg_wrapper', st.st_mode | stat.S_IEXEC)
            return [
                '--remotecmd', os.path.abspath('hg_wrapper'),
                'ssh://localhost/{}'.format(os.path.abspath(path))]
elif self.repo_type == 'http':
path = os.path.abspath(path)
repo_dir = os.path.abspath(REPOS_DIR)
            assert path.startswith(repo_dir), path
return ['http://localhost:{}/{}'.format(
self.hgport, path[len(repo_dir) + 1:])]
else:
raise NotImplementedError
def _setup_repo_type(self, repo_type):
self._hgserve = None
if repo_type == 'http' and self.get_asv_rev() in self.get_skip()['hgweb']:
raise NotImplementedError
self.repo_type = repo_type
if repo_type == 'http':
self._hgserve = HgWeb()
self.hgport = self._hgserve.start(self.hgpath, self.environ)
def _teardown_repo_type(self):
if self._hgserve is not None:
self._hgserve.stop()
self._hgserve = None
def _setup_revset(self, revset):
if revset is not None:
self.rev = self.hg('identify', '-i', '-r', revset).strip()
else:
self.rev = None
class BaseExchangeTimeSuite(BaseExchangeMixin, BaseTestSuite):
param_names = BaseTestSuite.param_names + [
'repo_type', 'strip', 'revset']
params = BaseTestSuite.params + [
['local', 'ssh', 'http'],
get_strip_variants(),
[None, 'tip']]
timeout = 1800
def run(self, local_repo, command, remote_repo, expected_return_code=None):
if not isinstance(command, (list, tuple)):
command = [command]
cmd = ['--cwd', pipes.quote(local_repo)] + command
cmd.extend(self._remote_path_cmd(remote_repo))
if self.rev:
cmd.extend(['-r', self.rev])
if expected_return_code is None:
expected_return_code = 1 if self.strip == "same" else 0
self.hg(*cmd, expected_return_code=expected_return_code)
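
    # For example, with repo_type 'ssh', strip 'last-ten' and revset 'tip',
    # run() assembles roughly:
    #   hg --cwd <local_repo> incoming \
    #       --remotecmd <abs>/hg_wrapper ssh://localhost/<remote_repo> -r <rev>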
def setup(self, repo_name, *args):
args = list(args)
revset = args.pop(-1)
self.strip = args.pop(-1)
repo_type = args.pop(-1)
super(BaseExchangeTimeSuite, self).setup(repo_name, *args)
self._setup_repo_type(repo_type)
self._setup_revset(revset)
        repo_suffix = quote_plus(self.strip)
self.clone_path = os.path.join(REPOS_DIR, '.cache', '{}-partial-{}'.format(
repo_name, repo_suffix))
def teardown(self, *args, **kwargs):
self._teardown_repo_type()
class ExchangeTimeSuite(BaseExchangeTimeSuite):
def time_incoming(self, *args, **kwargs):
self.run(self.clone_path, 'incoming', self.repo_path)
def time_outgoing(self, *args, **kwargs):
self.run(self.repo_path, 'outgoing', self.clone_path)
class DiscoveryTimeSuite(BaseExchangeTimeSuite):
# debugdiscovery does not support revset argument
    params = BaseTestSuite.params + [
        ['local', 'ssh', 'http'],
        get_strip_variants(),
        [None]]
def time_debugdiscovery(self, *args, **kwargs):
self.run(self.clone_path, 'debugdiscovery', self.repo_path,
expected_return_code=0)
def track_discovery_subset(self, *args, **kwargs):
"""Local repository is a subset of remote."""
# self.hg() passes self.repo_path through --cwd, we override
# that with -R
return self.perfext('perfdiscovery',
'--repository', self.clone_path,
self.repo_path)
def track_discovery_superset(self, *args, **kwargs):
"""Local repository is a superset of remote."""
return self.perfext('perfdiscovery', self.clone_path)
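    # Both track_* methods above rely on the perfdiscovery command, which
    # (assumption, based on the perfext() helper from .utils) is provided
    # by mercurial's contrib/perf.py extension.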
class UnbundleTimeSuite(BaseExchangeTimeSuite):
    params = BaseTestSuite.params + [
        get_strip_variants()]
param_names = BaseTestSuite.param_names + ['strip']
def setup(self, repo_name, strip):
super(UnbundleTimeSuite, self).setup(repo_name, "local", strip, None)
tmpdir = os.path.join(REPOS_DIR, '.tmp')
try:
os.makedirs(tmpdir)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
self.tmp_clone_path = os.path.join(tmpdir, 'clone-{}'.format(
os.path.basename(self.clone_path)))
# XXX: This should be deleted at the end but teardown, like setup, is
# called for each repeat...
        self.check_output('rsync', '--inplace', '--no-whole-file', '-aH',
                          '--delete', '{}/'.format(self.clone_path),
                          self.tmp_clone_path)
# Wait for everything to be written on disk to avoid Disk IO wait to
# impact performances metrics
self.check_output('sync')
strip_revset = get_strip_revset(strip)
self.hg("bundle", "--base", "not(%s)" % strip_revset, "/tmp/bundle.bundle")
def time_debugunbundle(self, *args, **kwargs):
        self.run(self.tmp_clone_path, 'unbundle', "/tmp/bundle.bundle",
                 expected_return_code=0)
class BasePushPullTimeSuite(BaseExchangeTimeSuite):
# Force setup to be called between two push or pull
warmup_time = 0
def setup(self, *args, **kwargs):
super(BasePushPullTimeSuite, self).setup(*args, **kwargs)
tmpdir = os.path.join(REPOS_DIR, '.tmp')
try:
os.makedirs(tmpdir)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
self.tmp_clone_path = os.path.join(tmpdir, 'clone-{}'.format(
os.path.basename(self.clone_path)))
# XXX: This should be deleted at the end but teardown, like setup, is
# called for each repeat...
        self.check_output('rsync', '--inplace', '--no-whole-file', '-aH',
                          '--delete', '{}/'.format(self.clone_path),
                          self.tmp_clone_path)
# Wait for everything to be written on disk to avoid Disk IO wait to
# impact performances metrics
self.check_output('sync')
def _time_push(self, *args, **kwargs):
self.run(self.repo_path, ['push', '-f'], self.tmp_clone_path)
def _time_pull(self, *args, **kwargs):
self.run(self.tmp_clone_path, 'pull', self.repo_path,
expected_return_code=0)
class NoOpPushPullTimeSuite(BasePushPullTimeSuite):
params = BaseTestSuite.params + [
['local', 'ssh', 'http'],
["same"], # Force the strip variant to same
[None, 'tip']]
def time_push(self, *args, **kwargs):
self._time_push()
def time_pull(self, *args, **kwargs):
self._time_pull()
class SmallPushPullTimeSuite(BasePushPullTimeSuite):
params = BaseTestSuite.params + [
['local', 'ssh', 'http'],
["last-ten", "last-hundred"], # Only the usual strip sizes
[None, 'tip']]
def time_push(self, *args, **kwargs):
self._time_push()
def time_pull(self, *args, **kwargs):
self._time_pull()
class BigPushPullTimeSuite(BasePushPullTimeSuite):
params = BaseTestSuite.params + [
['local', 'ssh', 'http'],
["last-thousand"], # All the big strip variants
[None, 'tip']]
def time_push(self, *args, **kwargs):
self._time_push()
def time_pull(self, *args, **kwargs):
self._time_pull()
# class CloneTimeSuite(BaseExchangeMixin, BaseTestSuite):
# param_names = BaseTestSuite.param_names + ['repo_type', 'revset']
# params = BaseTestSuite.params + [['local', 'ssh', 'http'], [None, 'tip']]
# timer = timeit.default_timer
# number = 1
# warmup_time = 0
# timeout = 2700
# def setup(self, *args, **kwargs):
# args = list(args)
# repo_name = args[self.param_names.index('repo')]
# revset = args.pop(-1)
# repo_type = args.pop(-1)
# super(CloneTimeSuite, self).setup(*args, **kwargs)
# self._cleanup_paths = []
# self._setup_repo_type(repo_type)
# self._setup_revset(revset)
# self.clone_path = os.path.join(REPOS_DIR, repo_name)
# if repo_name in ('mercurial-2017', 'pypy-2017'):
# # small repositories
# # run 20 times or less than 5 minutes
# self.repeat = 20
# self.sample_time = (5 * 60) / (self.repeat * 1.3)
# elif repo_name in ('mozilla-central-2017', 'netbeans-2017'):
# # big repositories
# # run 3 times or less than 45 minutes
# self.repeat = 3
# self.sample_time = (45 * 60) / (self.repeat * 1.3)
# else:
# raise NotImplementedError('unconfigured timeout for repository %s' % repo_name)
# def teardown(self, *args, **kwargs):
# self._teardown_repo_type()
# for path in self._cleanup_paths:
# shutil.rmtree(path, ignore_errors=True)
# self._cleanup_paths = []
#
# def time_clone(self, *args, **kwargs):
# cmd = ['clone', '--pull']
# tmp_clone_path = os.path.abspath(tempfile.mkdtemp(dir='.'))
# self._cleanup_paths.append(tmp_clone_path)
# cmd.extend(self._remote_path_cmd(self.clone_path))
# cmd.append(tmp_clone_path)
# if self.rev:
# cmd.extend(['-r', self.rev])
# self.hg(*cmd)
# class CloneStreamTimeSuite(BaseExchangeMixin, BaseTestSuite):
# param_names = BaseTestSuite.param_names + ['repo_type', 'update']
# params = BaseTestSuite.params + [['local', 'ssh', 'http'], [False, True]]
# timer = timeit.default_timer
# # run 5 times or less than 15 minutes
# number = 1
# repeat = 5
# warmup_time = 0
# sample_time = (15 * 60) / (repeat * 1.3)
# timeout = 2700
# def setup(self, *args, **kwargs):
# args = list(args)
# repo_name = args[self.param_names.index('repo')]
# self.update = args.pop(-1)
# repo_type = args.pop(-1)
# super(CloneStreamTimeSuite, self).setup(*args, **kwargs)
# self._cleanup_paths = []
# self._setup_repo_type(repo_type)
# self.clone_path = os.path.join(REPOS_DIR, repo_name)
# def teardown(self, *args, **kwargs):
# self._teardown_repo_type()
# for path in self._cleanup_paths:
# shutil.rmtree(path, ignore_errors=True)
# self._cleanup_paths = []
# def time_clone_stream(self, *args, **kwargs):
# cmd = ['clone', '--stream']
# tmp_clone_path = os.path.abspath(tempfile.mkdtemp(dir='.'))
# self._cleanup_paths.append(tmp_clone_path)
# cmd.extend(self._remote_path_cmd(self.clone_path))
# cmd.append(tmp_clone_path)
# if not self.update:
# cmd.append('--noupdate')
# self.hg(*cmd)