Mirror of https://github.com/pre-commit/pre-commit.git, synced 2026-01-21 16:30:18 -06:00

Merge pull request #115 from pre-commit/reorganize_commands

Reorganize commands
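This pull request splits the monolithic pre_commit/commands.py into a pre_commit/commands/ package with one module per command (autoupdate, clean, install, run, uninstall), points the console script at pre_commit.main:main instead of pre_commit.run:run, and reorganizes the tests to mirror the new layout. Below is only a condensed sketch of the resulting dispatch, pieced together from the entry-module hunk further down; the per-command argparse options and the help subcommand are elided, so treat it as illustrative rather than the actual module.

# Sketch only: condensed from the entry-module hunk in this diff.
# Per-command flags (--verbose, --all-files, hook selection, ...) are elided,
# so only the no-argument commands are fully wired up here.
import argparse
import sys

from pre_commit.commands.autoupdate import autoupdate
from pre_commit.commands.clean import clean
from pre_commit.commands.install import install
from pre_commit.commands.run import run
from pre_commit.commands.uninstall import uninstall
from pre_commit.runner import Runner


def main(argv):
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='command')
    for command in ('install', 'uninstall', 'clean', 'autoupdate', 'run'):
        subparsers.add_parser(command)
    args = parser.parse_args(argv)

    # Runner.create() locates the current git repository and its config.
    runner = Runner.create()
    if args.command == 'install':
        return install(runner)
    elif args.command == 'uninstall':
        return uninstall(runner)
    elif args.command == 'clean':
        return clean(runner)
    elif args.command == 'autoupdate':
        return autoupdate(runner)
    elif args.command == 'run':
        return run(runner, args)


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))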
@@ -1,275 +0,0 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
from asottile.ordereddict import OrderedDict
|
||||
from asottile.yaml import ordered_dump
|
||||
from asottile.yaml import ordered_load
|
||||
from plumbum import local
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import git
|
||||
from pre_commit import color
|
||||
from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
|
||||
from pre_commit.clientlib.validate_config import load_config
|
||||
from pre_commit.jsonschema_extensions import remove_defaults
|
||||
from pre_commit.logging_handler import LoggingHandler
|
||||
from pre_commit.output import get_hook_message
|
||||
from pre_commit.repository import Repository
|
||||
from pre_commit.staged_files_only import staged_files_only
|
||||
from pre_commit.util import noop_context
|
||||
|
||||
|
||||
logger = logging.getLogger('pre_commit')
|
||||
|
||||
|
||||
def install(runner):
|
||||
"""Install the pre-commit hooks."""
|
||||
pre_commit_file = pkg_resources.resource_filename(
|
||||
'pre_commit', 'resources/pre-commit.sh',
|
||||
)
|
||||
with open(runner.pre_commit_path, 'w') as pre_commit_file_obj:
|
||||
pre_commit_file_obj.write(open(pre_commit_file).read())
|
||||
|
||||
original_mode = os.stat(runner.pre_commit_path).st_mode
|
||||
os.chmod(
|
||||
runner.pre_commit_path,
|
||||
original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
|
||||
)
|
||||
|
||||
print('pre-commit installed at {0}'.format(runner.pre_commit_path))
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def uninstall(runner):
|
||||
"""Uninstall the pre-commit hooks."""
|
||||
if os.path.exists(runner.pre_commit_path):
|
||||
os.remove(runner.pre_commit_path)
|
||||
print('pre-commit uninstalled')
|
||||
return 0
|
||||
|
||||
|
||||
class RepositoryCannotBeUpdatedError(RuntimeError):
|
||||
pass
|
||||
|
||||
|
||||
def _update_repository(repo_config, runner):
|
||||
"""Updates a repository to the tip of `master`. If the repository cannot
|
||||
be updated because a hook that is configured does not exist in `master`,
|
||||
this raises a RepositoryCannotBeUpdatedError
|
||||
|
||||
Args:
|
||||
repo_config - A config for a repository
|
||||
"""
|
||||
repo = Repository.create(repo_config, runner.store)
|
||||
|
||||
with local.cwd(repo.repo_path_getter.repo_path):
|
||||
local['git']['fetch']()
|
||||
head_sha = local['git']['rev-parse', 'origin/master']().strip()
|
||||
|
||||
# Don't bother trying to update if our sha is the same
|
||||
if head_sha == repo_config['sha']:
|
||||
return repo_config
|
||||
|
||||
# Construct a new config with the head sha
|
||||
new_config = OrderedDict(repo_config)
|
||||
new_config['sha'] = head_sha
|
||||
new_repo = Repository.create(new_config, runner.store)
|
||||
|
||||
# See if any of our hooks were deleted with the new commits
|
||||
hooks = set(repo.hooks.keys())
|
||||
hooks_missing = hooks - (hooks & set(new_repo.manifest.hooks.keys()))
|
||||
if hooks_missing:
|
||||
raise RepositoryCannotBeUpdatedError(
|
||||
'Cannot update because the tip of master is missing these hooks:\n'
|
||||
'{0}'.format(', '.join(sorted(hooks_missing)))
|
||||
)
|
||||
|
||||
return new_config
|
||||
|
||||
|
||||
def autoupdate(runner):
|
||||
"""Auto-update the pre-commit config to the latest versions of repos."""
|
||||
retv = 0
|
||||
output_configs = []
|
||||
changed = False
|
||||
|
||||
input_configs = load_config(
|
||||
runner.config_file_path,
|
||||
load_strategy=ordered_load,
|
||||
)
|
||||
|
||||
for repo_config in input_configs:
|
||||
sys.stdout.write('Updating {0}...'.format(repo_config['repo']))
|
||||
sys.stdout.flush()
|
||||
try:
|
||||
new_repo_config = _update_repository(repo_config, runner)
|
||||
except RepositoryCannotBeUpdatedError as error:
|
||||
print(error.args[0])
|
||||
output_configs.append(repo_config)
|
||||
retv = 1
|
||||
continue
|
||||
|
||||
if new_repo_config['sha'] != repo_config['sha']:
|
||||
changed = True
|
||||
print(
|
||||
'updating {0} -> {1}.'.format(
|
||||
repo_config['sha'], new_repo_config['sha'],
|
||||
)
|
||||
)
|
||||
output_configs.append(new_repo_config)
|
||||
else:
|
||||
print('already up to date.')
|
||||
output_configs.append(repo_config)
|
||||
|
||||
if changed:
|
||||
with open(runner.config_file_path, 'w') as config_file:
|
||||
config_file.write(
|
||||
ordered_dump(
|
||||
remove_defaults(output_configs, CONFIG_JSON_SCHEMA),
|
||||
**C.YAML_DUMP_KWARGS
|
||||
)
|
||||
)
|
||||
|
||||
return retv
|
||||
|
||||
|
||||
def clean(runner):
|
||||
if os.path.exists(runner.store.directory):
|
||||
shutil.rmtree(runner.store.directory)
|
||||
print('Cleaned {0}.'.format(runner.store.directory))
|
||||
return 0
|
||||
|
||||
|
||||
def _get_skips(environ):
|
||||
skips = environ.get('SKIP', '')
|
||||
return set(skip.strip() for skip in skips.split(',') if skip.strip())
|
||||
|
||||
|
||||
def _hook_msg_start(hook, verbose):
|
||||
return '{0}{1}'.format(
|
||||
'[{0}] '.format(hook['id']) if verbose else '',
|
||||
hook['name'],
|
||||
)
|
||||
|
||||
|
||||
def _print_no_files_skipped(hook, write, args):
|
||||
write(get_hook_message(
|
||||
_hook_msg_start(hook, args.verbose),
|
||||
postfix='(no files to check) ',
|
||||
end_msg='Skipped',
|
||||
end_color=color.TURQUOISE,
|
||||
use_color=args.color,
|
||||
))
|
||||
|
||||
|
||||
def _print_user_skipped(hook, write, args):
|
||||
write(get_hook_message(
|
||||
_hook_msg_start(hook, args.verbose),
|
||||
end_msg='Skipped',
|
||||
end_color=color.YELLOW,
|
||||
use_color=args.color,
|
||||
))
|
||||
|
||||
|
||||
def _run_single_hook(runner, repository, hook_id, args, write, skips=set()):
|
||||
if args.all_files:
|
||||
get_filenames = git.get_all_files_matching
|
||||
elif git.is_in_merge_conflict():
|
||||
get_filenames = git.get_conflicted_files_matching
|
||||
else:
|
||||
get_filenames = git.get_staged_files_matching
|
||||
|
||||
hook = repository.hooks[hook_id]
|
||||
|
||||
filenames = get_filenames(hook['files'], hook['exclude'])
|
||||
if hook_id in skips:
|
||||
_print_user_skipped(hook, write, args)
|
||||
return 0
|
||||
elif not filenames:
|
||||
_print_no_files_skipped(hook, write, args)
|
||||
return 0
|
||||
|
||||
# Print the hook and the dots first in case the hook takes hella long to
|
||||
# run.
|
||||
write(get_hook_message(_hook_msg_start(hook, args.verbose), end_len=6))
|
||||
sys.stdout.flush()
|
||||
|
||||
retcode, stdout, stderr = repository.run_hook(hook_id, filenames)
|
||||
|
||||
if retcode != repository.hooks[hook_id]['expected_return_value']:
|
||||
retcode = 1
|
||||
print_color = color.RED
|
||||
pass_fail = 'Failed'
|
||||
else:
|
||||
retcode = 0
|
||||
print_color = color.GREEN
|
||||
pass_fail = 'Passed'
|
||||
|
||||
write(color.format_color(pass_fail, print_color, args.color) + '\n')
|
||||
|
||||
if (stdout or stderr) and (retcode or args.verbose):
|
||||
write('\n')
|
||||
for output in (stdout, stderr):
|
||||
if output.strip():
|
||||
write(output.strip() + '\n')
|
||||
write('\n')
|
||||
|
||||
return retcode
|
||||
|
||||
|
||||
def _run_hooks(runner, args, write, environ):
|
||||
"""Actually run the hooks."""
|
||||
retval = 0
|
||||
|
||||
skips = _get_skips(environ)
|
||||
|
||||
for repo in runner.repositories:
|
||||
for hook_id in repo.hooks:
|
||||
retval |= _run_single_hook(
|
||||
runner, repo, hook_id, args, write, skips=skips,
|
||||
)
|
||||
|
||||
return retval
|
||||
|
||||
|
||||
def _run_hook(runner, args, write):
|
||||
hook_id = args.hook
|
||||
for repo in runner.repositories:
|
||||
if hook_id in repo.hooks:
|
||||
return _run_single_hook(runner, repo, hook_id, args, write=write)
|
||||
else:
|
||||
write('No hook with id `{0}`\n'.format(hook_id))
|
||||
return 1
|
||||
|
||||
|
||||
def _has_unmerged_paths(runner):
|
||||
_, stdout, _ = runner.cmd_runner.run(['git', 'ls-files', '--unmerged'])
|
||||
return bool(stdout.strip())
|
||||
|
||||
|
||||
def run(runner, args, write=sys.stdout.write, environ=os.environ):
|
||||
# Set up our logging handler
|
||||
logger.addHandler(LoggingHandler(args.color, write=write))
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
# Check if we have unresolved merge conflict files and fail fast.
|
||||
if _has_unmerged_paths(runner):
|
||||
logger.error('Unmerged files. Resolve before committing.')
|
||||
return 1
|
||||
|
||||
if args.no_stash or args.all_files:
|
||||
ctx = noop_context()
|
||||
else:
|
||||
ctx = staged_files_only(runner.cmd_runner)
|
||||
|
||||
with ctx:
|
||||
if args.hook:
|
||||
return _run_hook(runner, args, write=write)
|
||||
else:
|
||||
return _run_hooks(runner, args, write=write, environ=environ)
|
||||
pre_commit/commands/__init__.py (new file, 0 lines)

pre_commit/commands/autoupdate.py (new file, 100 lines)
@@ -0,0 +1,100 @@
from __future__ import print_function
from __future__ import unicode_literals

import sys

from asottile.ordereddict import OrderedDict
from asottile.yaml import ordered_dump
from asottile.yaml import ordered_load
from plumbum import local

import pre_commit.constants as C
from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
from pre_commit.clientlib.validate_config import load_config
from pre_commit.jsonschema_extensions import remove_defaults
from pre_commit.repository import Repository


class RepositoryCannotBeUpdatedError(RuntimeError):
    pass


def _update_repository(repo_config, runner):
    """Updates a repository to the tip of `master`.  If the repository cannot
    be updated because a hook that is configured does not exist in `master`,
    this raises a RepositoryCannotBeUpdatedError

    Args:
        repo_config - A config for a repository
    """
    repo = Repository.create(repo_config, runner.store)

    with local.cwd(repo.repo_path_getter.repo_path):
        local['git']['fetch']()
        head_sha = local['git']['rev-parse', 'origin/master']().strip()

    # Don't bother trying to update if our sha is the same
    if head_sha == repo_config['sha']:
        return repo_config

    # Construct a new config with the head sha
    new_config = OrderedDict(repo_config)
    new_config['sha'] = head_sha
    new_repo = Repository.create(new_config, runner.store)

    # See if any of our hooks were deleted with the new commits
    hooks = set(repo.hooks.keys())
    hooks_missing = hooks - (hooks & set(new_repo.manifest.hooks.keys()))
    if hooks_missing:
        raise RepositoryCannotBeUpdatedError(
            'Cannot update because the tip of master is missing these hooks:\n'
            '{0}'.format(', '.join(sorted(hooks_missing)))
        )

    return new_config


def autoupdate(runner):
    """Auto-update the pre-commit config to the latest versions of repos."""
    retv = 0
    output_configs = []
    changed = False

    input_configs = load_config(
        runner.config_file_path,
        load_strategy=ordered_load,
    )

    for repo_config in input_configs:
        sys.stdout.write('Updating {0}...'.format(repo_config['repo']))
        sys.stdout.flush()
        try:
            new_repo_config = _update_repository(repo_config, runner)
        except RepositoryCannotBeUpdatedError as error:
            print(error.args[0])
            output_configs.append(repo_config)
            retv = 1
            continue

        if new_repo_config['sha'] != repo_config['sha']:
            changed = True
            print(
                'updating {0} -> {1}.'.format(
                    repo_config['sha'], new_repo_config['sha'],
                )
            )
            output_configs.append(new_repo_config)
        else:
            print('already up to date.')
            output_configs.append(repo_config)

    if changed:
        with open(runner.config_file_path, 'w') as config_file:
            config_file.write(
                ordered_dump(
                    remove_defaults(output_configs, CONFIG_JSON_SCHEMA),
                    **C.YAML_DUMP_KWARGS
                )
            )

    return retv
pre_commit/commands/clean.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from __future__ import print_function
from __future__ import unicode_literals

import os.path
import shutil


def clean(runner):
    if os.path.exists(runner.store.directory):
        shutil.rmtree(runner.store.directory)
        print('Cleaned {0}.'.format(runner.store.directory))
    return 0
pre_commit/commands/install.py (new file, 24 lines)
@@ -0,0 +1,24 @@
from __future__ import print_function
from __future__ import unicode_literals

import os
import pkg_resources
import stat


def install(runner):
    """Install the pre-commit hooks."""
    pre_commit_file = pkg_resources.resource_filename(
        'pre_commit', 'resources/pre-commit.sh',
    )
    with open(runner.pre_commit_path, 'w') as pre_commit_file_obj:
        pre_commit_file_obj.write(open(pre_commit_file).read())

    original_mode = os.stat(runner.pre_commit_path).st_mode
    os.chmod(
        runner.pre_commit_path,
        original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
    )

    print('pre-commit installed at {0}'.format(runner.pre_commit_path))
    return 0
pre_commit/commands/run.py (new file, 145 lines)
@@ -0,0 +1,145 @@
from __future__ import print_function
from __future__ import unicode_literals

import logging
import os
import sys

from pre_commit import git
from pre_commit import color
from pre_commit.logging_handler import LoggingHandler
from pre_commit.output import get_hook_message
from pre_commit.staged_files_only import staged_files_only
from pre_commit.util import noop_context


logger = logging.getLogger('pre_commit')


def _get_skips(environ):
    skips = environ.get('SKIP', '')
    return set(skip.strip() for skip in skips.split(',') if skip.strip())


def _hook_msg_start(hook, verbose):
    return '{0}{1}'.format(
        '[{0}] '.format(hook['id']) if verbose else '',
        hook['name'],
    )


def _print_no_files_skipped(hook, write, args):
    write(get_hook_message(
        _hook_msg_start(hook, args.verbose),
        postfix='(no files to check) ',
        end_msg='Skipped',
        end_color=color.TURQUOISE,
        use_color=args.color,
    ))


def _print_user_skipped(hook, write, args):
    write(get_hook_message(
        _hook_msg_start(hook, args.verbose),
        end_msg='Skipped',
        end_color=color.YELLOW,
        use_color=args.color,
    ))


def _run_single_hook(runner, repository, hook_id, args, write, skips=set()):
    if args.all_files:
        get_filenames = git.get_all_files_matching
    elif git.is_in_merge_conflict():
        get_filenames = git.get_conflicted_files_matching
    else:
        get_filenames = git.get_staged_files_matching

    hook = repository.hooks[hook_id]

    filenames = get_filenames(hook['files'], hook['exclude'])
    if hook_id in skips:
        _print_user_skipped(hook, write, args)
        return 0
    elif not filenames:
        _print_no_files_skipped(hook, write, args)
        return 0

    # Print the hook and the dots first in case the hook takes hella long to
    # run.
    write(get_hook_message(_hook_msg_start(hook, args.verbose), end_len=6))
    sys.stdout.flush()

    retcode, stdout, stderr = repository.run_hook(hook_id, filenames)

    if retcode != repository.hooks[hook_id]['expected_return_value']:
        retcode = 1
        print_color = color.RED
        pass_fail = 'Failed'
    else:
        retcode = 0
        print_color = color.GREEN
        pass_fail = 'Passed'

    write(color.format_color(pass_fail, print_color, args.color) + '\n')

    if (stdout or stderr) and (retcode or args.verbose):
        write('\n')
        for output in (stdout, stderr):
            if output.strip():
                write(output.strip() + '\n')
        write('\n')

    return retcode


def _run_hooks(runner, args, write, environ):
    """Actually run the hooks."""
    retval = 0

    skips = _get_skips(environ)

    for repo in runner.repositories:
        for hook_id in repo.hooks:
            retval |= _run_single_hook(
                runner, repo, hook_id, args, write, skips=skips,
            )

    return retval


def _run_hook(runner, args, write):
    hook_id = args.hook
    for repo in runner.repositories:
        if hook_id in repo.hooks:
            return _run_single_hook(runner, repo, hook_id, args, write=write)
    else:
        write('No hook with id `{0}`\n'.format(hook_id))
        return 1


def _has_unmerged_paths(runner):
    _, stdout, _ = runner.cmd_runner.run(['git', 'ls-files', '--unmerged'])
    return bool(stdout.strip())


def run(runner, args, write=sys.stdout.write, environ=os.environ):
    # Set up our logging handler
    logger.addHandler(LoggingHandler(args.color, write=write))
    logger.setLevel(logging.INFO)

    # Check if we have unresolved merge conflict files and fail fast.
    if _has_unmerged_paths(runner):
        logger.error('Unmerged files. Resolve before committing.')
        return 1

    if args.no_stash or args.all_files:
        ctx = noop_context()
    else:
        ctx = staged_files_only(runner.cmd_runner)

    with ctx:
        if args.hook:
            return _run_hook(runner, args, write=write)
        else:
            return _run_hooks(runner, args, write=write, environ=environ)
pre_commit/commands/uninstall.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from __future__ import print_function
from __future__ import unicode_literals

import os
import os.path


def uninstall(runner):
    """Uninstall the pre-commit hooks."""
    if os.path.exists(runner.pre_commit_path):
        os.remove(runner.pre_commit_path)
        print('pre-commit uninstalled')
    return 0
@@ -1,14 +1,19 @@
from __future__ import unicode_literals

import argparse
import sys

from pre_commit import color
from pre_commit import commands
from pre_commit.commands.autoupdate import autoupdate
from pre_commit.commands.clean import clean
from pre_commit.commands.install import install
from pre_commit.commands.run import run
from pre_commit.commands.uninstall import uninstall
from pre_commit.runner import Runner
from pre_commit.util import entry


@entry
def run(argv):
def main(argv):
    parser = argparse.ArgumentParser()

    subparsers = parser.add_subparsers(dest='command')
@@ -52,15 +57,15 @@ def run(argv):
    runner = Runner.create()

    if args.command == 'install':
        return commands.install(runner)
        return install(runner)
    elif args.command == 'uninstall':
        return commands.uninstall(runner)
        return uninstall(runner)
    elif args.command == 'clean':
        return commands.clean(runner)
        return clean(runner)
    elif args.command == 'autoupdate':
        return commands.autoupdate(runner)
        return autoupdate(runner)
    elif args.command == 'run':
        return commands.run(runner, args)
        return run(runner, args)
    elif args.command == 'help':
        if args.help_cmd:
            parser.parse_args([args.help_cmd, '--help'])
@@ -77,4 +82,4 @@ def run(argv):


if __name__ == '__main__':
    sys.exit(run())
    exit(main())
setup.py (2 lines changed)
@@ -44,7 +44,7 @@ setup(
    ],
    entry_points={
        'console_scripts': [
            'pre-commit = pre_commit.run:run',
            'pre-commit = pre_commit.main:main',
            'validate-config = pre_commit.clientlib.validate_config:run',
            'validate-manifest = pre_commit.clientlib.validate_manifest:run',
        ],
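The only change in this hunk is the console script target: the old 'pre-commit = pre_commit.run:run' entry is replaced by 'pre-commit = pre_commit.main:main', while the validate-config and validate-manifest scripts stay as they were. As a hedged sketch (assuming the surrounding setup() call is otherwise unchanged), the resulting entry_points declaration reads:

    # Post-change entry points as implied by the hunk above; the rest of
    # setup() is assumed unchanged.
    entry_points={
        'console_scripts': [
            'pre-commit = pre_commit.main:main',
            'validate-config = pre_commit.clientlib.validate_config:run',
            'validate-manifest = pre_commit.clientlib.validate_manifest:run',
        ],
    },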
tests/commands/__init__.py (new file, 0 lines)

tests/commands/autoupdate_test.py (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import pytest
|
||||
import shutil
|
||||
from asottile.ordereddict import OrderedDict
|
||||
from asottile.yaml import ordered_dump
|
||||
from plumbum import local
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
|
||||
from pre_commit.clientlib.validate_config import validate_config_extra
|
||||
from pre_commit.commands.autoupdate import _update_repository
|
||||
from pre_commit.commands.autoupdate import autoupdate
|
||||
from pre_commit.commands.autoupdate import RepositoryCannotBeUpdatedError
|
||||
from pre_commit.jsonschema_extensions import apply_defaults
|
||||
from pre_commit.jsonschema_extensions import remove_defaults
|
||||
from pre_commit.runner import Runner
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
from testing.util import get_head_sha
|
||||
from testing.util import get_resource_path
|
||||
|
||||
|
||||
@pytest.yield_fixture
|
||||
def up_to_date_repo(python_hooks_repo):
|
||||
config = OrderedDict((
|
||||
('repo', python_hooks_repo),
|
||||
('sha', get_head_sha(python_hooks_repo)),
|
||||
('hooks', [OrderedDict((('id', 'foo'),))]),
|
||||
))
|
||||
wrapped_config = apply_defaults([config], CONFIG_JSON_SCHEMA)
|
||||
validate_config_extra(wrapped_config)
|
||||
config = wrapped_config[0]
|
||||
|
||||
with open(os.path.join(python_hooks_repo, C.CONFIG_FILE), 'w') as file_obj:
|
||||
file_obj.write(
|
||||
ordered_dump(
|
||||
remove_defaults([config], CONFIG_JSON_SCHEMA),
|
||||
**C.YAML_DUMP_KWARGS
|
||||
)
|
||||
)
|
||||
|
||||
yield auto_namedtuple(
|
||||
repo_config=config,
|
||||
python_hooks_repo=python_hooks_repo,
|
||||
)
|
||||
|
||||
|
||||
def test_up_to_date_repo(up_to_date_repo, runner_with_mocked_store):
|
||||
input_sha = up_to_date_repo.repo_config['sha']
|
||||
ret = _update_repository(
|
||||
up_to_date_repo.repo_config, runner_with_mocked_store,
|
||||
)
|
||||
assert ret['sha'] == input_sha
|
||||
|
||||
|
||||
def test_autoupdate_up_to_date_repo(up_to_date_repo, mock_out_store_directory):
|
||||
before = open(C.CONFIG_FILE).read()
|
||||
assert '^$' not in before
|
||||
runner = Runner(up_to_date_repo.python_hooks_repo)
|
||||
ret = autoupdate(runner)
|
||||
after = open(C.CONFIG_FILE).read()
|
||||
assert ret == 0
|
||||
assert before == after
|
||||
|
||||
|
||||
@pytest.yield_fixture
|
||||
def out_of_date_repo(python_hooks_repo):
|
||||
config = OrderedDict((
|
||||
('repo', python_hooks_repo),
|
||||
('sha', get_head_sha(python_hooks_repo)),
|
||||
('hooks', [OrderedDict((('id', 'foo'), ('files', '')))]),
|
||||
))
|
||||
config_wrapped = apply_defaults([config], CONFIG_JSON_SCHEMA)
|
||||
validate_config_extra(config_wrapped)
|
||||
config = config_wrapped[0]
|
||||
local['git']['commit', '--allow-empty', '-m', 'foo']()
|
||||
head_sha = get_head_sha(python_hooks_repo)
|
||||
|
||||
with open(os.path.join(python_hooks_repo, C.CONFIG_FILE), 'w') as file_obj:
|
||||
file_obj.write(
|
||||
ordered_dump([config], **C.YAML_DUMP_KWARGS)
|
||||
)
|
||||
|
||||
yield auto_namedtuple(
|
||||
repo_config=config,
|
||||
head_sha=head_sha,
|
||||
python_hooks_repo=python_hooks_repo,
|
||||
)
|
||||
|
||||
|
||||
def test_out_of_date_repo(out_of_date_repo, runner_with_mocked_store):
|
||||
ret = _update_repository(
|
||||
out_of_date_repo.repo_config, runner_with_mocked_store,
|
||||
)
|
||||
assert ret['sha'] == out_of_date_repo.head_sha
|
||||
|
||||
|
||||
def test_autoupdate_out_of_date_repo(
|
||||
out_of_date_repo, mock_out_store_directory
|
||||
):
|
||||
before = open(C.CONFIG_FILE).read()
|
||||
runner = Runner(out_of_date_repo.python_hooks_repo)
|
||||
ret = autoupdate(runner)
|
||||
after = open(C.CONFIG_FILE).read()
|
||||
assert ret == 0
|
||||
assert before != after
|
||||
# Make sure we don't add defaults
|
||||
assert 'exclude' not in after
|
||||
assert out_of_date_repo.head_sha in after
|
||||
|
||||
|
||||
@pytest.yield_fixture
|
||||
def hook_disappearing_repo(python_hooks_repo):
|
||||
config = OrderedDict((
|
||||
('repo', python_hooks_repo),
|
||||
('sha', get_head_sha(python_hooks_repo)),
|
||||
('hooks', [OrderedDict((('id', 'foo'),))]),
|
||||
))
|
||||
config_wrapped = apply_defaults([config], CONFIG_JSON_SCHEMA)
|
||||
validate_config_extra(config_wrapped)
|
||||
config = config_wrapped[0]
|
||||
shutil.copy(
|
||||
get_resource_path('manifest_without_foo.yaml'),
|
||||
C.MANIFEST_FILE,
|
||||
)
|
||||
local['git']['add', '.']()
|
||||
local['git']['commit', '-m', 'Remove foo']()
|
||||
|
||||
with open(os.path.join(python_hooks_repo, C.CONFIG_FILE), 'w') as file_obj:
|
||||
file_obj.write(
|
||||
ordered_dump([config], **C.YAML_DUMP_KWARGS)
|
||||
)
|
||||
|
||||
yield auto_namedtuple(
|
||||
repo_config=config,
|
||||
python_hooks_repo=python_hooks_repo,
|
||||
)
|
||||
|
||||
|
||||
def test_hook_disppearing_repo_raises(
|
||||
hook_disappearing_repo, runner_with_mocked_store
|
||||
):
|
||||
with pytest.raises(RepositoryCannotBeUpdatedError):
|
||||
_update_repository(
|
||||
hook_disappearing_repo.repo_config, runner_with_mocked_store,
|
||||
)
|
||||
|
||||
|
||||
def test_autoupdate_hook_disappearing_repo(
|
||||
hook_disappearing_repo, mock_out_store_directory
|
||||
):
|
||||
before = open(C.CONFIG_FILE).read()
|
||||
runner = Runner(hook_disappearing_repo.python_hooks_repo)
|
||||
ret = autoupdate(runner)
|
||||
after = open(C.CONFIG_FILE).read()
|
||||
assert ret == 1
|
||||
assert before == after
|
||||
tests/commands/clean_test.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from __future__ import unicode_literals

import os.path
import shutil

from pre_commit.commands.clean import clean


def test_clean(runner_with_mocked_store):
    assert os.path.exists(runner_with_mocked_store.store.directory)
    clean(runner_with_mocked_store)
    assert not os.path.exists(runner_with_mocked_store.store.directory)


def test_clean_empty(runner_with_mocked_store):
    """Make sure clean succeeds when the directory doesn't exist."""
    shutil.rmtree(runner_with_mocked_store.store.directory)
    assert not os.path.exists(runner_with_mocked_store.store.directory)
    clean(runner_with_mocked_store)
    assert not os.path.exists(runner_with_mocked_store.store.directory)
tests/commands/install_test.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from __future__ import unicode_literals

import io
import os
import os.path
import pkg_resources
import stat

from pre_commit.commands.install import install
from pre_commit.runner import Runner


def test_install_pre_commit(empty_git_dir):
    runner = Runner(empty_git_dir)
    ret = install(runner)
    assert ret == 0
    assert os.path.exists(runner.pre_commit_path)
    pre_commit_contents = io.open(runner.pre_commit_path).read()
    pre_commit_sh = pkg_resources.resource_filename(
        'pre_commit', 'resources/pre-commit.sh',
    )
    expected_contents = io.open(pre_commit_sh).read()
    assert pre_commit_contents == expected_contents
    stat_result = os.stat(runner.pre_commit_path)
    assert stat_result.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
tests/commands/run_test.py (new file, 198 lines)
@@ -0,0 +1,198 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import mock
|
||||
import os
|
||||
import os.path
|
||||
import pytest
|
||||
from plumbum import local
|
||||
|
||||
from pre_commit.commands.run import _get_skips
|
||||
from pre_commit.commands.run import _has_unmerged_paths
|
||||
from pre_commit.commands.run import run
|
||||
from pre_commit.runner import Runner
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
|
||||
|
||||
def stage_a_file():
|
||||
local['touch']['foo.py']()
|
||||
local['git']['add', 'foo.py']()
|
||||
|
||||
|
||||
def get_write_mock_output(write_mock):
|
||||
return ''.join(call[0][0] for call in write_mock.call_args_list)
|
||||
|
||||
|
||||
def _get_opts(
|
||||
all_files=False,
|
||||
color=False,
|
||||
verbose=False,
|
||||
hook=None,
|
||||
no_stash=False,
|
||||
):
|
||||
return auto_namedtuple(
|
||||
all_files=all_files,
|
||||
color=color,
|
||||
verbose=verbose,
|
||||
hook=hook,
|
||||
no_stash=no_stash,
|
||||
)
|
||||
|
||||
|
||||
def _do_run(repo, args, environ={}):
|
||||
runner = Runner(repo)
|
||||
write_mock = mock.Mock()
|
||||
ret = run(runner, args, write=write_mock, environ=environ)
|
||||
printed = get_write_mock_output(write_mock)
|
||||
return ret, printed
|
||||
|
||||
|
||||
def _test_run(repo, options, expected_outputs, expected_ret, stage):
|
||||
if stage:
|
||||
stage_a_file()
|
||||
args = _get_opts(**options)
|
||||
ret, printed = _do_run(repo, args)
|
||||
assert ret == expected_ret
|
||||
for expected_output_part in expected_outputs:
|
||||
assert expected_output_part in printed
|
||||
|
||||
|
||||
def test_run_all_hooks_failing(
|
||||
repo_with_failing_hook, mock_out_store_directory
|
||||
):
|
||||
_test_run(
|
||||
repo_with_failing_hook,
|
||||
{},
|
||||
('Failing hook', 'Failed', 'Fail\nfoo.py\n'),
|
||||
1,
|
||||
True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('options', 'outputs', 'expected_ret', 'stage'),
|
||||
(
|
||||
({}, ('Bash hook', 'Passed'), 0, True),
|
||||
({'verbose': True}, ('foo.py\nHello World',), 0, True),
|
||||
({'hook': 'bash_hook'}, ('Bash hook', 'Passed'), 0, True),
|
||||
({'hook': 'nope'}, ('No hook with id `nope`',), 1, True),
|
||||
# All the files in the repo.
|
||||
# This seems kind of weird but it is beacuse py.test reuses fixtures
|
||||
(
|
||||
{'all_files': True, 'verbose': True},
|
||||
('hooks.yaml', 'bin/hook.sh', 'foo.py', 'dummy'),
|
||||
0,
|
||||
True,
|
||||
),
|
||||
({}, ('Bash hook', '(no files to check)', 'Skipped'), 0, False),
|
||||
)
|
||||
)
|
||||
def test_run(
|
||||
repo_with_passing_hook,
|
||||
options,
|
||||
outputs,
|
||||
expected_ret,
|
||||
stage,
|
||||
mock_out_store_directory,
|
||||
):
|
||||
_test_run(repo_with_passing_hook, options, outputs, expected_ret, stage)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('no_stash', 'all_files', 'expect_stash'),
|
||||
(
|
||||
(True, True, False),
|
||||
(True, False, False),
|
||||
(False, True, False),
|
||||
(False, False, True),
|
||||
),
|
||||
)
|
||||
def test_no_stash(
|
||||
repo_with_passing_hook,
|
||||
no_stash,
|
||||
all_files,
|
||||
expect_stash,
|
||||
mock_out_store_directory,
|
||||
):
|
||||
stage_a_file()
|
||||
# Make unstaged changes
|
||||
with open('foo.py', 'w') as foo_file:
|
||||
foo_file.write('import os\n')
|
||||
|
||||
args = _get_opts(no_stash=no_stash, all_files=all_files)
|
||||
ret, printed = _do_run(repo_with_passing_hook, args)
|
||||
assert ret == 0
|
||||
warning_msg = '[WARNING] Unstaged files detected.'
|
||||
if expect_stash:
|
||||
assert warning_msg in printed
|
||||
else:
|
||||
assert warning_msg not in printed
|
||||
|
||||
|
||||
@pytest.mark.parametrize(('output', 'expected'), (('some', True), ('', False)))
|
||||
def test_has_unmerged_paths(output, expected):
|
||||
mock_runner = mock.Mock()
|
||||
mock_runner.cmd_runner.run.return_value = (1, output, '')
|
||||
assert _has_unmerged_paths(mock_runner) is expected
|
||||
|
||||
|
||||
def test_merge_conflict(in_merge_conflict, mock_out_store_directory):
|
||||
ret, printed = _do_run(in_merge_conflict, _get_opts())
|
||||
assert ret == 1
|
||||
assert 'Unmerged files. Resolve before committing.' in printed
|
||||
|
||||
|
||||
def test_merge_conflict_modified(in_merge_conflict, mock_out_store_directory):
|
||||
# Touch another file so we have unstaged non-conflicting things
|
||||
assert os.path.exists('dummy')
|
||||
with open('dummy', 'w') as dummy_file:
|
||||
dummy_file.write('bar\nbaz\n')
|
||||
|
||||
ret, printed = _do_run(in_merge_conflict, _get_opts())
|
||||
assert ret == 1
|
||||
assert 'Unmerged files. Resolve before committing.' in printed
|
||||
|
||||
|
||||
def test_merge_conflict_resolved(in_merge_conflict, mock_out_store_directory):
|
||||
local['git']['add', '.']()
|
||||
ret, printed = _do_run(in_merge_conflict, _get_opts())
|
||||
for msg in ('Checking merge-conflict files only.', 'Bash hook', 'Passed'):
|
||||
assert msg in printed
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('environ', 'expected_output'),
|
||||
(
|
||||
({}, set([])),
|
||||
({'SKIP': ''}, set([])),
|
||||
({'SKIP': ','}, set([])),
|
||||
({'SKIP': ',foo'}, set(['foo'])),
|
||||
({'SKIP': 'foo'}, set(['foo'])),
|
||||
({'SKIP': 'foo,bar'}, set(['foo', 'bar'])),
|
||||
({'SKIP': ' foo , bar'}, set(['foo', 'bar'])),
|
||||
),
|
||||
)
|
||||
def test_get_skips(environ, expected_output):
|
||||
ret = _get_skips(environ)
|
||||
assert ret == expected_output
|
||||
|
||||
|
||||
def test_skip_hook(repo_with_passing_hook, mock_out_store_directory):
|
||||
ret, printed = _do_run(
|
||||
repo_with_passing_hook, _get_opts(), {'SKIP': 'bash_hook'},
|
||||
)
|
||||
for msg in ('Bash hook', 'Skipped'):
|
||||
assert msg in printed
|
||||
|
||||
|
||||
def test_hook_id_not_in_non_verbose_output(
|
||||
repo_with_passing_hook, mock_out_store_directory
|
||||
):
|
||||
ret, printed = _do_run(repo_with_passing_hook, _get_opts(verbose=False))
|
||||
assert '[bash_hook]' not in printed
|
||||
|
||||
|
||||
def test_hook_id_in_verbose_output(
|
||||
repo_with_passing_hook, mock_out_store_directory
|
||||
):
|
||||
ret, printed = _do_run(repo_with_passing_hook, _get_opts(verbose=True))
|
||||
assert '[bash_hook] Bash hook' in printed
|
||||
tests/commands/uninstall_test.py (new file, 22 lines)
@@ -0,0 +1,22 @@
from __future__ import unicode_literals

import os.path

from pre_commit.runner import Runner
from pre_commit.commands.install import install
from pre_commit.commands.uninstall import uninstall


def test_uninstall_pre_commit_does_not_blow_up_when_not_there(empty_git_dir):
    runner = Runner(empty_git_dir)
    ret = uninstall(runner)
    assert ret == 0


def test_uninstall(empty_git_dir):
    runner = Runner(empty_git_dir)
    assert not os.path.exists(runner.pre_commit_path)
    install(runner)
    assert os.path.exists(runner.pre_commit_path)
    uninstall(runner)
    assert not os.path.exists(runner.pre_commit_path)
@@ -1,392 +0,0 @@
|
||||
import mock
|
||||
import os
|
||||
import os.path
|
||||
import pkg_resources
|
||||
import pytest
|
||||
import shutil
|
||||
import stat
|
||||
from asottile.ordereddict import OrderedDict
|
||||
from asottile.yaml import ordered_dump
|
||||
from plumbum import local
|
||||
|
||||
import pre_commit.constants as C
|
||||
from pre_commit import commands
|
||||
from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
|
||||
from pre_commit.clientlib.validate_config import validate_config_extra
|
||||
from pre_commit.jsonschema_extensions import apply_defaults
|
||||
from pre_commit.jsonschema_extensions import remove_defaults
|
||||
from pre_commit.runner import Runner
|
||||
from testing.auto_namedtuple import auto_namedtuple
|
||||
from testing.util import get_head_sha
|
||||
from testing.util import get_resource_path
|
||||
|
||||
|
||||
@pytest.yield_fixture
|
||||
def runner_with_mocked_store(mock_out_store_directory):
|
||||
yield Runner('/')
|
||||
|
||||
|
||||
def test_install_pre_commit(empty_git_dir):
|
||||
runner = Runner(empty_git_dir)
|
||||
ret = commands.install(runner)
|
||||
assert ret == 0
|
||||
assert os.path.exists(runner.pre_commit_path)
|
||||
pre_commit_contents = open(runner.pre_commit_path).read()
|
||||
pre_commit_sh = pkg_resources.resource_filename(
|
||||
'pre_commit', 'resources/pre-commit.sh',
|
||||
)
|
||||
expected_contents = open(pre_commit_sh).read()
|
||||
assert pre_commit_contents == expected_contents
|
||||
stat_result = os.stat(runner.pre_commit_path)
|
||||
assert stat_result.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
|
||||
|
||||
|
||||
def test_uninstall_pre_commit_does_not_blow_up_when_not_there(empty_git_dir):
|
||||
runner = Runner(empty_git_dir)
|
||||
ret = commands.uninstall(runner)
|
||||
assert ret == 0
|
||||
|
||||
|
||||
def test_uninstall(empty_git_dir):
|
||||
runner = Runner(empty_git_dir)
|
||||
assert not os.path.exists(runner.pre_commit_path)
|
||||
commands.install(runner)
|
||||
assert os.path.exists(runner.pre_commit_path)
|
||||
commands.uninstall(runner)
|
||||
assert not os.path.exists(runner.pre_commit_path)
|
||||
|
||||
|
||||
@pytest.yield_fixture
|
||||
def up_to_date_repo(python_hooks_repo):
|
||||
config = OrderedDict((
|
||||
('repo', python_hooks_repo),
|
||||
('sha', get_head_sha(python_hooks_repo)),
|
||||
('hooks', [OrderedDict((('id', 'foo'),))]),
|
||||
))
|
||||
wrapped_config = apply_defaults([config], CONFIG_JSON_SCHEMA)
|
||||
validate_config_extra(wrapped_config)
|
||||
config = wrapped_config[0]
|
||||
|
||||
with open(os.path.join(python_hooks_repo, C.CONFIG_FILE), 'w') as file_obj:
|
||||
file_obj.write(
|
||||
ordered_dump(
|
||||
remove_defaults([config], CONFIG_JSON_SCHEMA),
|
||||
**C.YAML_DUMP_KWARGS
|
||||
)
|
||||
)
|
||||
|
||||
yield auto_namedtuple(
|
||||
repo_config=config,
|
||||
python_hooks_repo=python_hooks_repo,
|
||||
)
|
||||
|
||||
|
||||
def test_up_to_date_repo(up_to_date_repo, runner_with_mocked_store):
|
||||
input_sha = up_to_date_repo.repo_config['sha']
|
||||
ret = commands._update_repository(
|
||||
up_to_date_repo.repo_config, runner_with_mocked_store,
|
||||
)
|
||||
assert ret['sha'] == input_sha
|
||||
|
||||
|
||||
def test_autoupdate_up_to_date_repo(up_to_date_repo, mock_out_store_directory):
|
||||
before = open(C.CONFIG_FILE).read()
|
||||
assert '^$' not in before
|
||||
runner = Runner(up_to_date_repo.python_hooks_repo)
|
||||
ret = commands.autoupdate(runner)
|
||||
after = open(C.CONFIG_FILE).read()
|
||||
assert ret == 0
|
||||
assert before == after
|
||||
|
||||
|
||||
@pytest.yield_fixture
|
||||
def out_of_date_repo(python_hooks_repo):
|
||||
config = OrderedDict((
|
||||
('repo', python_hooks_repo),
|
||||
('sha', get_head_sha(python_hooks_repo)),
|
||||
('hooks', [OrderedDict((('id', 'foo'), ('files', '')))]),
|
||||
))
|
||||
config_wrapped = apply_defaults([config], CONFIG_JSON_SCHEMA)
|
||||
validate_config_extra(config_wrapped)
|
||||
config = config_wrapped[0]
|
||||
local['git']['commit', '--allow-empty', '-m', 'foo']()
|
||||
head_sha = get_head_sha(python_hooks_repo)
|
||||
|
||||
with open(os.path.join(python_hooks_repo, C.CONFIG_FILE), 'w') as file_obj:
|
||||
file_obj.write(
|
||||
ordered_dump([config], **C.YAML_DUMP_KWARGS)
|
||||
)
|
||||
|
||||
yield auto_namedtuple(
|
||||
repo_config=config,
|
||||
head_sha=head_sha,
|
||||
python_hooks_repo=python_hooks_repo,
|
||||
)
|
||||
|
||||
|
||||
def test_out_of_date_repo(out_of_date_repo, runner_with_mocked_store):
|
||||
ret = commands._update_repository(
|
||||
out_of_date_repo.repo_config, runner_with_mocked_store,
|
||||
)
|
||||
assert ret['sha'] == out_of_date_repo.head_sha
|
||||
|
||||
|
||||
def test_autoupdate_out_of_date_repo(
|
||||
out_of_date_repo, mock_out_store_directory
|
||||
):
|
||||
before = open(C.CONFIG_FILE).read()
|
||||
runner = Runner(out_of_date_repo.python_hooks_repo)
|
||||
ret = commands.autoupdate(runner)
|
||||
after = open(C.CONFIG_FILE).read()
|
||||
assert ret == 0
|
||||
assert before != after
|
||||
# Make sure we don't add defaults
|
||||
assert 'exclude' not in after
|
||||
assert out_of_date_repo.head_sha in after
|
||||
|
||||
|
||||
@pytest.yield_fixture
|
||||
def hook_disappearing_repo(python_hooks_repo):
|
||||
config = OrderedDict((
|
||||
('repo', python_hooks_repo),
|
||||
('sha', get_head_sha(python_hooks_repo)),
|
||||
('hooks', [OrderedDict((('id', 'foo'),))]),
|
||||
))
|
||||
config_wrapped = apply_defaults([config], CONFIG_JSON_SCHEMA)
|
||||
validate_config_extra(config_wrapped)
|
||||
config = config_wrapped[0]
|
||||
shutil.copy(
|
||||
get_resource_path('manifest_without_foo.yaml'),
|
||||
C.MANIFEST_FILE,
|
||||
)
|
||||
local['git']['add', '.']()
|
||||
local['git']['commit', '-m', 'Remove foo']()
|
||||
|
||||
with open(os.path.join(python_hooks_repo, C.CONFIG_FILE), 'w') as file_obj:
|
||||
file_obj.write(
|
||||
ordered_dump([config], **C.YAML_DUMP_KWARGS)
|
||||
)
|
||||
|
||||
yield auto_namedtuple(
|
||||
repo_config=config,
|
||||
python_hooks_repo=python_hooks_repo,
|
||||
)
|
||||
|
||||
|
||||
def test_hook_disppearing_repo_raises(
|
||||
hook_disappearing_repo, runner_with_mocked_store
|
||||
):
|
||||
with pytest.raises(commands.RepositoryCannotBeUpdatedError):
|
||||
commands._update_repository(
|
||||
hook_disappearing_repo.repo_config, runner_with_mocked_store,
|
||||
)
|
||||
|
||||
|
||||
def test_autoupdate_hook_disappearing_repo(
|
||||
hook_disappearing_repo, mock_out_store_directory
|
||||
):
|
||||
before = open(C.CONFIG_FILE).read()
|
||||
runner = Runner(hook_disappearing_repo.python_hooks_repo)
|
||||
ret = commands.autoupdate(runner)
|
||||
after = open(C.CONFIG_FILE).read()
|
||||
assert ret == 1
|
||||
assert before == after
|
||||
|
||||
|
||||
def test_clean(runner_with_mocked_store):
|
||||
assert os.path.exists(runner_with_mocked_store.store.directory)
|
||||
commands.clean(runner_with_mocked_store)
|
||||
assert not os.path.exists(runner_with_mocked_store.store.directory)
|
||||
|
||||
|
||||
def test_clean_empty(runner_with_mocked_store):
|
||||
"""Make sure clean succeeds when we the directory doesn't exist."""
|
||||
shutil.rmtree(runner_with_mocked_store.store.directory)
|
||||
assert not os.path.exists(runner_with_mocked_store.store.directory)
|
||||
commands.clean(runner_with_mocked_store)
|
||||
assert not os.path.exists(runner_with_mocked_store.store.directory)
|
||||
|
||||
|
||||
def stage_a_file():
|
||||
local['touch']['foo.py']()
|
||||
local['git']['add', 'foo.py']()
|
||||
|
||||
|
||||
def get_write_mock_output(write_mock):
|
||||
return ''.join(call[0][0] for call in write_mock.call_args_list)
|
||||
|
||||
|
||||
def _get_opts(
|
||||
all_files=False,
|
||||
color=False,
|
||||
verbose=False,
|
||||
hook=None,
|
||||
no_stash=False,
|
||||
):
|
||||
return auto_namedtuple(
|
||||
all_files=all_files,
|
||||
color=color,
|
||||
verbose=verbose,
|
||||
hook=hook,
|
||||
no_stash=no_stash,
|
||||
)
|
||||
|
||||
|
||||
def _do_run(repo, args, environ={}):
|
||||
runner = Runner(repo)
|
||||
write_mock = mock.Mock()
|
||||
ret = commands.run(runner, args, write=write_mock, environ=environ)
|
||||
printed = get_write_mock_output(write_mock)
|
||||
return ret, printed
|
||||
|
||||
|
||||
def _test_run(repo, options, expected_outputs, expected_ret, stage):
|
||||
if stage:
|
||||
stage_a_file()
|
||||
args = _get_opts(**options)
|
||||
ret, printed = _do_run(repo, args)
|
||||
assert ret == expected_ret
|
||||
for expected_output_part in expected_outputs:
|
||||
assert expected_output_part in printed
|
||||
|
||||
|
||||
def test_run_all_hooks_failing(
|
||||
repo_with_failing_hook, mock_out_store_directory
|
||||
):
|
||||
_test_run(
|
||||
repo_with_failing_hook,
|
||||
{},
|
||||
('Failing hook', 'Failed', 'Fail\nfoo.py\n'),
|
||||
1,
|
||||
True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('options', 'outputs', 'expected_ret', 'stage'),
|
||||
(
|
||||
({}, ('Bash hook', 'Passed'), 0, True),
|
||||
({'verbose': True}, ('foo.py\nHello World',), 0, True),
|
||||
({'hook': 'bash_hook'}, ('Bash hook', 'Passed'), 0, True),
|
||||
({'hook': 'nope'}, ('No hook with id `nope`',), 1, True),
|
||||
# All the files in the repo.
|
||||
# This seems kind of weird but it is beacuse py.test reuses fixtures
|
||||
(
|
||||
{'all_files': True, 'verbose': True},
|
||||
('hooks.yaml', 'bin/hook.sh', 'foo.py', 'dummy'),
|
||||
0,
|
||||
True,
|
||||
),
|
||||
({}, ('Bash hook', '(no files to check)', 'Skipped'), 0, False),
|
||||
)
|
||||
)
|
||||
def test_run(
|
||||
repo_with_passing_hook,
|
||||
options,
|
||||
outputs,
|
||||
expected_ret,
|
||||
stage,
|
||||
mock_out_store_directory,
|
||||
):
|
||||
_test_run(repo_with_passing_hook, options, outputs, expected_ret, stage)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('no_stash', 'all_files', 'expect_stash'),
|
||||
(
|
||||
(True, True, False),
|
||||
(True, False, False),
|
||||
(False, True, False),
|
||||
(False, False, True),
|
||||
),
|
||||
)
|
||||
def test_no_stash(
|
||||
repo_with_passing_hook,
|
||||
no_stash,
|
||||
all_files,
|
||||
expect_stash,
|
||||
mock_out_store_directory,
|
||||
):
|
||||
stage_a_file()
|
||||
# Make unstaged changes
|
||||
with open('foo.py', 'w') as foo_file:
|
||||
foo_file.write('import os\n')
|
||||
|
||||
args = _get_opts(no_stash=no_stash, all_files=all_files)
|
||||
ret, printed = _do_run(repo_with_passing_hook, args)
|
||||
assert ret == 0
|
||||
warning_msg = '[WARNING] Unstaged files detected.'
|
||||
if expect_stash:
|
||||
assert warning_msg in printed
|
||||
else:
|
||||
assert warning_msg not in printed
|
||||
|
||||
|
||||
@pytest.mark.parametrize(('output', 'expected'), (('some', True), ('', False)))
|
||||
def test_has_unmerged_paths(output, expected):
|
||||
mock_runner = mock.Mock()
|
||||
mock_runner.cmd_runner.run.return_value = (1, output, '')
|
||||
assert commands._has_unmerged_paths(mock_runner) is expected
|
||||
|
||||
|
||||
def test_merge_conflict(in_merge_conflict, mock_out_store_directory):
|
||||
ret, printed = _do_run(in_merge_conflict, _get_opts())
|
||||
assert ret == 1
|
||||
assert 'Unmerged files. Resolve before committing.' in printed
|
||||
|
||||
|
||||
def test_merge_conflict_modified(in_merge_conflict, mock_out_store_directory):
|
||||
# Touch another file so we have unstaged non-conflicting things
|
||||
assert os.path.exists('dummy')
|
||||
with open('dummy', 'w') as dummy_file:
|
||||
dummy_file.write('bar\nbaz\n')
|
||||
|
||||
ret, printed = _do_run(in_merge_conflict, _get_opts())
|
||||
assert ret == 1
|
||||
assert 'Unmerged files. Resolve before committing.' in printed
|
||||
|
||||
|
||||
def test_merge_conflict_resolved(in_merge_conflict, mock_out_store_directory):
|
||||
local['git']['add', '.']()
|
||||
ret, printed = _do_run(in_merge_conflict, _get_opts())
|
||||
for msg in ('Checking merge-conflict files only.', 'Bash hook', 'Passed'):
|
||||
assert msg in printed
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
('environ', 'expected_output'),
|
||||
(
|
||||
({}, set([])),
|
||||
({'SKIP': ''}, set([])),
|
||||
({'SKIP': ','}, set([])),
|
||||
({'SKIP': ',foo'}, set(['foo'])),
|
||||
({'SKIP': 'foo'}, set(['foo'])),
|
||||
({'SKIP': 'foo,bar'}, set(['foo', 'bar'])),
|
||||
({'SKIP': ' foo , bar'}, set(['foo', 'bar'])),
|
||||
),
|
||||
)
|
||||
def test_get_skips(environ, expected_output):
|
||||
ret = commands._get_skips(environ)
|
||||
assert ret == expected_output
|
||||
|
||||
|
||||
def test_skip_hook(repo_with_passing_hook, mock_out_store_directory):
|
||||
ret, printed = _do_run(
|
||||
repo_with_passing_hook, _get_opts(), {'SKIP': 'bash_hook'},
|
||||
)
|
||||
for msg in ('Bash hook', 'Skipped'):
|
||||
assert msg in printed
|
||||
|
||||
|
||||
def test_hook_id_not_in_non_verbose_output(
|
||||
repo_with_passing_hook, mock_out_store_directory
|
||||
):
|
||||
ret, printed = _do_run(repo_with_passing_hook, _get_opts(verbose=False))
|
||||
assert '[bash_hook]' not in printed
|
||||
|
||||
|
||||
def test_hook_id_in_verbose_output(
|
||||
repo_with_passing_hook, mock_out_store_directory
|
||||
):
|
||||
ret, printed = _do_run(repo_with_passing_hook, _get_opts(verbose=True))
|
||||
assert '[bash_hook] Bash hook' in printed
|
||||
@@ -14,6 +14,7 @@ from pre_commit.clientlib.validate_config import CONFIG_JSON_SCHEMA
from pre_commit.clientlib.validate_config import validate_config_extra
from pre_commit.jsonschema_extensions import apply_defaults
from pre_commit.prefixed_command_runner import PrefixedCommandRunner
from pre_commit.runner import Runner
from pre_commit.store import Store
from testing.util import copy_tree_to_path
from testing.util import get_head_sha
@@ -264,3 +265,8 @@ def store(tmpdir_factory):
@pytest.yield_fixture
def cmd_runner(tmpdir_factory):
    yield PrefixedCommandRunner(tmpdir_factory.get())


@pytest.yield_fixture
def runner_with_mocked_store(mock_out_store_directory):
    yield Runner('/')
@@ -33,7 +33,7 @@ def test_cherry_pick_conflict(in_merge_conflict):
def get_files_matching_func():
    def get_filenames():
        return (
            'pre_commit/run.py',
            'pre_commit/main.py',
            'pre_commit/git.py',
            'im_a_file_that_doesnt_exist.py',
            'hooks.yaml',
@@ -45,7 +45,7 @@ def get_files_matching_func():
def test_get_files_matching_base(get_files_matching_func):
    ret = get_files_matching_func('', '^$')
    assert ret == set([
        'pre_commit/run.py',
        'pre_commit/main.py',
        'pre_commit/git.py',
        'hooks.yaml',
    ])
@@ -54,7 +54,7 @@ def test_get_files_matching_base(get_files_matching_func):
def test_get_files_matching_total_match(get_files_matching_func):
    ret = get_files_matching_func('^.*\\.py$', '^$')
    assert ret == set([
        'pre_commit/run.py',
        'pre_commit/main.py',
        'pre_commit/git.py',
    ])