Merge pull request #906 from asottile/gc

pre-commit gc
Authored by Anthony Sottile on 2019-01-02 19:46:38 -08:00; committed via GitHub.
12 changed files with 412 additions and 116 deletions.

pre_commit/commands/gc.py (new file, 83 lines)

@@ -0,0 +1,83 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import os.path

import pre_commit.constants as C
from pre_commit import output
from pre_commit.clientlib import InvalidConfigError
from pre_commit.clientlib import InvalidManifestError
from pre_commit.clientlib import is_local_repo
from pre_commit.clientlib import is_meta_repo
from pre_commit.clientlib import load_config
from pre_commit.clientlib import load_manifest


def _mark_used_repos(store, all_repos, unused_repos, repo):
    if is_meta_repo(repo):
        return
    elif is_local_repo(repo):
        for hook in repo['hooks']:
            deps = hook.get('additional_dependencies')
            unused_repos.discard((
                store.db_repo_name(repo['repo'], deps), C.LOCAL_REPO_VERSION,
            ))
    else:
        key = (repo['repo'], repo['rev'])
        path = all_repos.get(key)
        # can't inspect manifest if it isn't cloned
        if path is None:
            return
        try:
            manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE))
        except InvalidManifestError:
            return
        else:
            unused_repos.discard(key)
            by_id = {hook['id']: hook for hook in manifest}

        for hook in repo['hooks']:
            if hook['id'] not in by_id:
                continue

            deps = hook.get(
                'additional_dependencies',
                by_id[hook['id']]['additional_dependencies'],
            )
            unused_repos.discard((
                store.db_repo_name(repo['repo'], deps), repo['rev'],
            ))


def _gc_repos(store):
    configs = store.select_all_configs()
    repos = store.select_all_repos()

    # delete config paths which do not exist
    dead_configs = [p for p in configs if not os.path.exists(p)]
    live_configs = [p for p in configs if os.path.exists(p)]

    all_repos = {(repo, ref): path for repo, ref, path in repos}
    unused_repos = set(all_repos)
    for config_path in live_configs:
        try:
            config = load_config(config_path)
        except InvalidConfigError:
            dead_configs.append(config_path)
            continue
        else:
            for repo in config['repos']:
                _mark_used_repos(store, all_repos, unused_repos, repo)

    store.delete_configs(dead_configs)
    for db_repo_name, ref in unused_repos:
        store.delete_repo(db_repo_name, ref, all_repos[(db_repo_name, ref)])
    return len(unused_repos)


def gc(store):
    with store.exclusive_lock():
        repos_removed = _gc_repos(store)
    output.write_line('{} repo(s) removed.'.format(repos_removed))
    return 0
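The command keys each cached clone by (db_repo_name, ref), where db_repo_name folds any additional_dependencies into the repository name, and a clone survives gc only if some tracked config still references that exact key. Below is a minimal sketch of driving the new code by hand; the store directory, config filename, and repository URL are invented for the demo and are not part of the PR.

# Sketch only -- hypothetical paths and URL, real pre-commit APIs from this diff.
from pre_commit.commands.gc import gc
from pre_commit.store import Store

store = Store('/tmp/pre-commit-gc-demo')             # db + README created eagerly
store.mark_config_used('.pre-commit-config.yaml')    # silently skipped if missing

# dependency-less and dependency-suffixed clones are distinct cache keys
url = 'https://example.com/some-hooks-repo'
assert Store.db_repo_name(url, ()) == url
assert Store.db_repo_name(url, ('b', 'a')) == url + ':a,b'

gc(store)  # prints "<n> repo(s) removed." and returns 0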


@@ -32,7 +32,6 @@ def _log_and_exit(msg, exc, formatted):
     ))
     output.write(error_msg)
     store = Store()
-    store.require_created()
     log_path = os.path.join(store.directory, 'pre-commit.log')
     output.write_line('Check the log at {}'.format(log_path))
     with open(log_path, 'wb') as log:


@@ -11,6 +11,7 @@ from pre_commit import five
 from pre_commit import git
 from pre_commit.commands.autoupdate import autoupdate
 from pre_commit.commands.clean import clean
+from pre_commit.commands.gc import gc
 from pre_commit.commands.install_uninstall import install
 from pre_commit.commands.install_uninstall import install_hooks
 from pre_commit.commands.install_uninstall import uninstall
@@ -176,6 +177,11 @@ def main(argv=None):
     )
     _add_color_option(clean_parser)
     _add_config_option(clean_parser)
 
+    gc_parser = subparsers.add_parser('gc', help='Clean unused cached repos.')
+    _add_color_option(gc_parser)
+    _add_config_option(gc_parser)
+
     autoupdate_parser = subparsers.add_parser(
         'autoupdate',
         help="Auto-update pre-commit config to the latest repos' versions.",
@@ -251,9 +257,11 @@ def main(argv=None):
     with error_handler(), logging_handler(args.color):
         _adjust_args_and_chdir(args)
 
-        store = Store()
         git.check_for_cygwin_mismatch()
 
+        store = Store()
+        store.mark_config_used(args.config)
+
         if args.command == 'install':
             return install(
                 args.config, store,
@@ -267,6 +275,8 @@ def main(argv=None):
             return uninstall(hook_type=args.hook_type)
         elif args.command == 'clean':
             return clean(store)
+        elif args.command == 'gc':
+            return gc(store)
         elif args.command == 'autoupdate':
             if args.tags_only:
                 logger.warning('--tags-only is the default')
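For readers unfamiliar with how main() wires subcommands, the hunks above extend the usual argparse subparser-plus-dispatch pattern with a gc entry. The sketch below is a stripped-down illustration of that pattern only, not pre-commit's actual entry point.

# Illustration of the dispatch pattern extended above; not pre-commit's real code.
import argparse


def main(argv=None):
    parser = argparse.ArgumentParser(prog='pre-commit')
    subparsers = parser.add_subparsers(dest='command')
    subparsers.add_parser('clean', help='Clean out pre-commit files.')
    subparsers.add_parser('gc', help='Clean unused cached repos.')
    args = parser.parse_args(argv)

    if args.command == 'clean':
        return 0  # the real command calls clean(store)
    elif args.command == 'gc':
        return 0  # the real command calls gc(store)
    else:
        parser.print_help()
        return 1


if __name__ == '__main__':
    raise SystemExit(main())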


@@ -13,6 +13,7 @@ from pre_commit import git
 from pre_commit.util import clean_path_on_failure
 from pre_commit.util import cmd_output
 from pre_commit.util import resource_text
+from pre_commit.util import rmtree
 
 
 logger = logging.getLogger('pre_commit')
@@ -33,10 +34,43 @@ def _get_default_directory():
 class Store(object):
     get_default_directory = staticmethod(_get_default_directory)
-    __created = False
 
     def __init__(self, directory=None):
         self.directory = directory or Store.get_default_directory()
-        self.db_path = os.path.join(self.directory, 'db.db')
+
+        if not os.path.exists(self.directory):
+            os.makedirs(self.directory)
+            with io.open(os.path.join(self.directory, 'README'), 'w') as f:
+                f.write(
+                    'This directory is maintained by the pre-commit project.\n'
+                    'Learn more: https://github.com/pre-commit/pre-commit\n',
+                )
+
+        if os.path.exists(self.db_path):
+            return
+        with self.exclusive_lock():
+            # Another process may have already completed this work
+            if os.path.exists(self.db_path):  # pragma: no cover (race)
+                return
+
+            # To avoid a race where someone ^Cs between db creation and
+            # execution of the CREATE TABLE statement
+            fd, tmpfile = tempfile.mkstemp(dir=self.directory)
+            # We'll be managing this file ourselves
+            os.close(fd)
+            with self.connect(db_path=tmpfile) as db:
+                db.executescript(
+                    'CREATE TABLE repos ('
+                    '    repo TEXT NOT NULL,'
+                    '    ref TEXT NOT NULL,'
+                    '    path TEXT NOT NULL,'
+                    '    PRIMARY KEY (repo, ref)'
+                    ');',
+                )
+                self._create_config_table_if_not_exists(db)
+
+            # Atomic file move
+            os.rename(tmpfile, self.db_path)
 
     @contextlib.contextmanager
     def exclusive_lock(self):
@@ -46,62 +80,30 @@ class Store(object):
         with file_lock.lock(os.path.join(self.directory, '.lock'), blocked_cb):
             yield
 
-    def _write_readme(self):
-        with io.open(os.path.join(self.directory, 'README'), 'w') as readme:
-            readme.write(
-                'This directory is maintained by the pre-commit project.\n'
-                'Learn more: https://github.com/pre-commit/pre-commit\n',
-            )
-
-    def _write_sqlite_db(self):
-        # To avoid a race where someone ^Cs between db creation and execution
-        # of the CREATE TABLE statement
-        fd, tmpfile = tempfile.mkstemp(dir=self.directory)
-        # We'll be managing this file ourselves
-        os.close(fd)
+    @contextlib.contextmanager
+    def connect(self, db_path=None):
+        db_path = db_path or self.db_path
         # sqlite doesn't close its fd with its contextmanager >.<
         # contextlib.closing fixes this.
         # See: https://stackoverflow.com/a/28032829/812183
-        with contextlib.closing(sqlite3.connect(tmpfile)) as db:
-            db.executescript(
-                'CREATE TABLE repos ('
-                '    repo TEXT NOT NULL,'
-                '    ref TEXT NOT NULL,'
-                '    path TEXT NOT NULL,'
-                '    PRIMARY KEY (repo, ref)'
-                ');',
-            )
+        with contextlib.closing(sqlite3.connect(db_path)) as db:
+            # this creates a transaction
+            with db:
+                yield db
-
-        # Atomic file move
-        os.rename(tmpfile, self.db_path)
-
-    def _create(self):
-        if not os.path.exists(self.directory):
-            os.makedirs(self.directory)
-            self._write_readme()
-
-        if os.path.exists(self.db_path):
-            return
-        with self.exclusive_lock():
-            # Another process may have already completed this work
-            if os.path.exists(self.db_path):  # pragma: no cover (race)
-                return
-            self._write_sqlite_db()
-
-    def require_created(self):
-        """Require the pre-commit file store to be created."""
-        if not self.__created:
-            self._create()
-            self.__created = True
+
+    @classmethod
+    def db_repo_name(cls, repo, deps):
+        if deps:
+            return '{}:{}'.format(repo, ','.join(sorted(deps)))
+        else:
+            return repo
 
     def _new_repo(self, repo, ref, deps, make_strategy):
-        self.require_created()
-        if deps:
-            repo = '{}:{}'.format(repo, ','.join(sorted(deps)))
+        repo = self.db_repo_name(repo, deps)
 
         def _get_result():
             # Check if we already exist
-            with sqlite3.connect(self.db_path) as db:
+            with self.connect() as db:
                 result = db.execute(
                     'SELECT path FROM repos WHERE repo = ? AND ref = ?',
                     (repo, ref),
@@ -125,7 +127,7 @@ class Store(object):
             make_strategy(directory)
             # Update our db with the created repo
-            with sqlite3.connect(self.db_path) as db:
+            with self.connect() as db:
                 db.execute(
                     'INSERT INTO repos (repo, ref, path) VALUES (?, ?, ?)',
                     [repo, ref, directory],
@@ -175,6 +177,43 @@ class Store(object):
             'local', C.LOCAL_REPO_VERSION, deps, make_local_strategy,
         )
 
+    @property
+    def db_path(self):
+        return os.path.join(self.directory, 'db.db')
+
+    def _create_config_table_if_not_exists(self, db):
+        db.executescript(
+            'CREATE TABLE IF NOT EXISTS configs ('
+            '    path TEXT NOT NULL,'
+            '    PRIMARY KEY (path)'
+            ');',
+        )
+
+    def mark_config_used(self, path):
+        path = os.path.realpath(path)
+        # don't insert config files that do not exist
+        if not os.path.exists(path):
+            return
+        with self.connect() as db:
+            # TODO: eventually remove this and only create in _create
+            self._create_config_table_if_not_exists(db)
+            db.execute('INSERT OR IGNORE INTO configs VALUES (?)', (path,))
+
+    def select_all_configs(self):
+        with self.connect() as db:
+            self._create_config_table_if_not_exists(db)
+            rows = db.execute('SELECT path FROM configs').fetchall()
+            return [path for path, in rows]
+
+    def delete_configs(self, configs):
+        with self.connect() as db:
+            rows = [(path,) for path in configs]
+            db.executemany('DELETE FROM configs WHERE path = ?', rows)
+
+    def select_all_repos(self):
+        with self.connect() as db:
+            return db.execute('SELECT repo, ref, path from repos').fetchall()
+
+    def delete_repo(self, db_repo_name, ref, path):
+        with self.connect() as db:
+            db.execute(
+                'DELETE FROM repos WHERE repo = ? and ref = ?',
+                (db_repo_name, ref),
+            )
+        rmtree(path)
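Taken together, the Store changes replace the lazy require_created() dance with eager creation in __init__ and add a small config-tracking surface. A hedged usage sketch, assuming a writable scratch directory (the paths below are hypothetical, the methods are the ones added in this diff):

# Sketch of the new Store surface; paths are made up for the demo.
from pre_commit.store import Store

store = Store('/tmp/pre-commit-store-demo')          # directory and db created up front
store.mark_config_used('.pre-commit-config.yaml')    # no-op for files that do not exist

print(store.select_all_configs())   # realpaths of every config ever marked used
print(store.select_all_repos())     # [(repo, ref, path), ...]

# forgetting configs makes their clones eligible for `pre-commit gc`
store.delete_configs(store.select_all_configs())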


@@ -82,7 +82,7 @@ def modify_config(path='.', commit=True):
         git_commit(msg=modify_config.__name__, cwd=path)
 
 
-def config_with_local_hooks():
+def sample_local_config():
     return {
         'repo': 'local',
         'hooks': [{
@@ -94,6 +94,10 @@ def config_with_local_hooks():
     }
 
 
+def sample_meta_config():
+    return {'repo': 'meta', 'hooks': [{'id': 'check-useless-excludes'}]}
+
+
 def make_config_from_repo(repo_path, rev=None, hooks=None, check=True):
     manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
     config = {


@@ -11,7 +11,7 @@ from pre_commit.clientlib import MANIFEST_SCHEMA
 from pre_commit.clientlib import MigrateShaToRev
 from pre_commit.clientlib import validate_config_main
 from pre_commit.clientlib import validate_manifest_main
-from testing.fixtures import config_with_local_hooks
+from testing.fixtures import sample_local_config
 from testing.util import get_resource_path
@@ -94,7 +94,7 @@ def test_config_valid(config_obj, expected):
 def test_local_hooks_with_rev_fails():
-    config_obj = {'repos': [config_with_local_hooks()]}
+    config_obj = {'repos': [sample_local_config()]}
     config_obj['repos'][0]['rev'] = 'foo'
     with pytest.raises(cfgv.ValidationError):
         cfgv.validate(config_obj, CONFIG_SCHEMA)


@@ -15,9 +15,9 @@ from pre_commit.commands.autoupdate import RepositoryCannotBeUpdatedError
 from pre_commit.util import cmd_output
 from testing.auto_namedtuple import auto_namedtuple
 from testing.fixtures import add_config_to_repo
-from testing.fixtures import config_with_local_hooks
 from testing.fixtures import make_config_from_repo
 from testing.fixtures import make_repo
+from testing.fixtures import sample_local_config
 from testing.fixtures import write_config
 from testing.util import get_resource_path
 from testing.util import git_commit
@@ -125,7 +125,7 @@ def test_autoupdate_out_of_date_repo_with_correct_repo_name(
     stale_config = make_config_from_repo(
         out_of_date_repo.path, rev=out_of_date_repo.original_rev, check=False,
     )
-    local_config = config_with_local_hooks()
+    local_config = sample_local_config()
     config = {'repos': [stale_config, local_config]}
     # Write out the config
     write_config('.', config)
@@ -139,7 +139,7 @@ def test_autoupdate_out_of_date_repo_with_correct_repo_name(
     assert ret == 0
     assert before != after
     assert out_of_date_repo.head_rev in after
-    assert local_config['repo'] in after
+    assert 'local' in after
 
 
 def test_autoupdate_out_of_date_repo_with_wrong_repo_name(
@@ -316,7 +316,7 @@ def test_autoupdate_hook_disappearing_repo(
 def test_autoupdate_local_hooks(in_git_dir, store):
-    config = config_with_local_hooks()
+    config = sample_local_config()
     add_config_to_repo('.', config)
     assert autoupdate(C.CONFIG_FILE, store, tags_only=False) == 0
     new_config_writen = load_config(C.CONFIG_FILE)
@@ -330,7 +330,7 @@ def test_autoupdate_local_hooks_with_out_of_date_repo(
     stale_config = make_config_from_repo(
         out_of_date_repo.path, rev=out_of_date_repo.original_rev, check=False,
     )
-    local_config = config_with_local_hooks()
+    local_config = sample_local_config()
     config = {'repos': [local_config, stale_config]}
     write_config('.', config)
     assert autoupdate(C.CONFIG_FILE, store, tags_only=False) == 0


@@ -21,7 +21,6 @@ def fake_old_dir(tempdir_factory):
 def test_clean(store, fake_old_dir):
-    store.require_created()
     assert os.path.exists(fake_old_dir)
     assert os.path.exists(store.directory)
     clean(store)
@@ -30,6 +29,7 @@ def test_clean(store, fake_old_dir):
 def test_clean_idempotent(store):
     clean(store)
+    assert not os.path.exists(store.directory)
     clean(store)
     assert not os.path.exists(store.directory)

tests/commands/gc_test.py (new file, 158 lines)

@@ -0,0 +1,158 @@
import os

import pre_commit.constants as C
from pre_commit import git
from pre_commit.commands.autoupdate import autoupdate
from pre_commit.commands.gc import gc
from pre_commit.repository import all_hooks
from testing.fixtures import make_config_from_repo
from testing.fixtures import make_repo
from testing.fixtures import modify_config
from testing.fixtures import sample_local_config
from testing.fixtures import sample_meta_config
from testing.fixtures import write_config
from testing.util import git_commit


def _repo_count(store):
    return len(store.select_all_repos())


def _config_count(store):
    return len(store.select_all_configs())


def _remove_config_assert_cleared(store, cap_out):
    os.remove(C.CONFIG_FILE)

    assert not gc(store)
    assert _config_count(store) == 0
    assert _repo_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'


def test_gc(tempdir_factory, store, in_git_dir, cap_out):
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    old_rev = git.head_rev(path)
    git_commit(cwd=path)

    write_config('.', make_config_from_repo(path, rev=old_rev))
    store.mark_config_used(C.CONFIG_FILE)

    # update will clone both the old and new repo, making the old one gc-able
    assert not autoupdate(C.CONFIG_FILE, store, tags_only=False)

    assert _config_count(store) == 1
    assert _repo_count(store) == 2

    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)


def test_gc_repo_not_cloned(tempdir_factory, store, in_git_dir, cap_out):
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    write_config('.', make_config_from_repo(path))
    store.mark_config_used(C.CONFIG_FILE)

    assert _config_count(store) == 1
    assert _repo_count(store) == 0
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'


def test_gc_meta_repo_does_not_crash(store, in_git_dir, cap_out):
    write_config('.', sample_meta_config())
    store.mark_config_used(C.CONFIG_FILE)
    assert not gc(store)
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'


def test_gc_local_repo_does_not_crash(store, in_git_dir, cap_out):
    write_config('.', sample_local_config())
    store.mark_config_used(C.CONFIG_FILE)
    assert not gc(store)
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'


def test_gc_unused_local_repo_with_env(store, in_git_dir, cap_out):
    config = {
        'repo': 'local',
        'hooks': [{
            'id': 'flake8', 'name': 'flake8', 'entry': 'flake8',
            # a `language: python` local hook will create an environment
            'types': ['python'], 'language': 'python',
        }],
    }
    write_config('.', config)
    store.mark_config_used(C.CONFIG_FILE)

    # this causes the repositories to be created
    all_hooks({'repos': [config]}, store)

    assert _config_count(store) == 1
    assert _repo_count(store) == 1

    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)


def test_gc_config_with_missing_hook(
        tempdir_factory, store, in_git_dir, cap_out,
):
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    write_config('.', make_config_from_repo(path))
    store.mark_config_used(C.CONFIG_FILE)

    with modify_config() as config:
        # just to trigger a clone
        all_hooks(config, store)
        # add a hook which does not exist, make sure we don't crash
        config['repos'][0]['hooks'].append({'id': 'does-not-exist'})

    assert _config_count(store) == 1
    assert _repo_count(store) == 1

    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'

    _remove_config_assert_cleared(store, cap_out)


def test_gc_deletes_invalid_configs(store, in_git_dir, cap_out):
    config = {'i am': 'invalid'}
    write_config('.', config)
    store.mark_config_used(C.CONFIG_FILE)

    assert _config_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '0 repo(s) removed.'


def test_invalid_manifest_gcd(tempdir_factory, store, in_git_dir, cap_out):
    # clean up repos from old pre-commit versions
    path = make_repo(tempdir_factory, 'script_hooks_repo')
    write_config('.', make_config_from_repo(path))
    store.mark_config_used(C.CONFIG_FILE)

    # trigger a clone
    assert not autoupdate(C.CONFIG_FILE, store, tags_only=False)

    # we'll "break" the manifest to simulate an old version clone
    (_, _, path), = store.select_all_repos()
    os.remove(os.path.join(path, C.MANIFEST_FILE))

    assert _config_count(store) == 1
    assert _repo_count(store) == 1
    assert not gc(store)
    assert _config_count(store) == 1
    assert _repo_count(store) == 0
    assert cap_out.get().splitlines()[-1] == '1 repo(s) removed.'
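The tests above lean on fixtures from pre-commit's test-suite conftest (store, in_git_dir, cap_out, tempdir_factory), which is not part of this diff. As a rough, assumption-labeled sketch, the store fixture they use amounts to a Store rooted in a throwaway directory:

# Assumption: roughly what the `store` fixture provides; pre-commit's real
# conftest differs in detail.
import pytest

from pre_commit.store import Store


@pytest.fixture
def store(tmpdir):
    yield Store(directory=str(tmpdir.join('.pre-commit')))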


@@ -22,6 +22,7 @@ from testing.fixtures import add_config_to_repo
 from testing.fixtures import make_consuming_repo
 from testing.fixtures import modify_config
 from testing.fixtures import read_config
+from testing.fixtures import sample_meta_config
 from testing.util import cmd_output_mocked_pre_commit_home
 from testing.util import cwd
 from testing.util import git_commit
@@ -656,11 +657,7 @@ def test_pcre_deprecation_warning(cap_out, store, repo_with_passing_hook):
 def test_meta_hook_passes(cap_out, store, repo_with_passing_hook):
-    config = {
-        'repo': 'meta',
-        'hooks': [{'id': 'check-useless-excludes'}],
-    }
-    add_config_to_repo(repo_with_passing_hook, config)
+    add_config_to_repo(repo_with_passing_hook, sample_meta_config())
 
     _test_run(
         cap_out,


@@ -65,8 +65,8 @@ def test_adjust_args_try_repo_repo_relative(in_git_dir):
 FNS = (
-    'autoupdate', 'clean', 'install', 'install_hooks', 'migrate_config', 'run',
-    'sample_config', 'uninstall',
+    'autoupdate', 'clean', 'gc', 'install', 'install_hooks', 'migrate_config',
+    'run', 'sample_config', 'uninstall',
 )
 CMDS = tuple(fn.replace('_', '-') for fn in FNS)


@@ -12,7 +12,6 @@ import six
 from pre_commit import git
 from pre_commit.store import _get_default_directory
 from pre_commit.store import Store
-from pre_commit.util import rmtree
 from testing.fixtures import git_dir
 from testing.util import cwd
 from testing.util import git_commit
@@ -48,9 +47,7 @@ def test_uses_environment_variable_when_present():
         assert ret == '/tmp/pre_commit_home'
 
 
-def test_store_require_created(store):
-    assert not os.path.exists(store.directory)
-    store.require_created()
+def test_store_init(store):
     # Should create the store directory
     assert os.path.exists(store.directory)
     # Should create a README file indicating what the directory is about
@@ -63,30 +60,6 @@ def test_store_require_created(store):
         assert text_line in readme_contents
 
 
-def test_store_require_created_does_not_create_twice(store):
-    assert not os.path.exists(store.directory)
-    store.require_created()
-    # We intentionally delete the directory here so we can figure out if it
-    # calls it again.
-    rmtree(store.directory)
-    assert not os.path.exists(store.directory)
-    # Call require_created, this should not trigger a call to create
-    store.require_created()
-    assert not os.path.exists(store.directory)
-
-
-def test_does_not_recreate_if_directory_already_exists(store):
-    assert not os.path.exists(store.directory)
-    # We manually create the directory.
-    # Note: we're intentionally leaving out the README file. This is so we can
-    # know that `Store` didn't call create
-    os.mkdir(store.directory)
-    open(store.db_path, 'a').close()
-    # Call require_created, this should not call create
-    store.require_created()
-    assert not os.path.exists(os.path.join(store.directory, 'README'))
-
-
 def test_clone(store, tempdir_factory, log_info_mock):
     path = git_dir(tempdir_factory)
     with cwd(path):
@@ -110,34 +83,25 @@ def test_clone(store, tempdir_factory, log_info_mock):
     assert git.head_rev(ret) == rev
     # Assert there's an entry in the sqlite db for this
-    with sqlite3.connect(store.db_path) as db:
-        path, = db.execute(
-            'SELECT path from repos WHERE repo = ? and ref = ?',
-            (path, rev),
-        ).fetchone()
-        assert path == ret
+    assert store.select_all_repos() == [(path, rev, ret)]
 
 
 def test_clone_cleans_up_on_checkout_failure(store):
-    try:
+    with pytest.raises(Exception) as excinfo:
         # This raises an exception because you can't clone something that
         # doesn't exist!
         store.clone('/i_dont_exist_lol', 'fake_rev')
-    except Exception as e:
-        assert '/i_dont_exist_lol' in six.text_type(e)
+    assert '/i_dont_exist_lol' in six.text_type(excinfo.value)
 
-    things_starting_with_repo = [
-        thing for thing in os.listdir(store.directory)
-        if thing.startswith('repo')
+    repo_dirs = [
+        d for d in os.listdir(store.directory) if d.startswith('repo')
     ]
-    assert things_starting_with_repo == []
+    assert repo_dirs == []
 
 
 def test_clone_when_repo_already_exists(store):
     # Create an entry in the sqlite db that makes it look like the repo has
     # been cloned.
-    store.require_created()
     with sqlite3.connect(store.db_path) as db:
         db.execute(
             'INSERT INTO repos (repo, ref, path) '
@@ -147,14 +111,24 @@ def test_clone_when_repo_already_exists(store):
     assert store.clone('fake_repo', 'fake_ref') == 'fake_path'
 
 
-def test_require_created_when_directory_exists_but_not_db(store):
+def test_create_when_directory_exists_but_not_db(store):
     # In versions <= 0.3.5, there was no sqlite db causing a need for
     # backward compatibility
-    os.makedirs(store.directory)
-    store.require_created()
+    os.remove(store.db_path)
+    store = Store(store.directory)
     assert os.path.exists(store.db_path)
 
 
+def test_create_when_store_already_exists(store):
+    # an assertion that this is idempotent and does not crash
+    Store(store.directory)
+
+
+def test_db_repo_name(store):
+    assert store.db_repo_name('repo', ()) == 'repo'
+    assert store.db_repo_name('repo', ('b', 'a', 'c')) == 'repo:a,b,c'
+
+
 def test_local_resources_reflects_reality():
     on_disk = {
         res[len('empty_template_'):]
@@ -162,3 +136,35 @@ def test_local_resources_reflects_reality():
         if res.startswith('empty_template_')
     }
     assert on_disk == set(Store.LOCAL_RESOURCES)
+
+
+def test_mark_config_as_used(store, tmpdir):
+    with tmpdir.as_cwd():
+        f = tmpdir.join('f').ensure()
+        store.mark_config_used('f')
+        assert store.select_all_configs() == [f.strpath]
+
+
+def test_mark_config_as_used_idempotent(store, tmpdir):
+    test_mark_config_as_used(store, tmpdir)
+    test_mark_config_as_used(store, tmpdir)
+
+
+def test_mark_config_as_used_does_not_exist(store):
+    store.mark_config_used('f')
+    assert store.select_all_configs() == []
+
+
+def _simulate_pre_1_14_0(store):
+    with store.connect() as db:
+        db.executescript('DROP TABLE configs')
+
+
+def test_select_all_configs_roll_forward(store):
+    _simulate_pre_1_14_0(store)
+    assert store.select_all_configs() == []
+
+
+def test_mark_config_as_used_roll_forward(store, tmpdir):
+    _simulate_pre_1_14_0(store)
+    test_mark_config_as_used(store, tmpdir)
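The roll-forward tests simulate a store created by an older pre-commit, which has a repos table but no configs table; mark_config_used and select_all_configs recover by creating the table on demand with CREATE TABLE IF NOT EXISTS. A standalone sketch of that idempotent DDL follows; the database path and inserted config path are invented for the demo.

# Standalone sketch of the roll-forward DDL; db_path and the inserted config
# path are hypothetical.
import contextlib
import sqlite3

db_path = '/tmp/old-style-pre-commit-db.db'
with contextlib.closing(sqlite3.connect(db_path)) as db:
    with db:  # commit on success
        # safe against both old stores (no configs table) and new ones
        db.executescript(
            'CREATE TABLE IF NOT EXISTS configs ('
            '    path TEXT NOT NULL,'
            '    PRIMARY KEY (path)'
            ');',
        )
        db.execute('INSERT OR IGNORE INTO configs VALUES (?)', ('/tmp/cfg.yaml',))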