mirror of
https://github.com/samsonjs/vdirsyncer.git
synced 2026-04-27 14:57:41 +00:00
parent
b449287784
commit
d454093365
6 changed files with 329 additions and 226 deletions
|
|
@ -6,8 +6,6 @@ import pytest
|
||||||
from vdirsyncer import cli, exceptions
|
from vdirsyncer import cli, exceptions
|
||||||
from vdirsyncer.cli.config import Config
|
from vdirsyncer.cli.config import Config
|
||||||
|
|
||||||
import vdirsyncer.cli.utils # noqa
|
|
||||||
|
|
||||||
|
|
||||||
invalid = object()
|
invalid = object()
|
||||||
|
|
||||||
|
|
@ -92,17 +90,6 @@ def test_invalid_section_type(read_config):
|
||||||
assert 'bogus' in str(excinfo.value)
|
assert 'bogus' in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
def test_storage_instance_from_config(monkeypatch):
|
|
||||||
def lol(**kw):
|
|
||||||
assert kw == {'foo': 'bar', 'baz': 1}
|
|
||||||
return 'OK'
|
|
||||||
|
|
||||||
monkeypatch.setitem(cli.utils.storage_names._storages,
|
|
||||||
'lol', lol)
|
|
||||||
config = {'type': 'lol', 'foo': 'bar', 'baz': 1}
|
|
||||||
assert cli.utils.storage_instance_from_config(config) == 'OK'
|
|
||||||
|
|
||||||
|
|
||||||
def test_missing_general_section(read_config):
|
def test_missing_general_section(read_config):
|
||||||
with pytest.raises(exceptions.UserError) as excinfo:
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
read_config(u'''
|
read_config(u'''
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.cli.utils import handle_cli_error
|
from vdirsyncer.cli.utils import handle_cli_error, \
|
||||||
|
storage_instance_from_config, storage_names
|
||||||
|
|
||||||
|
|
||||||
def test_handle_cli_error(capsys):
|
def test_handle_cli_error(capsys):
|
||||||
|
|
@ -11,3 +12,13 @@ def test_handle_cli_error(capsys):
|
||||||
out, err = capsys.readouterr()
|
out, err = capsys.readouterr()
|
||||||
assert 'returned something vdirsyncer doesn\'t understand' in err
|
assert 'returned something vdirsyncer doesn\'t understand' in err
|
||||||
assert 'ayy lmao' in err
|
assert 'ayy lmao' in err
|
||||||
|
|
||||||
|
|
||||||
|
def test_storage_instance_from_config(monkeypatch):
|
||||||
|
def lol(**kw):
|
||||||
|
assert kw == {'foo': 'bar', 'baz': 1}
|
||||||
|
return 'OK'
|
||||||
|
|
||||||
|
monkeypatch.setitem(storage_names._storages, 'lol', lol)
|
||||||
|
config = {'type': 'lol', 'foo': 'bar', 'baz': 1}
|
||||||
|
assert storage_instance_from_config(config) == 'OK'
|
||||||
|
|
|
||||||
67
tests/unit/cli/test_discover.py
Normal file
67
tests/unit/cli/test_discover.py
Normal file
|
|
@ -0,0 +1,67 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer.cli.discover import expand_collections
|
||||||
|
|
||||||
|
|
||||||
|
# Sentinel standing in for "collection not found on this side".
missing = object()


@pytest.mark.parametrize('shortcuts,expected', [
    (['from a'], [
        ('c1', ({'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'},
                {'type': 'fooboo', 'custom_arg': 'b1', 'collection': 'c1'})),
        ('c2', ({'type': 'fooboo', 'custom_arg': 'a2', 'collection': 'c2'},
                {'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'})),
        ('a3', ({'type': 'fooboo', 'custom_arg': 'a3', 'collection': 'a3'},
                missing))
    ]),
    (['from b'], [
        ('c1', ({'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'},
                {'type': 'fooboo', 'custom_arg': 'b1', 'collection': 'c1'})),
        ('c2', ({'type': 'fooboo', 'custom_arg': 'a2', 'collection': 'c2'},
                {'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'})),
        ('b3', (missing,
                {'type': 'fooboo', 'custom_arg': 'b3', 'collection': 'b3'}))
    ]),
    (None, [
        (None, ({'type': 'fooboo', 'storage_side': 'a', 'collection': None},
                {'type': 'fooboo', 'storage_side': 'b', 'collection': None}))
    ]),
    ([None], [
        (None, ({'type': 'fooboo', 'storage_side': 'a', 'collection': None},
                {'type': 'fooboo', 'storage_side': 'b', 'collection': None}))
    ]),
])
def test_expand_collections(shortcuts, expected):
    '''expand_collections resolves shortcuts against both sides' discovery
    results, substituting the not-found handler's return value for
    collections that one side does not have.'''
    config_a = {
        'type': 'fooboo',
        'storage_side': 'a'
    }
    config_b = {
        'type': 'fooboo',
        'storage_side': 'b'
    }

    def discovered_a():
        return {
            'c1': {'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'},
            'c2': {'type': 'fooboo', 'custom_arg': 'a2', 'collection': 'c2'},
            'a3': {'type': 'fooboo', 'custom_arg': 'a3', 'collection': 'a3'}
        }

    def discovered_b():
        return {
            'c1': {'type': 'fooboo', 'custom_arg': 'b1', 'collection': 'c1'},
            'c2': {'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'},
            'b3': {'type': 'fooboo', 'custom_arg': 'b3', 'collection': 'b3'}
        }

    actual = list(expand_collections(
        shortcuts,
        config_a, config_b,
        discovered_a, discovered_b,
        lambda config, collection: missing
    ))
    assert actual == expected
|
||||||
219
vdirsyncer/cli/discover.py
Normal file
219
vdirsyncer/cli/discover.py
Normal file
|
|
@ -0,0 +1,219 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from .utils import handle_collection_not_found, handle_storage_init_error, \
|
||||||
|
load_status, save_status, storage_class_from_config, \
|
||||||
|
storage_instance_from_config
|
||||||
|
|
||||||
|
from .. import exceptions
|
||||||
|
from ..utils import cached_property
|
||||||
|
|
||||||
|
|
||||||
|
# Increase whenever upgrade potentially breaks discovery cache and collections
|
||||||
|
# should be re-discovered
|
||||||
|
DISCOVERY_CACHE_VERSION = 1
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_collections_cache_key(pair):
    '''Hash the parts of a pair's configuration that affect discovery.

    Any change to the configured collections, either storage config, or a
    bump of ``DISCOVERY_CACHE_VERSION`` produces a different key and thus
    invalidates a cached discovery result.

    :param pair: The pair whose config should be hashed.
    :returns: A hex digest string.
    '''
    payload = json.dumps([
        DISCOVERY_CACHE_VERSION,
        pair.collections,
        pair.config_a,
        pair.config_b,
    ], sort_keys=True)  # sort_keys keeps the digest stable across runs
    digest = hashlib.sha256()
    digest.update(payload.encode('utf-8'))
    return digest.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def collections_for_pair(status_path, pair, from_cache=True,
                         list_collections=False):
    '''Determine all configured collections for a given pair. Takes care of
    shortcut expansion and result caching.

    :param status_path: The path to the status directory.
    :param pair: The pair whose collections should be resolved.
    :param from_cache: Whether to load from cache (aborting on cache miss) or
        discover and save to cache.
    :param list_collections: Whether to log the discovered collections of
        both storages.

    :returns: iterable of (collection, (a_args, b_args))
    :raises exceptions.UserError: On cache miss or config change while
        ``from_cache`` is set.
    '''
    cache_key = _get_collections_cache_key(pair)
    if from_cache:
        rv = load_status(status_path, pair.name, data_type='collections')
        if rv and rv.get('cache_key', None) == cache_key:
            return list(_expand_collections_cache(
                rv['collections'], pair.config_a, pair.config_b
            ))
        elif rv:
            # A cache entry exists but was written for a different config.
            raise exceptions.UserError('Detected change in config file, '
                                       'please run `vdirsyncer discover {}`.'
                                       .format(pair.name))
        else:
            # BUGFIX: the message previously contained a double space
            # ("...discover {}`  before synchronization.").
            raise exceptions.UserError('Please run `vdirsyncer discover {}` '
                                       'before synchronization.'
                                       .format(pair.name))

    logger.info('Discovering collections for pair {}'.format(pair.name))

    a_discovered = _DiscoverResult(pair.config_a)
    b_discovered = _DiscoverResult(pair.config_b)

    if list_collections:
        _print_collections(pair.config_a['instance_name'],
                           a_discovered.get_self())
        _print_collections(pair.config_b['instance_name'],
                           b_discovered.get_self())

    # We have to use a list here because the special None/null value would get
    # mangled to string (because JSON objects always have string keys).
    rv = list(expand_collections(
        shortcuts=pair.collections,
        config_a=pair.config_a,
        config_b=pair.config_b,
        get_a_discovered=a_discovered.get_self,
        get_b_discovered=b_discovered.get_self,
        _handle_collection_not_found=handle_collection_not_found
    ))

    _sanity_check_collections(rv)

    save_status(status_path, pair.name, data_type='collections',
                data={
                    'collections': list(
                        _compress_collections_cache(rv, pair.config_a,
                                                    pair.config_b)
                    ),
                    'cache_key': cache_key
                })
    return rv
|
||||||
|
|
||||||
|
|
||||||
|
def _sanity_check_collections(collections):
    '''Instantiate both storages of every resolved collection so that broken
    configs surface before anything is written to the status directory.'''
    for _name, configs in collections:
        for storage_config in configs:
            storage_instance_from_config(storage_config)
|
||||||
|
|
||||||
|
|
||||||
|
def _compress_collections_cache(collections, config_a, config_b):
|
||||||
|
def deduplicate(x, y):
|
||||||
|
rv = {}
|
||||||
|
for key, value in x.items():
|
||||||
|
if key not in y or y[key] != value:
|
||||||
|
rv[key] = value
|
||||||
|
|
||||||
|
return rv
|
||||||
|
|
||||||
|
for name, (a, b) in collections:
|
||||||
|
yield name, (deduplicate(a, config_a), deduplicate(b, config_b))
|
||||||
|
|
||||||
|
|
||||||
|
def _expand_collections_cache(collections, config_a, config_b):
|
||||||
|
for name, (a_delta, b_delta) in collections:
|
||||||
|
a = dict(config_a)
|
||||||
|
a.update(a_delta)
|
||||||
|
|
||||||
|
b = dict(config_b)
|
||||||
|
b.update(b_delta)
|
||||||
|
|
||||||
|
yield name, (a, b)
|
||||||
|
|
||||||
|
|
||||||
|
class _DiscoverResult:
    '''Wraps the collection-discovery result of one storage.

    Discovery runs on first access of ``_discovered`` (via the project's
    ``cached_property`` — presumably cached after the first call; confirm
    against ``vdirsyncer.utils.cached_property``), so a storage whose result
    is never requested is never contacted.
    '''

    def __init__(self, config):
        # Resolve the storage class up front so config errors surface early.
        self._cls, _ = storage_class_from_config(config)
        self._config = config

    def get_self(self):
        '''Return the discovery result (dict: collection name -> storage
        args), running discovery on first call.'''
        return self._discovered

    @cached_property
    def _discovered(self):
        try:
            discovered = list(self._cls.discover(**self._config))
        except NotImplementedError:
            # Storage type doesn't support discovery: treat as "nothing
            # discovered" rather than an error.
            return {}
        except Exception:
            # Delegate reporting; handle_storage_init_error may return a
            # fallback value or raise — NOTE(review): confirm its contract.
            return handle_storage_init_error(self._cls, self._config)
        else:
            storage_type = self._config['type']
            rv = {}
            for args in discovered:
                # discover() yields raw storage args; re-attach the type so
                # each entry is a complete storage config.
                args['type'] = storage_type
                rv[args['collection']] = args
            return rv
|
||||||
|
|
||||||
|
|
||||||
|
def expand_collections(shortcuts, config_a, config_b, get_a_discovered,
                       get_b_discovered, _handle_collection_not_found):
    '''Expand the ``collections`` shortcuts of a pair into concrete
    ``(name, (a_args, b_args))`` tuples.

    :param shortcuts: The configured collection list; ``None`` means "the
        single unnamed collection". Entries may be ``'from a'`` / ``'from b'``
        (take every collection that side discovered), a plain name, or a
        ``[name, name_a, name_b]`` triple mapping one logical collection to
        differently named collections on each side.
    :param get_a_discovered: Zero-arg callables returning the discovery dict
        of side a (resp. b); called lazily so unused sides stay untouched.
    :param _handle_collection_not_found: Called with ``(config, name)`` when
        a named collection is absent from a side's discovery result; its
        return value is used as that side's args.
    '''
    seen = set()

    for shortcut in (shortcuts if shortcuts is not None else [None]):
        if shortcut == 'from a':
            names = get_a_discovered()
        elif shortcut == 'from b':
            names = get_b_discovered()
        else:
            names = [shortcut]

        for name in names:
            if isinstance(name, list):
                name, name_a, name_b = name
            else:
                name_a = name_b = name

            # Each logical collection may be produced only once.
            assert name not in seen
            seen.add(name)

            yield name, (
                _collection_from_discovered(
                    get_a_discovered, name_a, config_a,
                    _handle_collection_not_found
                ),
                _collection_from_discovered(
                    get_b_discovered, name_b, config_b,
                    _handle_collection_not_found
                ),
            )


def _collection_from_discovered(get_discovered, collection, config,
                                _handle_collection_not_found):
    '''Return the storage args for one collection on one side.

    ``None`` means the unnamed base collection; otherwise the discovery
    result is consulted, falling back to the not-found handler.
    '''
    if collection is None:
        base = dict(config)
        base['collection'] = None
        return base

    try:
        return get_discovered()[collection]
    except KeyError:
        return _handle_collection_not_found(config, collection)
|
||||||
|
|
||||||
|
|
||||||
|
def _print_collections(instance_name, discovered):
    '''Log the discovered collections of one storage, annotating each with
    its display name where one is available and differs from the name.'''
    logger.info('{}:'.format(instance_name))
    for args in discovered.values():
        collection = args['collection']
        if collection is None:
            continue

        args['instance_name'] = instance_name
        try:
            storage = storage_instance_from_config(args, create=False)
            displayname = storage.get_meta('displayname')
        except Exception:
            # Best-effort: a storage that can't be instantiated or queried
            # is still listed, just without a display name.
            displayname = u''

        if displayname and displayname != collection:
            suffix = ' ("{}")'.format(displayname)
        else:
            suffix = ''
        logger.info(' - {}{}'.format(json.dumps(collection), suffix))
|
||||||
|
|
@ -4,9 +4,10 @@ import functools
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from .config import CollectionConfig
|
from .config import CollectionConfig
|
||||||
from .utils import JobFailed, cli_logger, collections_for_pair, \
|
from .discover import collections_for_pair, storage_class_from_config, \
|
||||||
get_status_name, handle_cli_error, load_status, save_status, \
|
storage_instance_from_config
|
||||||
storage_class_from_config, storage_instance_from_config
|
from .utils import JobFailed, cli_logger, get_status_name, handle_cli_error, \
|
||||||
|
load_status, save_status
|
||||||
|
|
||||||
from .. import exceptions
|
from .. import exceptions
|
||||||
from ..sync import sync
|
from ..sync import sync
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,6 @@
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import errno
|
import errno
|
||||||
import hashlib
|
|
||||||
import importlib
|
import importlib
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
|
|
@ -29,10 +28,6 @@ except ImportError:
|
||||||
STATUS_PERMISSIONS = 0o600
|
STATUS_PERMISSIONS = 0o600
|
||||||
STATUS_DIR_PERMISSIONS = 0o700
|
STATUS_DIR_PERMISSIONS = 0o700
|
||||||
|
|
||||||
# Increase whenever upgrade potentially breaks discovery cache and collections
|
|
||||||
# should be re-discovered
|
|
||||||
DISCOVERY_CACHE_VERSION = 1
|
|
||||||
|
|
||||||
|
|
||||||
class _StorageIndex(object):
|
class _StorageIndex(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
|
@ -171,215 +166,13 @@ def get_status_name(pair, collection):
|
||||||
return pair + '/' + collection
|
return pair + '/' + collection
|
||||||
|
|
||||||
|
|
||||||
def _get_collections_cache_key(pair):
|
|
||||||
m = hashlib.sha256()
|
|
||||||
j = json.dumps([
|
|
||||||
DISCOVERY_CACHE_VERSION,
|
|
||||||
pair.collections,
|
|
||||||
pair.config_a,
|
|
||||||
pair.config_b,
|
|
||||||
], sort_keys=True)
|
|
||||||
m.update(j.encode('utf-8'))
|
|
||||||
return m.hexdigest()
|
|
||||||
|
|
||||||
|
|
||||||
def collections_for_pair(status_path, pair, from_cache=True,
|
|
||||||
list_collections=False):
|
|
||||||
'''Determine all configured collections for a given pair. Takes care of
|
|
||||||
shortcut expansion and result caching.
|
|
||||||
|
|
||||||
:param status_path: The path to the status directory.
|
|
||||||
:param from_cache: Whether to load from cache (aborting on cache miss) or
|
|
||||||
discover and save to cache.
|
|
||||||
|
|
||||||
:returns: iterable of (collection, (a_args, b_args))
|
|
||||||
'''
|
|
||||||
cache_key = _get_collections_cache_key(pair)
|
|
||||||
if from_cache:
|
|
||||||
rv = load_status(status_path, pair.name, data_type='collections')
|
|
||||||
if rv and rv.get('cache_key', None) == cache_key:
|
|
||||||
return list(_expand_collections_cache(
|
|
||||||
rv['collections'], pair.config_a, pair.config_b
|
|
||||||
))
|
|
||||||
elif rv:
|
|
||||||
raise exceptions.UserError('Detected change in config file, '
|
|
||||||
'please run `vdirsyncer discover {}`.'
|
|
||||||
.format(pair.name))
|
|
||||||
else:
|
|
||||||
raise exceptions.UserError('Please run `vdirsyncer discover {}` '
|
|
||||||
' before synchronization.'
|
|
||||||
.format(pair.name))
|
|
||||||
|
|
||||||
cli_logger.info('Discovering collections for pair {}'
|
|
||||||
.format(pair.name))
|
|
||||||
|
|
||||||
# We have to use a list here because the special None/null value would get
|
|
||||||
# mangled to string (because JSON objects always have string keys).
|
|
||||||
rv = list(_collections_for_pair_impl(status_path, pair,
|
|
||||||
list_collections=list_collections))
|
|
||||||
|
|
||||||
save_status(status_path, pair.name, data_type='collections',
|
|
||||||
data={
|
|
||||||
'collections': list(
|
|
||||||
_compress_collections_cache(rv, pair.config_a,
|
|
||||||
pair.config_b)
|
|
||||||
),
|
|
||||||
'cache_key': cache_key
|
|
||||||
})
|
|
||||||
return rv
|
|
||||||
|
|
||||||
|
|
||||||
def _compress_collections_cache(collections, config_a, config_b):
|
|
||||||
def deduplicate(x, y):
|
|
||||||
rv = {}
|
|
||||||
for key, value in x.items():
|
|
||||||
if key not in y or y[key] != value:
|
|
||||||
rv[key] = value
|
|
||||||
|
|
||||||
return rv
|
|
||||||
|
|
||||||
for name, (a, b) in collections:
|
|
||||||
yield name, (deduplicate(a, config_a), deduplicate(b, config_b))
|
|
||||||
|
|
||||||
|
|
||||||
def _expand_collections_cache(collections, config_a, config_b):
|
|
||||||
for name, (a_delta, b_delta) in collections:
|
|
||||||
a = dict(config_a)
|
|
||||||
a.update(a_delta)
|
|
||||||
|
|
||||||
b = dict(config_b)
|
|
||||||
b.update(b_delta)
|
|
||||||
|
|
||||||
yield name, (a, b)
|
|
||||||
|
|
||||||
|
|
||||||
def _discover_from_config(config):
|
|
||||||
storage_type = config['type']
|
|
||||||
cls, config = storage_class_from_config(config)
|
|
||||||
|
|
||||||
discovered = []
|
|
||||||
|
|
||||||
try:
|
|
||||||
discovered.extend(cls.discover(**config))
|
|
||||||
except NotImplementedError:
|
|
||||||
pass
|
|
||||||
except Exception:
|
|
||||||
return handle_storage_init_error(cls, config)
|
|
||||||
|
|
||||||
rv = {}
|
|
||||||
for args in discovered:
|
|
||||||
args['type'] = storage_type
|
|
||||||
rv[args['collection']] = args
|
|
||||||
return rv
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_collection_not_found(config, collection, e=None):
|
|
||||||
storage_name = config.get('instance_name', None)
|
|
||||||
|
|
||||||
cli_logger.warning('{}No collection {} found for storage {}.'
|
|
||||||
.format('{}\n'.format(e) if e else '',
|
|
||||||
json.dumps(collection), storage_name))
|
|
||||||
|
|
||||||
if click.confirm('Should vdirsyncer attempt to create it?'):
|
|
||||||
storage_type = config['type']
|
|
||||||
cls, config = storage_class_from_config(config)
|
|
||||||
config['collection'] = collection
|
|
||||||
try:
|
|
||||||
args = cls.create_collection(**config)
|
|
||||||
args['type'] = storage_type
|
|
||||||
return args
|
|
||||||
except NotImplementedError as e:
|
|
||||||
cli_logger.error(e)
|
|
||||||
|
|
||||||
raise exceptions.UserError(
|
|
||||||
'Unable to find or create collection "{collection}" for '
|
|
||||||
'storage "{storage}". Please create the collection '
|
|
||||||
'yourself.'.format(collection=collection,
|
|
||||||
storage=storage_name))
|
|
||||||
|
|
||||||
|
|
||||||
def _print_collections(base_config, discovered):
|
|
||||||
instance_name = base_config['instance_name']
|
|
||||||
cli_logger.info('{}:'.format(instance_name))
|
|
||||||
for args in discovered.values():
|
|
||||||
collection = args['collection']
|
|
||||||
if collection is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
args['instance_name'] = instance_name
|
|
||||||
try:
|
|
||||||
storage = storage_instance_from_config(args, create=False)
|
|
||||||
displayname = storage.get_meta('displayname')
|
|
||||||
except Exception:
|
|
||||||
displayname = u''
|
|
||||||
|
|
||||||
cli_logger.info(' - {}{}'.format(
|
|
||||||
json.dumps(collection),
|
|
||||||
' ("{}")'.format(displayname)
|
|
||||||
if displayname and displayname != collection
|
|
||||||
else ''
|
|
||||||
))
|
|
||||||
|
|
||||||
|
|
||||||
def _collections_for_pair_impl(status_path, pair, list_collections=False):
|
|
||||||
handled_collections = set()
|
|
||||||
|
|
||||||
shortcuts = pair.collections
|
|
||||||
if shortcuts is None:
|
|
||||||
shortcuts = [None]
|
|
||||||
|
|
||||||
a_discovered = _discover_from_config(pair.config_a)
|
|
||||||
b_discovered = _discover_from_config(pair.config_b)
|
|
||||||
|
|
||||||
if list_collections:
|
|
||||||
_print_collections(pair.config_a, a_discovered)
|
|
||||||
_print_collections(pair.config_b, b_discovered)
|
|
||||||
|
|
||||||
for shortcut in shortcuts:
|
|
||||||
if shortcut == 'from a':
|
|
||||||
collections = a_discovered
|
|
||||||
elif shortcut == 'from b':
|
|
||||||
collections = b_discovered
|
|
||||||
else:
|
|
||||||
collections = [shortcut]
|
|
||||||
|
|
||||||
for collection in collections:
|
|
||||||
if isinstance(collection, list):
|
|
||||||
collection, collection_a, collection_b = collection
|
|
||||||
else:
|
|
||||||
collection_a = collection_b = collection
|
|
||||||
|
|
||||||
if collection in handled_collections:
|
|
||||||
continue
|
|
||||||
handled_collections.add(collection)
|
|
||||||
|
|
||||||
a_args = _collection_from_discovered(a_discovered, collection_a,
|
|
||||||
pair.config_a)
|
|
||||||
b_args = _collection_from_discovered(b_discovered, collection_b,
|
|
||||||
pair.config_b)
|
|
||||||
|
|
||||||
yield collection, (a_args, b_args)
|
|
||||||
|
|
||||||
|
|
||||||
def _collection_from_discovered(discovered, collection, config):
|
|
||||||
if collection is None:
|
|
||||||
args = dict(config)
|
|
||||||
args['collection'] = None
|
|
||||||
storage_instance_from_config(args)
|
|
||||||
return args
|
|
||||||
|
|
||||||
try:
|
|
||||||
return discovered[collection]
|
|
||||||
except KeyError:
|
|
||||||
return _handle_collection_not_found(config, collection)
|
|
||||||
|
|
||||||
|
|
||||||
def load_status(base_path, pair, collection=None, data_type=None):
|
def load_status(base_path, pair, collection=None, data_type=None):
|
||||||
assert data_type is not None
|
assert data_type is not None
|
||||||
status_name = get_status_name(pair, collection)
|
status_name = get_status_name(pair, collection)
|
||||||
path = expand_path(os.path.join(base_path, status_name))
|
path = expand_path(os.path.join(base_path, status_name))
|
||||||
if os.path.isfile(path) and data_type == 'items':
|
if os.path.isfile(path) and data_type == 'items':
|
||||||
new_path = path + '.items'
|
new_path = path + '.items'
|
||||||
|
# XXX: Legacy migration
|
||||||
cli_logger.warning('Migrating statuses: Renaming {} to {}'
|
cli_logger.warning('Migrating statuses: Renaming {} to {}'
|
||||||
.format(path, new_path))
|
.format(path, new_path))
|
||||||
os.rename(path, new_path)
|
os.rename(path, new_path)
|
||||||
|
|
@ -441,7 +234,7 @@ def storage_instance_from_config(config, create=True):
|
||||||
return cls(**new_config)
|
return cls(**new_config)
|
||||||
except exceptions.CollectionNotFound as e:
|
except exceptions.CollectionNotFound as e:
|
||||||
if create:
|
if create:
|
||||||
config = _handle_collection_not_found(
|
config = handle_collection_not_found(
|
||||||
config, config.get('collection', None), e=str(e))
|
config, config.get('collection', None), e=str(e))
|
||||||
return storage_instance_from_config(config, create=False)
|
return storage_instance_from_config(config, create=False)
|
||||||
else:
|
else:
|
||||||
|
|
@ -592,3 +385,28 @@ def assert_permissions(path, wanted):
|
||||||
cli_logger.warning('Correcting permissions of {} from {:o} to {:o}'
|
cli_logger.warning('Correcting permissions of {} from {:o} to {:o}'
|
||||||
.format(path, permissions, wanted))
|
.format(path, permissions, wanted))
|
||||||
os.chmod(path, wanted)
|
os.chmod(path, wanted)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_collection_not_found(config, collection, e=None):
    '''Warn about a missing collection and offer to create it interactively.

    :param config: The storage config the collection belongs to.
    :param collection: The name of the missing collection.
    :param e: Optional original error message to prepend to the warning.
    :returns: The storage args of the newly created collection.
    :raises exceptions.UserError: If the user declines, or creation is not
        supported by the storage class.
    '''
    storage_name = config.get('instance_name', None)

    cli_logger.warning('{}No collection {} found for storage {}.'
                       .format('{}\n'.format(e) if e else '',
                               json.dumps(collection), storage_name))

    if click.confirm('Should vdirsyncer attempt to create it?'):
        storage_type = config['type']
        # NOTE: rebinds `config` to the class-specific config returned by
        # storage_class_from_config.
        cls, config = storage_class_from_config(config)
        config['collection'] = collection
        try:
            args = cls.create_collection(**config)
            args['type'] = storage_type
            return args
        except NotImplementedError as e:
            # Storage class can't create collections; log and fall through
            # to the UserError below.
            cli_logger.error(e)

    raise exceptions.UserError(
        'Unable to find or create collection "{collection}" for '
        'storage "{storage}". Please create the collection '
        'yourself.'.format(collection=collection,
                           storage=storage_name))
||||||
|
|
|
||||||
Loading…
Reference in a new issue