vdirsyncer can now be somewhat used

This commit is contained in:
Markus Unterwaditzer 2014-02-27 14:14:37 +01:00
parent dc12b74805
commit 46fa1d7c47
8 changed files with 124 additions and 45 deletions

View file

@ -15,9 +15,13 @@ from vdirsyncer.sync import sync
from vdirsyncer.storage.caldav import CaldavStorage from vdirsyncer.storage.caldav import CaldavStorage
from vdirsyncer.storage.filesystem import FilesystemStorage from vdirsyncer.storage.filesystem import FilesystemStorage
from vdirsyncer.utils import expand_path from vdirsyncer.utils import expand_path
import vdirsyncer.log as log
import argvard import argvard
cli_logger = log.get('cli')
storage_names = { storage_names = {
'caldav': CaldavStorage, 'caldav': CaldavStorage,
'filesystem': FilesystemStorage 'filesystem': FilesystemStorage
@ -43,7 +47,8 @@ def get_config_parser(env):
elif section == 'general': elif section == 'general':
general = dict(c.items(section)) general = dict(c.items(section))
else: else:
raise RuntimeError('Unknown section: {}'.format(section)) cli_logger.error(
'Unknown section in {}: {}'.format(fname, section))
return general, pairs, storages return general, pairs, storages
@ -61,16 +66,31 @@ def save_status(basepath, pair_name, status):
with open(full_path, 'w+') as f: with open(full_path, 'w+') as f:
for k, v in status.items(): for k, v in status.items():
json.dump((k, v), f) json.dump((k, v), f)
f.write('\n')
def storage_instance_from_config(config): def storage_instance_from_config(config):
config = dict(config) config = dict(config)
cls = storage_names[config.pop('type')] storage_name = config.pop('type')
cls = storage_names[storage_name]
try: try:
return cls(**config) return cls(**config)
except TypeError: except TypeError as e:
print(config) import inspect
raise x = cli_logger.critical
spec = inspect.getargspec(cls.__init__)
required_args = set(spec.args[:-len(spec.defaults)])
x(str(e))
x('')
x('Unable to initialize storage {}.'.format(storage_name))
x('Here are the required arguments for the storage:')
x(list(required_args - {'self'}))
x('Here are the optional arguments:')
x(list(set(spec.args) - required_args))
x('And here are the ones you gave: ')
x(list(config))
sys.exit(1)
def main(): def main():
@ -86,11 +106,15 @@ def _main(env, file_cfg):
@app.main() @app.main()
def app_main(context): def app_main(context):
print("heY") print("Hello.")
@app.option('--debug|-v')
def debug_option(context):
log.get('cli').setLevel(log.logging.DEBUG)
log.get('sync').setLevel(log.logging.DEBUG)
sync_command = argvard.Command() sync_command = argvard.Command()
@sync_command.main('[pairs...]') @sync_command.main('[pairs...]')
def sync_main(context, pairs=None): def sync_main(context, pairs=None):
if pairs is None: if pairs is None:
@ -100,13 +124,15 @@ def _main(env, file_cfg):
try: try:
a, b = all_pairs[pair_name] a, b = all_pairs[pair_name]
except KeyError: except KeyError:
print('Pair not found: {}'.format(pair_name)) cli_logger.critical('Pair not found: {}'.format(pair_name))
print(file_cfg) cli_logger.critical('These are the pairs found: ')
cli_logger.critical(list(all_pairs))
sys.exit(1) sys.exit(1)
a = storage_instance_from_config(all_storages[a]) a = storage_instance_from_config(all_storages[a])
b = storage_instance_from_config(all_storages[b]) b = storage_instance_from_config(all_storages[b])
def x(a=a, b=b, pair_name=pair_name): def x(a=a, b=b, pair_name=pair_name):
cli_logger.debug('Syncing {}'.format(pair_name))
status = load_status(general['status_path'], pair_name) status = load_status(general['status_path'], pair_name)
sync(a, b, status) sync(a, b, status)
save_status(general['status_path'], pair_name, status) save_status(general['status_path'], pair_name, status)

30
vdirsyncer/log.py Normal file
View file

@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
'''
vdirsyncer.log
~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
import logging
import sys
stdout_handler = logging.StreamHandler(sys.stdout)


def create_logger(name):
    '''Build a logger for *name* that emits WARNING-and-above to stdout.

    All loggers produced here share the single module-level stream
    handler, so output ordering is consistent across subsystems.
    '''
    logger = logging.getLogger(name)
    logger.setLevel(logging.WARNING)
    logger.addHandler(stdout_handler)
    return logger
# Cache of already-configured loggers, keyed by their fully-qualified name.
loggers = {}


def get(name):
    '''Return the shared logger for the given vdirsyncer subsystem.

    The short subsystem name (e.g. ``'cli'``, ``'sync'``) is expanded to
    its fully-qualified form, and the configured logger is created once
    via ``create_logger`` and cached, so repeated calls with the same
    name return the same object.
    '''
    # Fix: the prefix was 'watdo.' -- a leftover copied from another
    # project.  This module is vdirsyncer.log (see the docstring above),
    # so its loggers belong in the 'vdirsyncer.' namespace.
    name = 'vdirsyncer.' + name
    if name not in loggers:
        loggers[name] = create_logger(name)
    return loggers[name]

View file

@ -13,16 +13,14 @@ class Item(object):
'''should-be-immutable wrapper class for VCALENDAR and VCARD''' '''should-be-immutable wrapper class for VCALENDAR and VCARD'''
def __init__(self, raw): def __init__(self, raw):
self.raw = raw assert type(raw) is unicode
self._uid = None raw = raw.splitlines()
self.uid = None
@property for line in raw:
def uid(self): if line.startswith(u'UID:'):
if self._uid is None: self.uid = line[4:].strip()
for line in self.raw.splitlines(): self.raw = '\n'.join(raw)
if line.startswith(b'UID:'):
self._uid = line[4:].strip()
return self._uid
class Storage(object): class Storage(object):

View file

@ -13,6 +13,7 @@ from .base import Storage, Item
import vdirsyncer.exceptions as exceptions import vdirsyncer.exceptions as exceptions
from lxml import etree from lxml import etree
import requests import requests
import urlparse
import datetime import datetime
CALDAV_DT_FORMAT = '%Y%m%dT%H%M%SZ' CALDAV_DT_FORMAT = '%Y%m%dT%H%M%SZ'
@ -53,6 +54,7 @@ class CaldavStorage(Storage):
self.useragent = useragent self.useragent = useragent
self.url = url.rstrip('/') + '/' self.url = url.rstrip('/') + '/'
self.parsed_url = urlparse.urlparse(self.url)
self.start_date = start_date self.start_date = start_date
self.end_date = end_date self.end_date = end_date
@ -74,8 +76,10 @@ class CaldavStorage(Storage):
} }
def _simplify_href(self, href): def _simplify_href(self, href):
if href.startswith(self.url): href = urlparse.urlparse(href).path
return href[len(self.url):] if href.startswith(self.parsed_url.path):
href = href[len(self.parsed_url.path):]
assert '/' not in href, href
return href return href
def _request(self, method, item, data=None, headers=None): def _request(self, method, item, data=None, headers=None):
@ -144,7 +148,7 @@ class CaldavStorage(Storage):
</C:calendar-multiget>''' </C:calendar-multiget>'''
href_xml = [] href_xml = []
for href in hrefs: for href in hrefs:
assert '/' not in href assert '/' not in href, href
href_xml.append('<D:href>{}</D:href>'.format(self.url + href)) href_xml.append('<D:href>{}</D:href>'.format(self.url + href))
data = data.format(hrefs='\n'.join(href_xml)) data = data.format(hrefs='\n'.join(href_xml))
response = self._request( response = self._request(
@ -154,19 +158,24 @@ class CaldavStorage(Storage):
headers=self._default_headers() headers=self._default_headers()
) )
response.raise_for_status() response.raise_for_status()
root = etree.XML(response.content) root = etree.XML(response.content) # etree only can handle bytes
rv = [] rv = []
hrefs_left = set(hrefs) hrefs_left = set(hrefs)
for element in root.iter('{DAV:}response'): for element in root.iter('{DAV:}response'):
href = self._simplify_href(element.find('{DAV:}href').text) href = self._simplify_href(
element.find('{DAV:}href').text.decode(response.encoding))
obj = element \ obj = element \
.find('{DAV:}propstat') \ .find('{DAV:}propstat') \
.find('{DAV:}prop') \ .find('{DAV:}prop') \
.find('{urn:ietf:params:xml:ns:caldav}calendar-data').text .find('{urn:ietf:params:xml:ns:caldav}calendar-data').text
etag = element \ etag = element \
.find('{DAV:}propstat') \ .find('{DAV:}propstat') \
.find('{DAV:}prop') \ .find('{DAV:}prop') \
.find('{DAV:}getetag').text .find('{DAV:}getetag').text
if isinstance(obj, bytes):
obj = obj.decode(response.encoding)
if isinstance(etag, bytes):
etag = etag.decode(response.encoding)
rv.append((href, Item(obj), etag)) rv.append((href, Item(obj), etag))
hrefs_left.remove(href) hrefs_left.remove(href)
for href in hrefs_left: for href in hrefs_left:

View file

@ -19,11 +19,12 @@ class FilesystemStorage(Storage):
mtime is etag mtime is etag
filename without path is href''' filename without path is href'''
def __init__(self, path, fileext, **kwargs): def __init__(self, path, fileext, encoding='utf-8', **kwargs):
''' '''
:param path: Absolute path to a *collection* inside a vdir. :param path: Absolute path to a *collection* inside a vdir.
''' '''
self.path = expand_path(path) self.path = expand_path(path)
self.encoding = encoding
self.fileext = fileext self.fileext = fileext
super(FilesystemStorage, self).__init__(**kwargs) super(FilesystemStorage, self).__init__(**kwargs)
@ -42,7 +43,7 @@ class FilesystemStorage(Storage):
def get(self, href): def get(self, href):
fpath = self._get_filepath(href) fpath = self._get_filepath(href)
with open(fpath, 'rb') as f: with open(fpath, 'rb') as f:
return Item(f.read()), os.path.getmtime(fpath) return Item(f.read().decode(self.encoding)), os.path.getmtime(fpath)
def has(self, href): def has(self, href):
return os.path.isfile(self._get_filepath(href)) return os.path.isfile(self._get_filepath(href))
@ -53,7 +54,7 @@ class FilesystemStorage(Storage):
if os.path.exists(fpath): if os.path.exists(fpath):
raise exceptions.AlreadyExistingError(obj.uid) raise exceptions.AlreadyExistingError(obj.uid)
with open(fpath, 'wb+') as f: with open(fpath, 'wb+') as f:
f.write(obj.raw) f.write(obj.raw.encode(self.encoding))
return href, os.path.getmtime(fpath) return href, os.path.getmtime(fpath)
def update(self, href, obj, etag): def update(self, href, obj, etag):
@ -67,7 +68,7 @@ class FilesystemStorage(Storage):
raise exceptions.WrongEtagError(etag, actual_etag) raise exceptions.WrongEtagError(etag, actual_etag)
with open(fpath, 'wb') as f: with open(fpath, 'wb') as f:
f.write(obj.raw) f.write(obj.raw.encode('utf-8'))
return os.path.getmtime(fpath) return os.path.getmtime(fpath)
def delete(self, href, etag): def delete(self, href, etag):

View file

@ -12,6 +12,9 @@
:copyright: (c) 2014 Markus Unterwaditzer :copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details. :license: MIT, see LICENSE for more details.
''' '''
import vdirsyncer.exceptions as exceptions
import vdirsyncer.log
sync_logger = vdirsyncer.log.get('sync')
def prepare_list(storage, href_to_uid): def prepare_list(storage, href_to_uid):
@ -49,10 +52,14 @@ def sync(storage_a, storage_b, status):
modified by the function and should be passed to it at the next sync. modified by the function and should be passed to it at the next sync.
If this is the first sync, an empty dictionary should be provided. If this is the first sync, an empty dictionary should be provided.
''' '''
a_href_to_uid = dict((href_a, uid) a_href_to_uid = dict(
for uid, (href_a, etag_a, href_b, etag_b) in status.iteritems()) (href_a, uid)
b_href_to_uid = dict((href_b, uid) for uid, (href_a, etag_a, href_b, etag_b) in status.iteritems()
for uid, (href_a, etag_a, href_b, etag_b) in status.iteritems()) )
b_href_to_uid = dict(
(href_b, uid)
for uid, (href_a, etag_a, href_b, etag_b) in status.iteritems()
)
# href => {'etag': etag, 'obj': optional object, 'uid': uid} # href => {'etag': etag, 'obj': optional object, 'uid': uid}
list_a = dict(prepare_list(storage_a, a_href_to_uid)) list_a = dict(prepare_list(storage_a, a_href_to_uid))
list_b = dict(prepare_list(storage_b, b_href_to_uid)) list_b = dict(prepare_list(storage_b, b_href_to_uid))
@ -73,6 +80,7 @@ def sync(storage_a, storage_b, status):
} }
for action, uid, source, dest in actions: for action, uid, source, dest in actions:
sync_logger.debug((action, uid, source, dest))
source_storage, source_list, source_uid_to_href = storages[source] source_storage, source_list, source_uid_to_href = storages[source]
dest_storage, dest_list, dest_uid_to_href = storages[dest] dest_storage, dest_list, dest_uid_to_href = storages[dest]
@ -114,8 +122,13 @@ def get_actions(list_a, list_b, status, a_uid_to_href, b_uid_to_href):
if uid not in status: if uid not in status:
if uid in uids_a and uid in uids_b: # missing status if uid in uids_a and uid in uids_b: # missing status
# TODO: might need some kind of diffing too? # TODO: might need some kind of diffing too?
assert type(a['obj'].raw) is unicode, repr(a['obj'].raw)
assert type(b['obj'].raw) is unicode, repr(b['obj'].raw)
if a['obj'].raw != b['obj'].raw: if a['obj'].raw != b['obj'].raw:
1 / 0 raise NotImplementedError(
'Conflict. No status and '
'different content on both sides.'
)
status[uid] = (href_a, a['etag'], href_b, b['etag']) status[uid] = (href_a, a['etag'], href_b, b['etag'])
# new item was created in a # new item was created in a
elif uid in uids_a and uid not in uids_b: elif uid in uids_a and uid not in uids_b:
@ -129,7 +142,9 @@ def get_actions(list_a, list_b, status, a_uid_to_href, b_uid_to_href):
_, status_etag_a, _, status_etag_b = status[uid] _, status_etag_a, _, status_etag_b = status[uid]
if uid in uids_a and uid in uids_b: if uid in uids_a and uid in uids_b:
if a['etag'] != status_etag_a and b['etag'] != status_etag_b: if a['etag'] != status_etag_a and b['etag'] != status_etag_b:
1 / 0 # conflict resolution TODO # conflict resolution TODO
raise NotImplementedError('Conflict. '
'New etags on both sides.')
elif a['etag'] != status_etag_a: # item was updated in a elif a['etag'] != status_etag_a: # item was updated in a
prefetch_from_a.append(href_a) prefetch_from_a.append(href_a)
actions.append(('update', uid, 'a', 'b')) actions.append(('update', uid, 'a', 'b'))

View file

@ -22,7 +22,7 @@ import vdirsyncer.exceptions as exceptions
class StorageTests(object): class StorageTests(object):
def _create_bogus_item(self, uid): def _create_bogus_item(self, uid):
return Item('''BEGIN:VCALENDAR return Item(u'''BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//dmfs.org//mimedir.icalendar//EN PRODID:-//dmfs.org//mimedir.icalendar//EN
BEGIN:VTODO BEGIN:VTODO

View file

@ -33,7 +33,7 @@ class SyncTests(TestCase):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item = Item('UID:1') item = Item(u'UID:1')
a.upload(item) a.upload(item)
b.upload(item) b.upload(item)
sync(a, b, status) sync(a, b, status)
@ -46,8 +46,8 @@ class SyncTests(TestCase):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item1 = Item('UID:1\nhaha') item1 = Item(u'UID:1\nhaha')
item2 = Item('UID:1\nhoho') item2 = Item(u'UID:1\nhoho')
a.upload(item1) a.upload(item1)
b.upload(item2) b.upload(item2)
sync(a, b, status) sync(a, b, status)
@ -59,22 +59,22 @@ class SyncTests(TestCase):
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item = Item('UID:1') # new item 1 in a item = Item(u'UID:1') # new item 1 in a
a.upload(item) a.upload(item)
sync(a, b, status) sync(a, b, status)
assert b.get('1.txt')[0].raw == item.raw assert b.get('1.txt')[0].raw == item.raw
item = Item('UID:1\nASDF:YES') # update of item 1 in b item = Item(u'UID:1\nASDF:YES') # update of item 1 in b
b.update('1.txt', item, b.get('1.txt')[1]) b.update('1.txt', item, b.get('1.txt')[1])
sync(a, b, status) sync(a, b, status)
assert a.get('1.txt')[0].raw == item.raw assert a.get('1.txt')[0].raw == item.raw
item2 = Item('UID:2') # new item 2 in b item2 = Item(u'UID:2') # new item 2 in b
b.upload(item2) b.upload(item2)
sync(a, b, status) sync(a, b, status)
assert a.get('2.txt')[0].raw == item2.raw assert a.get('2.txt')[0].raw == item2.raw
item2 = Item('UID:2\nASDF:YES') # update of item 2 in a item2 = Item(u'UID:2\nASDF:YES') # update of item 2 in a
a.update('2.txt', item2, a.get('2.txt')[1]) a.update('2.txt', item2, a.get('2.txt')[1])
sync(a, b, status) sync(a, b, status)
assert b.get('2.txt')[0].raw == item2.raw assert b.get('2.txt')[0].raw == item2.raw
@ -84,7 +84,7 @@ class SyncTests(TestCase):
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item = Item('UID:1') item = Item(u'UID:1')
a.upload(item) a.upload(item)
sync(a, b, status) sync(a, b, status)
b.delete('1.txt', b.get('1.txt')[1]) b.delete('1.txt', b.get('1.txt')[1])
@ -101,7 +101,7 @@ class SyncTests(TestCase):
def test_already_synced(self): def test_already_synced(self):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item('UID:1') item = Item(u'UID:1')
a.upload(item) a.upload(item)
b.upload(item) b.upload(item)
status = { status = {