mirror of
https://github.com/samsonjs/vdirsyncer.git
synced 2026-03-25 08:55:50 +00:00
Merge branch 'singlefilestorage'
This commit is contained in:
commit
411bfb993b
9 changed files with 318 additions and 35 deletions
|
|
@ -85,5 +85,6 @@ END:VCALENDAR'''
|
|||
|
||||
SIMPLE_TEMPLATE = u'''BEGIN:FOO
|
||||
UID:{r}
|
||||
END:FOO
|
||||
'''
|
||||
X-SOMETHING:{r}
|
||||
HAHA:YES
|
||||
END:FOO'''
|
||||
|
|
|
|||
|
|
@ -10,14 +10,14 @@ import random
|
|||
|
||||
import pytest
|
||||
|
||||
from .. import assert_item_equals
|
||||
from .. import assert_item_equals, SIMPLE_TEMPLATE
|
||||
import vdirsyncer.exceptions as exceptions
|
||||
from vdirsyncer.storage.base import Item
|
||||
from vdirsyncer.utils import text_type
|
||||
from vdirsyncer.utils import text_type, iteritems
|
||||
|
||||
|
||||
class StorageTests(object):
|
||||
item_template = u'X-SOMETHING:{r}'
|
||||
item_template = SIMPLE_TEMPLATE
|
||||
|
||||
def _create_bogus_item(self, item_template=None):
|
||||
r = random.random()
|
||||
|
|
@ -164,3 +164,17 @@ class StorageTests(object):
|
|||
href, etag = s.upload(self._create_bogus_item())
|
||||
assert s.has(href)
|
||||
assert not s.has('asd')
|
||||
|
||||
def test_update_others_stay_the_same(self):
    '''Uploading several items must leave every (href, etag) pair intact
    when they are fetched back through ``get_multi``.'''
    storage = self._get_storage()

    expected = dict(
        storage.upload(self._create_bogus_item()) for _ in range(4)
    )

    fetched = storage.get_multi(
        href for href, etag in iteritems(expected)
    )
    assert dict((href, etag) for href, item, etag in fetched) == expected
|
||||
|
|
|
|||
50
tests/storage/test_singlefile.py
Normal file
50
tests/storage/test_singlefile.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
tests.storage.test_singlefile
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:copyright: (c) 2014 Markus Unterwaditzer
|
||||
:license: MIT, see LICENSE for more details.
|
||||
'''
|
||||
|
||||
import pytest
|
||||
|
||||
from .. import assert_item_equals
|
||||
from . import StorageTests
|
||||
from vdirsyncer.storage.singlefile import SingleFileStorage
|
||||
|
||||
|
||||
class TestSingleFileStorage(StorageTests):
    '''Run the generic storage test suite against SingleFileStorage.'''

    storage_class = SingleFileStorage

    @pytest.fixture(autouse=True)
    def setup(self, tmpdir):
        # Every test gets a fresh collection file inside its own tmpdir.
        self._path = str(tmpdir.join('test.txt'))

    def get_storage_args(self, **kwargs):
        return dict(path=self._path, wrapper=u'MYWRAPPER')

    def test_discover(self):
        '''This test doesn't make any sense here.'''

    def test_discover_collection_arg(self):
        '''This test doesn't make any sense here.'''

    def test_collection_arg(self):
        '''This test doesn't make any sense here.'''

    def test_update(self):
        '''The original testcase tries to fetch with the old href. But this
        storage doesn't have real hrefs, so the href might change if the
        underlying UID changes.'''

        storage = self._get_storage()

        original = self._create_bogus_item()
        href, etag = storage.upload(original)
        assert_item_equals(storage.get(href)[0], original)

        replacement = self._create_bogus_item()
        storage.update(href, replacement, etag)
        ((fresh_href, fresh_etag),) = storage.list()
        assert_item_equals(storage.get(fresh_href)[0], replacement)
|
||||
|
|
@ -7,29 +7,39 @@
|
|||
:license: MIT, see LICENSE for more details.
|
||||
'''
|
||||
|
||||
from vdirsyncer.utils.vobject import split_collection
|
||||
from vdirsyncer.utils.vobject import split_collection, join_collection, \
|
||||
hash_item
|
||||
|
||||
from .. import normalize_item, SIMPLE_TEMPLATE, BARE_EVENT_TEMPLATE
|
||||
from .. import normalize_item, SIMPLE_TEMPLATE, BARE_EVENT_TEMPLATE, \
|
||||
EVENT_TEMPLATE
|
||||
|
||||
|
||||
_simple_joined = u'\r\n'.join((
|
||||
u'BEGIN:VADDRESSBOOK',
|
||||
SIMPLE_TEMPLATE.format(r=123),
|
||||
SIMPLE_TEMPLATE.format(r=345),
|
||||
SIMPLE_TEMPLATE.format(r=678),
|
||||
u'END:VADDRESSBOOK'
|
||||
))
|
||||
|
||||
_simple_split = [
|
||||
SIMPLE_TEMPLATE.format(r=123),
|
||||
SIMPLE_TEMPLATE.format(r=345),
|
||||
SIMPLE_TEMPLATE.format(r=678)
|
||||
]
|
||||
|
||||
|
||||
def test_split_collection_simple():
|
||||
input = u'\r\n'.join((
|
||||
u'BEGIN:VADDRESSBOOK',
|
||||
SIMPLE_TEMPLATE.format(r=123),
|
||||
SIMPLE_TEMPLATE.format(r=345),
|
||||
SIMPLE_TEMPLATE.format(r=678),
|
||||
u'END:VADDRESSBOOK'
|
||||
))
|
||||
given = split_collection(_simple_joined)
|
||||
assert [normalize_item(item) for item in given] == \
|
||||
[normalize_item(item) for item in _simple_split]
|
||||
|
||||
given = split_collection(input)
|
||||
expected = [
|
||||
SIMPLE_TEMPLATE.format(r=123),
|
||||
SIMPLE_TEMPLATE.format(r=345),
|
||||
SIMPLE_TEMPLATE.format(r=678)
|
||||
]
|
||||
|
||||
assert set(normalize_item(item) for item in given) == \
|
||||
set(normalize_item(item) for item in expected)
|
||||
def test_join_collection_simple():
    '''Joining the split items must reproduce the original collection.'''
    joined = join_collection(_simple_split, wrapper=u'VADDRESSBOOK')
    # pytest shows captured stdout on failure, so dump both sides.
    print(joined)
    print(_simple_joined)
    assert normalize_item(joined) == normalize_item(_simple_joined)
|
||||
|
||||
|
||||
def test_split_collection_timezones():
|
||||
|
|
@ -66,3 +76,10 @@ def test_split_collection_timezones():
|
|||
)
|
||||
|
||||
assert given == expected
|
||||
|
||||
|
||||
def test_hash_item():
    '''PRODID and VERSION lines must not influence an item's hash.'''
    full = EVENT_TEMPLATE.format(r=1)
    stripped = u'\n'.join(
        line for line in full.splitlines()
        if u'PRODID' not in line and u'VERSION' not in line
    )
    assert hash_item(full) == hash_item(stripped)
|
||||
|
|
|
|||
|
|
@ -15,10 +15,12 @@
|
|||
from .dav import CarddavStorage, CaldavStorage
from .filesystem import FilesystemStorage
from .http import HttpStorage
from .singlefile import SingleFileStorage

# Maps the storage type names used in configuration files to their
# implementing classes.
storage_names = {
    'caldav': CaldavStorage,
    'carddav': CarddavStorage,
    'filesystem': FilesystemStorage,
    'http': HttpStorage,
    'singlefile': SingleFileStorage,
}
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@
|
|||
:license: MIT, see LICENSE for more details.
|
||||
'''
|
||||
|
||||
import hashlib
|
||||
|
||||
from .. import exceptions
|
||||
from .. import utils
|
||||
|
|
@ -35,16 +34,15 @@ class Item(object):
|
|||
|
||||
def __init__(self, raw):
    '''
    :param raw: Unicode text of a single item (e.g. one VEVENT or VCARD).
    '''
    assert isinstance(raw, utils.text_type)

    # Remember the (last) nonempty UID line found in the raw text, if any.
    # NOTE(review): when no UID line exists, self.uid presumably falls back
    # to a class-level default -- confirm against the class definition.
    for line in raw.splitlines():
        if line.startswith(u'UID:'):
            uid = line[4:].strip()
            if uid:
                self.uid = uid

    self.raw = raw
    self.hash = utils.vobject.hash_item(raw)
    # Prefer a stable UID as identity; fall back to the content hash.
    self.ident = self.uid or self.hash
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@
|
|||
from .base import Item, Storage
|
||||
from ..utils import expand_path, get_password, request, text_type, urlparse
|
||||
from ..utils.vobject import split_collection
|
||||
from ..exceptions import NotFoundError
|
||||
|
||||
USERAGENT = 'vdirsyncer'
|
||||
|
||||
|
|
@ -36,6 +37,7 @@ def prepare_verify(verify):
|
|||
|
||||
class HttpStorage(Storage):
|
||||
_repr_attributes = ('username', 'url')
|
||||
_items = None
|
||||
|
||||
def __init__(self, url, username='', password='', collection=None,
|
||||
verify=True, auth=None, useragent=USERAGENT, **kwargs):
|
||||
|
|
@ -67,7 +69,6 @@ class HttpStorage(Storage):
|
|||
self.url = url
|
||||
self.parsed_url = urlparse.urlparse(self.url)
|
||||
self.collection = collection
|
||||
self._items = {}
|
||||
|
||||
def _default_headers(self):
|
||||
return {'User-Agent': self.useragent}
|
||||
|
|
@ -75,13 +76,24 @@ class HttpStorage(Storage):
|
|||
def list(self):
    '''Fetch the remote collection and return a list of (href, etag)
    pairs, refreshing the local item cache along the way.

    :raises requests.HTTPError: via ``raise_for_status`` on HTTP errors.
    '''
    r = request('GET', self.url, **self._settings)
    r.raise_for_status()
    self._items = {}
    rv = []
    for item in split_collection(r.text):
        item = Item(item)
        href = self._get_href(item)
        etag = item.hash
        self._items[href] = item, etag
        rv.append((href, etag))

    # we can't use yield here because we need to populate our
    # dict even if the user doesn't exhaust the iterator
    return rv
|
||||
|
||||
def get(self, href):
    '''Return the (item, etag) pair for *href*.

    Lazily populates the item cache with a full ``list()`` on first use.

    :raises NotFoundError: if *href* is not in the collection.
    '''
    if self._items is None:
        self.list()

    try:
        return self._items[href]
    except KeyError:
        raise NotFoundError(href)
|
||||
|
|
|
|||
139
vdirsyncer/storage/singlefile.py
Normal file
139
vdirsyncer/storage/singlefile.py
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
vdirsyncer.storage.singlefile
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:copyright: (c) 2014 Markus Unterwaditzer
|
||||
:license: MIT, see LICENSE for more details.
|
||||
'''
|
||||
|
||||
import os
|
||||
import collections
|
||||
|
||||
from .base import Item, Storage
|
||||
import vdirsyncer.exceptions as exceptions
|
||||
import vdirsyncer.log as log
|
||||
from vdirsyncer.utils import expand_path, safe_write, itervalues
|
||||
from vdirsyncer.utils.vobject import split_collection, join_collection
|
||||
|
||||
logger = log.get(__name__)
|
||||
|
||||
|
||||
class SingleFileStorage(Storage):
    '''Save data in a single VCALENDAR file, like Orage -- a calendar app for
    XFCE -- and Radicale do. Hashes are etags, UIDs or hashes are hrefs.

    This storage has many race conditions and is very slow.'''

    _repr_attributes = ('path',)

    _write_mode = 'wb'
    _append_mode = 'ab'
    _read_mode = 'rb'

    # href -> (item, etag) cache; rebuilt by list(), invalidated after
    # every write.
    _items = None

    def __init__(self, path, wrapper=None, encoding='utf-8', create=True,
                 collection=None, **kwargs):
        '''
        :param path: Filesystem path of the collection file.
        :param wrapper: Name of the component to wrap all items in,
                        e.g. ``u'VCALENDAR'``.
        :param encoding: Text encoding of the file on disk.
        :param create: Create the file if it does not exist yet.
        :param collection: Not supported by this storage; must be None.
        :raises ValueError: if a collection is given.
        :raises IOError: if the path is unusable or missing with
                         ``create=False``.
        '''
        super(SingleFileStorage, self).__init__(**kwargs)
        path = expand_path(path)

        if collection is not None:
            raise ValueError('collection is not a valid argument for {}'
                             .format(type(self).__name__))

        if not os.path.isfile(path):
            if os.path.exists(path):
                raise IOError('{} is not a file.'.format(path))
            if create:
                # The '+' modes create the file on first open for writing.
                self._write_mode = 'wb+'
                self._append_mode = 'ab+'
            else:
                raise IOError('File {} does not exist. Use create = '
                              'True in your configuration to automatically '
                              'create it, or create it '
                              'yourself.'.format(path))

        self.path = path
        self.encoding = encoding
        self.create = create
        self.wrapper = wrapper

    def list(self):
        '''Read the whole file and return a list of (href, etag) pairs,
        rebuilding the item cache as a side effect.'''
        import errno  # stdlib; kept local to avoid touching module imports

        self._items = collections.OrderedDict()

        try:
            with open(self.path, self._read_mode) as f:
                text = f.read().decode(self.encoding)
        except IOError as e:
            # A missing file is fine as long as we are allowed to create it.
            if e.errno != errno.ENOENT or not self.create:
                raise
            return ()

        rv = []
        for item in split_collection(text):
            item = Item(item)
            href = self._get_href(item)
            etag = item.hash
            self._items[href] = item, etag
            rv.append((href, etag))

        # we can't use yield here because we need to populate our
        # dict even if the user doesn't exhaust the iterator
        return rv

    def get(self, href):
        '''Return the (item, etag) pair for *href*.

        :raises exceptions.NotFoundError: if *href* does not exist.
        '''
        if self._items is None:
            self.list()

        try:
            return self._items[href]
        except KeyError:
            raise exceptions.NotFoundError(href)

    def upload(self, item):
        '''Add a new item and persist the file; return (href, etag).

        :raises exceptions.AlreadyExistingError: on duplicate href.
        '''
        href = self._get_href(item)
        self.list()
        if href in self._items:
            raise exceptions.AlreadyExistingError(href)

        self._items[href] = item, item.hash
        self._write()
        return href, item.hash

    def update(self, href, item, etag):
        '''Replace the item at *href* and persist; return the new etag.'''
        self.list()
        self._check_href_and_etag(href, etag)

        self._items[href] = item, item.hash
        self._write()
        return item.hash

    def delete(self, href, etag):
        '''Remove the item at *href* and persist the file.'''
        self.list()
        self._check_href_and_etag(href, etag)

        del self._items[href]
        self._write()

    def _check_href_and_etag(self, href, etag):
        '''Validate that *href* exists in the cache with exactly *etag*.

        :raises exceptions.NotFoundError: if *href* is unknown.
        :raises exceptions.WrongEtagError: if the stored etag differs.
        '''
        if href not in self._items:
            raise exceptions.NotFoundError(href)

        _, actual_etag = self._items[href]
        if etag != actual_etag:
            raise exceptions.WrongEtagError(etag, actual_etag)

    def _write(self):
        '''Serialize the cached items back to disk.

        The cache is invalidated afterwards (even on failure) so the next
        read goes through list() again.
        '''
        text = join_collection(
            (item.raw for item, etag in itervalues(self._items)),
            wrapper=self.wrapper
        )
        try:
            with safe_write(self.path, self._write_mode) as f:
                f.write(text.encode(self.encoding))
        finally:
            self._items = None
|
|
@ -6,14 +6,30 @@
|
|||
:copyright: (c) 2014 Markus Unterwaditzer
|
||||
:license: MIT, see LICENSE for more details.
|
||||
'''
|
||||
import hashlib
|
||||
|
||||
import icalendar.cal
|
||||
import icalendar.parser
|
||||
|
||||
from . import text_type, itervalues
|
||||
|
||||
|
||||
def hash_item(text):
    '''Return a stable SHA-256 hex digest for an item's text.

    PRODID and VERSION lines, blank lines and surrounding whitespace are
    ignored. If the text cannot be parsed as an icalendar component, the
    raw lines are simply sorted before hashing.
    '''
    try:
        candidate_lines = to_unicode_lines(
            icalendar.cal.Component.from_ical(text))
    except Exception:
        candidate_lines = sorted(text.splitlines())

    relevant = []
    for raw_line in candidate_lines:
        stripped = raw_line.strip()
        if not stripped:
            continue
        if u'PRODID' in raw_line or u'VERSION' in raw_line:
            continue
        relevant.append(stripped)

    hashable = u'\r\n'.join(relevant)
    return hashlib.sha256(hashable.encode('utf-8')).hexdigest()
|
||||
|
||||
|
||||
def split_collection(text, inline=(u'VTIMEZONE',),
|
||||
wrap_items_with=(u'VCALENDAR',)):
|
||||
'''Emits items in the order they occur in the text.'''
|
||||
assert isinstance(text, text_type)
|
||||
collection = icalendar.cal.Component.from_ical(text)
|
||||
items = collection.subcomponents
|
||||
|
|
@ -38,7 +54,6 @@ def split_collection(text, inline=(u'VTIMEZONE',),
|
|||
|
||||
lines.extend(to_unicode_lines(item))
|
||||
lines.append(end)
|
||||
lines.append(u'')
|
||||
|
||||
yield u''.join(line + u'\r\n' for line in lines if line)
|
||||
|
||||
|
|
@ -50,3 +65,38 @@ def to_unicode_lines(item):
|
|||
for content_line in item.content_lines():
|
||||
if content_line:
|
||||
yield icalendar.parser.foldline(content_line)
|
||||
|
||||
|
||||
def join_collection(items, wrapper=None):
    '''Merge individual item texts into one wrapped collection text.

    :param items: Iterable of unicode item texts.
    :param wrapper: Name of the enclosing component; inferred from the
                    items' component types when not given.
    '''
    tz_by_id = {}
    parts = []

    for text in items:
        parsed = icalendar.cal.Component.from_ical(text)
        if parsed.name == u'VCALENDAR':
            # Items already wrapped in VCALENDAR: unwrap them, collecting
            # timezones separately so each TZID appears only once.
            assert wrapper is None or wrapper == u'VCALENDAR'
            wrapper = u'VCALENDAR'
            for child in parsed.subcomponents:
                if child.name == u'VTIMEZONE':
                    tz_by_id[child['TZID']] = child
                else:
                    parts.append(child)
        else:
            if parsed.name == u'VCARD':
                assert wrapper is None or wrapper == u'VADDRESSBOOK'
                wrapper = u'VADDRESSBOOK'
            parts.append(parsed)

    if wrapper is None:
        begin_line = end_line = u''
    else:
        begin_line = u'BEGIN:{}'.format(wrapper)
        end_line = u'END:{}'.format(wrapper)

    out_lines = [begin_line]
    for tz in itervalues(tz_by_id):
        out_lines.extend(to_unicode_lines(tz))
    for part in parts:
        out_lines.extend(to_unicode_lines(part))
    out_lines.append(end_line)

    return u''.join(line + u'\r\n' for line in out_lines if line)
|
||||
|
|
|
|||
Loading…
Reference in a new issue