Merge branch 'origin/master' into meta_description

This commit is contained in:
Hugo Osvaldo Barrera 2021-06-13 00:46:05 +02:00
commit e70e8c03e8
113 changed files with 3378 additions and 3314 deletions

39
.builds/archlinux.yaml Normal file
View file

@ -0,0 +1,39 @@
# Run tests using the packaged dependencies on ArchLinux.
image: archlinux
packages:
- docker
- docker-compose
# Build dependencies:
- python-pip
- python-wheel
# Runtime dependencies:
- python-atomicwrites
- python-click
- python-click-log
- python-click-threading
- python-requests
- python-requests-toolbelt
# Test dependencies:
- python-hypothesis
- python-pytest-cov
- python-pytest-localserver
sources:
- https://github.com/pimutils/vdirsyncer
environment:
BUILD: test
CI: true
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
DAV_SERVER: radicale xandikos
REQUIREMENTS: release
# TODO: ETESYNC_TESTS
tasks:
- setup: |
sudo systemctl start docker
cd vdirsyncer
python setup.py build
sudo pip install --no-index .
- test: |
cd vdirsyncer
make -e ci-test
make -e ci-test-storage

View file

@ -0,0 +1,30 @@
# Run tests using oldest available dependency versions.
#
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
# here, and consider that our "oldest supported environment".
image: archlinux
packages:
- docker
- docker-compose
- python-pip
sources:
- https://github.com/pimutils/vdirsyncer
environment:
BUILD: test
CI: true
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
DAV_SERVER: radicale xandikos
REQUIREMENTS: minimal
# TODO: ETESYNC_TESTS
tasks:
- setup: |
sudo systemctl start docker
cd vdirsyncer
make -e install-dev
- test: |
cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/
make -e ci-test
make -e ci-test-storage

View file

@ -0,0 +1,32 @@
# Run tests using latest dependencies from PyPI
image: archlinux
packages:
- docker
- docker-compose
- python-pip
sources:
- https://github.com/pimutils/vdirsyncer
environment:
BUILD: test
CI: true
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
DAV_SERVER: baikal radicale xandikos
REQUIREMENTS: release
# TODO: ETESYNC_TESTS
tasks:
- setup: |
sudo systemctl start docker
cd vdirsyncer
make -e install-dev -e install-docs
- test: |
cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/
make -e ci-test
make -e ci-test-storage
- style: |
cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/
make -e style

1
.gitignore vendored
View file

@ -14,3 +14,4 @@ dist
docs/_build/ docs/_build/
vdirsyncer/version.py vdirsyncer/version.py
.hypothesis .hypothesis
coverage.xml

6
.gitmodules vendored
View file

@ -1,6 +0,0 @@
[submodule "tests/storage/servers/owncloud"]
path = tests/storage/servers/owncloud
url = https://github.com/vdirsyncer/owncloud-testserver
[submodule "tests/storage/servers/nextcloud"]
path = tests/storage/servers/nextcloud
url = https://github.com/vdirsyncer/nextcloud-testserver

View file

@ -1,29 +1,23 @@
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.4.0 rev: v4.0.1
hooks: hooks:
- id: trailing-whitespace - id: trailing-whitespace
args: [--markdown-linebreak-ext=md] args: [--markdown-linebreak-ext=md]
- id: end-of-file-fixer - id: end-of-file-fixer
exclude: '.travis.yml'
- id: check-toml - id: check-toml
- id: check-added-large-files - id: check-added-large-files
- id: debug-statements - id: debug-statements
- repo: https://gitlab.com/pycqa/flake8 - repo: https://gitlab.com/pycqa/flake8
rev: "master" # pick a git hash / tag to point to rev: "3.9.2"
hooks: hooks:
- id: flake8 - id: flake8
additional_dependencies: [flake8-import-order, flake8-bugbear] additional_dependencies: [flake8-import-order, flake8-bugbear]
- repo: https://github.com/psf/black
rev: "21.6b0"
hooks:
- id: black
- repo: https://github.com/asottile/reorder_python_imports - repo: https://github.com/asottile/reorder_python_imports
rev: v2.3.0 rev: v2.5.0
hooks: hooks:
- id: reorder-python-imports - id: reorder-python-imports
- repo: local
hooks:
- id: update-travis
name: Update travis job definition
description: Ensures that travis job definition are up to date.
entry: scripts/make_travisconf.py
files: '.*travis.*'
stages: [commit]
language: script

View file

@ -1,89 +0,0 @@
{
"branches": {
"only": [
"master"
]
},
"cache": "pip",
"dist": "bionic",
"git": {
"submodules": false
},
"install": [
". scripts/travis-install.sh",
"make -e install-$BUILD"
],
"language": "python",
"matrix": {
"fast_finish": true,
"include": [
{
"env": "BUILD=style",
"python": "3.7"
},
{
"env": "BUILD=test REQUIREMENTS=release",
"python": "3.7"
},
{
"env": "BUILD=test-storage DAV_SERVER=radicale REQUIREMENTS=release ",
"python": "3.7"
},
{
"env": "BUILD=test-storage DAV_SERVER=xandikos REQUIREMENTS=release ",
"python": "3.7"
},
{
"env": "BUILD=test-storage DAV_SERVER=fastmail REQUIREMENTS=release ",
"python": "3.7"
},
{
"env": "BUILD=test REQUIREMENTS=minimal",
"python": "3.7"
},
{
"env": "BUILD=test-storage DAV_SERVER=radicale REQUIREMENTS=minimal ",
"python": "3.7"
},
{
"env": "BUILD=test-storage DAV_SERVER=xandikos REQUIREMENTS=minimal ",
"python": "3.7"
},
{
"env": "BUILD=test REQUIREMENTS=release",
"python": "3.8"
},
{
"env": "BUILD=test-storage DAV_SERVER=radicale REQUIREMENTS=release ",
"python": "3.8"
},
{
"env": "BUILD=test-storage DAV_SERVER=xandikos REQUIREMENTS=release ",
"python": "3.8"
},
{
"env": "BUILD=test REQUIREMENTS=minimal",
"python": "3.8"
},
{
"env": "BUILD=test-storage DAV_SERVER=radicale REQUIREMENTS=minimal ",
"python": "3.8"
},
{
"env": "BUILD=test-storage DAV_SERVER=xandikos REQUIREMENTS=minimal ",
"python": "3.8"
},
{
"env": "BUILD=test ETESYNC_TESTS=true REQUIREMENTS=latest",
"python": "3.7"
}
]
},
"script": [
"make -e $BUILD"
],
"services": [
"docker"
],
"sudo": true
}

View file

@ -9,6 +9,19 @@ Package maintainers and users who have to manually update their installation
may want to subscribe to `GitHub's tag feed may want to subscribe to `GitHub's tag feed
<https://github.com/pimutils/vdirsyncer/tags.atom>`_. <https://github.com/pimutils/vdirsyncer/tags.atom>`_.
Version 0.18.0
==============
Note: Version 0.17 has some alpha releases but ultimately was never finalised.
0.18 actually continues where 0.16 left off.
- Support for Python 3.5 and 3.6 has been dropped. This release mostly focuses
on keeping vdirsyncer compatible with newer environments.
- click 8 and click-threading 0.5.0 are now required.
- For those using ``pipsi``, we now recommend using ``pipx``, its successor.
- Python 3.9 is now supported.
- Our Debian/Ubuntu build scripts have been updated. New versions should be
pushed to those repositories soon.
- Add "description" and "order" as metadata. These fetch the CalDAV: - Add "description" and "order" as metadata. These fetch the CalDAV:
calendar-description, CardDAV:addressbook-description and apple-ns:calendar-order calendar-description, CardDAV:addressbook-description and apple-ns:calendar-order
properties. properties.

View file

@ -1,5 +1,5 @@
# setuptools-scm includes everything tracked by git # setuptools-scm includes everything tracked by git
prune contrib prune docker
prune scripts prune scripts
prune tests/storage/servers prune tests/storage/servers
prune tests/storage/etesync prune tests/storage/etesync

View file

@ -15,7 +15,7 @@ export DETERMINISTIC_TESTS := false
# Run the etesync testsuite. # Run the etesync testsuite.
export ETESYNC_TESTS := false export ETESYNC_TESTS := false
# Assume to run in Travis. Don't use this outside of a virtual machine. It will # Assume to run in CI. Don't use this outside of a virtual machine. It will
# heavily "pollute" your system, such as attempting to install a new Python # heavily "pollute" your system, such as attempting to install a new Python
# systemwide. # systemwide.
export CI := false export CI := false
@ -36,55 +36,29 @@ ifeq ($(ETESYNC_TESTS), true)
endif endif
PYTEST = py.test $(PYTEST_ARGS) PYTEST = py.test $(PYTEST_ARGS)
export TESTSERVER_BASE := ./tests/storage/servers/
CODECOV_PATH = /tmp/codecov.sh CODECOV_PATH = /tmp/codecov.sh
ifeq ($(CI), true) all:
test-storage: $(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation)
curl -s https://codecov.io/bash > $(CODECOV_PATH)
$(PYTEST) tests/storage/ ci-test:
bash $(CODECOV_PATH) -c -F storage
test:
curl -s https://codecov.io/bash > $(CODECOV_PATH) curl -s https://codecov.io/bash > $(CODECOV_PATH)
$(PYTEST) tests/unit/ $(PYTEST) tests/unit/
bash $(CODECOV_PATH) -c -F unit bash $(CODECOV_PATH) -c -F unit
$(PYTEST) tests/system/ $(PYTEST) tests/system/
bash $(CODECOV_PATH) -c -F system bash $(CODECOV_PATH) -c -F system
[ "$(ETESYNC_TESTS)" = "false" ] || make test-storage [ "$(ETESYNC_TESTS)" = "false" ] || make test-storage
else
test:
$(PYTEST)
endif
all: ci-test-storage:
$(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation) curl -s https://codecov.io/bash > $(CODECOV_PATH)
install-servers:
set -ex; \ set -ex; \
for server in $(DAV_SERVER); do \ for server in $(DAV_SERVER); do \
if [ ! "$$(ls $(TESTSERVER_BASE)$$server/)" ]; then \ DAV_SERVER=$$server $(PYTEST) --cov-append tests/storage; \
git submodule update --init -- "$(TESTSERVER_BASE)$$server"; \
fi; \
(cd $(TESTSERVER_BASE)$$server && sh install.sh); \
done done
bash $(CODECOV_PATH) -c -F storage
install-test: install-servers install-dev test:
pip install -Ur test-requirements.txt $(PYTEST)
set -xe && if [ "$$REQUIREMENTS" = "devel" ]; then \
pip install -U --force-reinstall \
git+https://github.com/DRMacIver/hypothesis \
git+https://github.com/kennethreitz/requests \
git+https://github.com/pytest-dev/pytest; \
fi
[ -z "$(TEST_EXTRA_PACKAGES)" ] || pip install $(TEST_EXTRA_PACKAGES)
install-test-storage: install-test
# This is just an alias
true
install-style: install-docs install-dev
pip install pre-commit
style: style:
pre-commit run --all pre-commit run --all
@ -97,8 +71,6 @@ install-docs:
docs: docs:
cd docs && make html cd docs && make html
linkcheck:
sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/ sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/
release-deb: release-deb:
@ -111,21 +83,11 @@ release-deb:
install-dev: install-dev:
pip install -U pip setuptools wheel pip install -U pip setuptools wheel
pip install -e . pip install -e .
pip install -Ur test-requirements.txt $(TEST_EXTRA_PACKAGES)
pip install pre-commit
[ "$(ETESYNC_TESTS)" = "false" ] || pip install -Ue .[etesync] [ "$(ETESYNC_TESTS)" = "false" ] || pip install -Ue .[etesync]
set -xe && if [ "$(REQUIREMENTS)" = "devel" ]; then \ set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
pip install -U --force-reinstall \
git+https://github.com/mitsuhiko/click \
git+https://github.com/kennethreitz/requests; \
elif [ "$(REQUIREMENTS)" = "minimal" ]; then \
pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \ pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
fi fi
ssh-submodule-urls:
git submodule foreach "\
echo -n 'Old: '; \
git remote get-url origin; \
git remote set-url origin \$$(git remote get-url origin | sed -e 's/https:\/\/github\.com\//git@github.com:/g'); \
echo -n 'New URL: '; \
git remote get-url origin"
.PHONY: docs .PHONY: docs

View file

@ -2,8 +2,8 @@
vdirsyncer vdirsyncer
========== ==========
.. image:: https://travis-ci.org/pimutils/vdirsyncer.svg?branch=master .. image:: https://builds.sr.ht/~whynothugo/vdirsyncer.svg
:target: https://travis-ci.org/pimutils/vdirsyncer :target: https://builds.sr.ht/~whynothugo/vdirsyncer
:alt: CI status :alt: CI status
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=master .. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=master

View file

@ -4,4 +4,5 @@ Documentation=https://vdirsyncer.readthedocs.org/
[Service] [Service]
ExecStart=/usr/bin/vdirsyncer sync ExecStart=/usr/bin/vdirsyncer sync
Type=oneshot RuntimeMaxSec=3m
Restart=on-failure

View file

@ -1,17 +0,0 @@
version: '3'
services:
xandikos:
build: docker/xandikos/
ports:
- '8000:8000'
radicale:
build: docker/radicale/
ports:
- '8001:8001'
baikal:
build: docker/baikal/
ports:
- '8002:80'

View file

@ -1,26 +0,0 @@
# Based on https://github.com/ckulka/baikal-docker
# Sadly, we can't override the VOLUME it has set, and we want some static
# config.
FROM php:7.4-apache
ENV VERSION 0.7.0
ADD https://github.com/sabre-io/Baikal/releases/download/$VERSION/baikal-$VERSION.zip .
RUN apt-get update && apt-get install -y sqlite3 unzip
RUN unzip -q baikal-$VERSION.zip -d /var/www/
RUN chown -R www-data:www-data /var/www/baikal && \
docker-php-ext-install pdo pdo_mysql
COPY apache.conf /etc/apache2/sites-enabled/000-default.conf
COPY start.sh /opt/
RUN a2enmod rewrite
COPY baikal.yaml /var/www/baikal/config/baikal.yaml
COPY configure.sql /configure.sql
RUN touch /var/www/baikal/Specific/INSTALL_DISABLED
RUN cat /configure.sql | sqlite3 /var/www/baikal/Specific/db/db.sqlite
RUN chmod -R 777 /var/www/baikal/Specific/ /var/www/baikal/config/
CMD [ "sh", "/opt/start.sh" ]

View file

@ -1,25 +0,0 @@
# Shameless copied from https://github.com/ckulka/baikal-docker/blob/master/files/apache.conf
<VirtualHost *:80>
# InjectedServerAlias dav.example.org dav.example.io
DocumentRoot /var/www/baikal/html
RewriteEngine On
RewriteRule /.well-known/carddav /dav.php [R,L]
RewriteRule /.well-known/caldav /dav.php [R,L]
<Directory "/var/www/baikal/html">
Options None
Options +FollowSymlinks
AllowOverride All
# Configuration for apache-2.2:
Order allow,deny
Allow from all
# Configuration for apache-2.4:
Require all granted
</Directory>
</VirtualHost>

View file

@ -1,18 +0,0 @@
system:
configured_version: 0.7.0
timezone: Europe/Paris
card_enabled: true
cal_enabled: true
dav_auth_type: Basic
admin_passwordhash: 6a890c3aa185845a4bee1e1caed92e1faaf2dec6772291dca301cef6782e3bce
auth_realm: BaikalDAV
invite_from: noreply@localhost
database:
sqlite_file: /var/www/baikal/Specific/db/db.sqlite
mysql: false
mysql_host: ''
mysql_dbname: ''
mysql_username: ''
mysql_password: ''
encryption_key: bdf3bec969736e122e6d5f72c282c49e
configured_version: ''

View file

@ -1,139 +0,0 @@
PRAGMA foreign_keys=OFF;
BEGIN TRANSACTION;
CREATE TABLE addressbooks (
id integer primary key asc NOT NULL,
principaluri text NOT NULL,
displayname text,
uri text NOT NULL,
description text,
synctoken integer DEFAULT 1 NOT NULL
);
INSERT INTO addressbooks VALUES(1,'principals/baikal','Default Address Book','default','Default Address Book for Baikal',1);
CREATE TABLE cards (
id integer primary key asc NOT NULL,
addressbookid integer NOT NULL,
carddata blob,
uri text NOT NULL,
lastmodified integer,
etag text,
size integer
);
CREATE TABLE addressbookchanges (
id integer primary key asc NOT NULL,
uri text,
synctoken integer NOT NULL,
addressbookid integer NOT NULL,
operation integer NOT NULL
);
CREATE TABLE calendarobjects (
id integer primary key asc NOT NULL,
calendardata blob NOT NULL,
uri text NOT NULL,
calendarid integer NOT NULL,
lastmodified integer NOT NULL,
etag text NOT NULL,
size integer NOT NULL,
componenttype text,
firstoccurence integer,
lastoccurence integer,
uid text
);
CREATE TABLE calendars (
id integer primary key asc NOT NULL,
synctoken integer DEFAULT 1 NOT NULL,
components text NOT NULL
);
INSERT INTO calendars VALUES(1,1,'VEVENT,VTODO');
CREATE TABLE calendarinstances (
id integer primary key asc NOT NULL,
calendarid integer,
principaluri text,
access integer,
displayname text,
uri text NOT NULL,
description text,
calendarorder integer,
calendarcolor text,
timezone text,
transparent bool,
share_href text,
share_displayname text,
share_invitestatus integer DEFAULT '2',
UNIQUE (principaluri, uri),
UNIQUE (calendarid, principaluri),
UNIQUE (calendarid, share_href)
);
INSERT INTO calendarinstances VALUES(1,1,'principals/baikal',NULL,'Default calendar','default','Default calendar',0,'','Europe/Paris',NULL,NULL,NULL,2);
CREATE TABLE calendarchanges (
id integer primary key asc NOT NULL,
uri text,
synctoken integer NOT NULL,
calendarid integer NOT NULL,
operation integer NOT NULL
);
CREATE TABLE calendarsubscriptions (
id integer primary key asc NOT NULL,
uri text NOT NULL,
principaluri text NOT NULL,
source text NOT NULL,
displayname text,
refreshrate text,
calendarorder integer,
calendarcolor text,
striptodos bool,
stripalarms bool,
stripattachments bool,
lastmodified int
);
CREATE TABLE schedulingobjects (
id integer primary key asc NOT NULL,
principaluri text NOT NULL,
calendardata blob,
uri text NOT NULL,
lastmodified integer,
etag text NOT NULL,
size integer NOT NULL
);
CREATE TABLE locks (
id integer primary key asc NOT NULL,
owner text,
timeout integer,
created integer,
token text,
scope integer,
depth integer,
uri text
);
CREATE TABLE principals (
id INTEGER PRIMARY KEY ASC NOT NULL,
uri TEXT NOT NULL,
email TEXT,
displayname TEXT,
UNIQUE(uri)
);
INSERT INTO principals VALUES(1,'principals/baikal','baikal@example.com','Baikal');
CREATE TABLE groupmembers (
id INTEGER PRIMARY KEY ASC NOT NULL,
principal_id INTEGER NOT NULL,
member_id INTEGER NOT NULL,
UNIQUE(principal_id, member_id)
);
CREATE TABLE propertystorage (
id integer primary key asc NOT NULL,
path text NOT NULL,
name text NOT NULL,
valuetype integer NOT NULL,
value string
);
CREATE TABLE users (
id integer primary key asc NOT NULL,
username TEXT NOT NULL,
digesta1 TEXT NOT NULL,
UNIQUE(username)
);
INSERT INTO users VALUES(1,'baikal','3b0845b235b7e985ce5905ab8df45e1a');
CREATE INDEX addressbookid_synctoken ON addressbookchanges (addressbookid, synctoken);
CREATE INDEX calendarid_synctoken ON calendarchanges (calendarid, synctoken);
CREATE INDEX principaluri_uri ON calendarsubscriptions (principaluri, uri);
CREATE UNIQUE INDEX path_property ON propertystorage (path, name);
COMMIT;

View file

@ -1,16 +0,0 @@
#!/bin/sh
# Shameless copied from https://raw.githubusercontent.com/ckulka/baikal-docker/master/files/start.sh
# Inject ServerName and ServerAlias if specified
APACHE_CONFIG="/etc/apache2/sites-available/000-default.conf"
if [ ! -z ${BAIKAL_SERVERNAME+x} ]
then
sed -i "s/# InjectedServerName .*/ServerName $BAIKAL_SERVERNAME/g" $APACHE_CONFIG
fi
if [ ! -z ${BAIKAL_SERVERALIAS+x} ]
then
sed -i "s/# InjectedServerAlias .*/ServerAlias $BAIKAL_SERVERALIAS/g" $APACHE_CONFIG
fi
apache2-foreground

View file

@ -1,5 +0,0 @@
FROM python:3.8
RUN pip install radicale
CMD radicale --storage-filesystem-folder /tmp/dav -H 0.0.0.0:8001 -D

View file

@ -1,13 +0,0 @@
# Original file copyright 2017 Jelmer Vernooij
FROM ubuntu:bionic
RUN apt-get update && apt-get -y install xandikos locales
EXPOSE 8000
RUN locale-gen en_US.UTF-8
ENV PYTHONIOENCODING=utf-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
CMD xandikos -d /tmp/dav -l 0.0.0.0 -p 8000 --autocreate

View file

@ -3,89 +3,104 @@ import os
from pkg_resources import get_distribution from pkg_resources import get_distribution
extensions = ['sphinx.ext.autodoc'] extensions = ["sphinx.ext.autodoc"]
templates_path = ['_templates'] templates_path = ["_templates"]
source_suffix = '.rst' source_suffix = ".rst"
master_doc = 'index' master_doc = "index"
project = 'vdirsyncer' project = "vdirsyncer"
copyright = ('2014-{}, Markus Unterwaditzer & contributors' copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
.format(datetime.date.today().strftime('%Y'))) datetime.date.today().strftime("%Y")
)
release = get_distribution('vdirsyncer').version release = get_distribution("vdirsyncer").version
version = '.'.join(release.split('.')[:2]) # The short X.Y version. version = ".".join(release.split(".")[:2]) # The short X.Y version.
rst_epilog = '.. |vdirsyncer_version| replace:: %s' % release rst_epilog = ".. |vdirsyncer_version| replace:: %s" % release
exclude_patterns = ['_build'] exclude_patterns = ["_build"]
pygments_style = 'sphinx' pygments_style = "sphinx"
on_rtd = os.environ.get('READTHEDOCS', None) == 'True' on_rtd = os.environ.get("READTHEDOCS", None) == "True"
try: try:
import sphinx_rtd_theme import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError: except ImportError:
html_theme = 'default' html_theme = "default"
if not on_rtd: if not on_rtd:
print('-' * 74) print("-" * 74)
print('Warning: sphinx-rtd-theme not installed, building with default ' print(
'theme.') "Warning: sphinx-rtd-theme not installed, building with default " "theme."
print('-' * 74) )
print("-" * 74)
html_static_path = ['_static'] html_static_path = ["_static"]
htmlhelp_basename = 'vdirsyncerdoc' htmlhelp_basename = "vdirsyncerdoc"
latex_elements = {} latex_elements = {}
latex_documents = [ latex_documents = [
('index', 'vdirsyncer.tex', 'vdirsyncer Documentation', (
'Markus Unterwaditzer', 'manual'), "index",
"vdirsyncer.tex",
"vdirsyncer Documentation",
"Markus Unterwaditzer",
"manual",
),
] ]
man_pages = [ man_pages = [
('index', 'vdirsyncer', 'vdirsyncer Documentation', ("index", "vdirsyncer", "vdirsyncer Documentation", ["Markus Unterwaditzer"], 1)
['Markus Unterwaditzer'], 1)
] ]
texinfo_documents = [ texinfo_documents = [
('index', 'vdirsyncer', 'vdirsyncer Documentation', (
'Markus Unterwaditzer', 'vdirsyncer', "index",
'Synchronize calendars and contacts.', 'Miscellaneous'), "vdirsyncer",
"vdirsyncer Documentation",
"Markus Unterwaditzer",
"vdirsyncer",
"Synchronize calendars and contacts.",
"Miscellaneous",
),
] ]
def github_issue_role(name, rawtext, text, lineno, inliner, def github_issue_role(name, rawtext, text, lineno, inliner, options=None, content=()):
options={}, content=()): # noqa: B006 options = options or {}
try: try:
issue_num = int(text) issue_num = int(text)
if issue_num <= 0: if issue_num <= 0:
raise ValueError() raise ValueError()
except ValueError: except ValueError:
msg = inliner.reporter.error(f'Invalid GitHub issue: {text}', msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg) prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg] return [prb], [msg]
from docutils import nodes from docutils import nodes
PROJECT_HOME = 'https://github.com/pimutils/vdirsyncer' PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
link = '{}/{}/{}'.format(PROJECT_HOME, link = "{}/{}/{}".format(
'issues' if name == 'gh' else 'pull', PROJECT_HOME, "issues" if name == "gh" else "pull", issue_num
issue_num) )
linktext = ('issue #{}' if name == 'gh' linktext = ("issue #{}" if name == "gh" else "pull request #{}").format(issue_num)
else 'pull request #{}').format(issue_num) node = nodes.reference(rawtext, linktext, refuri=link, **options)
node = nodes.reference(rawtext, linktext, refuri=link,
**options)
return [node], [] return [node], []
def setup(app): def setup(app):
from sphinx.domains.python import PyObject from sphinx.domains.python import PyObject
app.add_object_type('storage', 'storage', 'pair: %s; storage',
doc_field_types=PyObject.doc_field_types) app.add_object_type(
app.add_role('gh', github_issue_role) "storage",
app.add_role('ghpr', github_issue_role) "storage",
"pair: %s; storage",
doc_field_types=PyObject.doc_field_types,
)
app.add_role("gh", github_issue_role)
app.add_role("ghpr", github_issue_role)

View file

@ -269,7 +269,7 @@ in terms of data safety**. See `this blog post
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back <https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
up your data. up your data.
At first run you will be asked to authorize application for google account At first run you will be asked to authorize application for Google account
access. access.
To use this storage type, you need to install some additional dependencies:: To use this storage type, you need to install some additional dependencies::
@ -411,6 +411,7 @@ Local
fileext = "..." fileext = "..."
#encoding = "utf-8" #encoding = "utf-8"
#post_hook = null #post_hook = null
#fileignoreext = ".tmp"
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
a more formal description of the format. a more formal description of the format.
@ -424,11 +425,15 @@ Local
:param fileext: The file extension to use (e.g. ``.txt``). Contained in the :param fileext: The file extension to use (e.g. ``.txt``). Contained in the
href, so if you change the file extension after a sync, this will href, so if you change the file extension after a sync, this will
trigger a re-download of everything (but *should* not cause data-loss trigger a re-download of everything (but *should* not cause data-loss
of any kind). of any kind). To be compatible with the ``vset`` format you have
to either use ``.vcf`` or ``.ics``. Note that metasync won't work
if you use an empty string here.
:param encoding: File encoding for items, both content and filename. :param encoding: File encoding for items, both content and filename.
:param post_hook: A command to call for each item creation and :param post_hook: A command to call for each item creation and
modification. The command will be called with the path of the modification. The command will be called with the path of the
new/updated file. new/updated file.
:param fileignoreext: The file extension to ignore. It is only useful
if fileext is set to the empty string. The default is ``.tmp``.
.. storage:: singlefile .. storage:: singlefile

View file

@ -2,7 +2,7 @@
Support and Contact Support and Contact
=================== ===================
* The ``#pimutils`` `IRC channel on Freenode <https://pimutils.org/contact>`_ * The ``#pimutils`` `IRC channel on Libera.Chat <https://pimutils.org/contact>`_
might be active, depending on your timezone. Use it for support and general might be active, depending on your timezone. Use it for support and general
(including off-topic) discussion. (including off-topic) discussion.

View file

@ -75,8 +75,8 @@ Submitting patches, pull requests
Running tests, how to set up your development environment Running tests, how to set up your development environment
--------------------------------------------------------- ---------------------------------------------------------
For many patches, it might suffice to just let Travis run the tests. However, For many patches, it might suffice to just let CI run the tests. However,
Travis is slow, so you might want to run them locally too. For this, set up a CI is slow, so you might want to run them locally too. For this, set up a
virtualenv_ and run this inside of it:: virtualenv_ and run this inside of it::
# install: # install:
@ -87,8 +87,8 @@ virtualenv_ and run this inside of it::
# Install git commit hook for some extra linting and checking # Install git commit hook for some extra linting and checking
pre-commit install pre-commit install
# install test dependencies # Install development dependencies
make install-test make install-dev
Then you can run:: Then you can run::
@ -100,15 +100,13 @@ The ``Makefile`` has a lot of options that allow you to control which tests are
run, and which servers are tested. Take a look at its code where they are all run, and which servers are tested. Take a look at its code where they are all
initialized and documented. initialized and documented.
For example, to test xandikos, first run the server itself:: To tests against a specific DAV server, use ``DAV_SERVER``::
docker-compose build xandikos
docker-compose up -d xandikos
Then run the tests specifying this ``DAV_SERVER``, run::
make DAV_SERVER=xandikos test make DAV_SERVER=xandikos test
The server will be initialised in a docker container and terminated at the end
of the test suite.
If you have any questions, feel free to open issues about it. If you have any questions, feel free to open issues about it.
Structure of the testsuite Structure of the testsuite

View file

@ -81,7 +81,7 @@ the simplest possible way would look something like::
virtualenv ~/vdirsyncer_env virtualenv ~/vdirsyncer_env
~/vdirsyncer_env/bin/pip install vdirsyncer ~/vdirsyncer_env/bin/pip install vdirsyncer
alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer"
You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``. You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``.
@ -95,22 +95,22 @@ This method has two advantages:
The clean, easy way The clean, easy way
~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~
pipsi_ is a new package manager for Python-based software that automatically pipx_ is a new package manager for Python-based software that automatically
sets up a virtualenv for each program you install. Assuming you have it sets up a virtualenv for each program you install. Assuming you have it
installed on your operating system, you can do:: installed on your operating system, you can do::
pipsi install --python python3 vdirsyncer pipx install vdirsyncer
and ``.local/bin/vdirsyncer`` will be your new vdirsyncer installation. To and ``~/.local/pipx/venvs/vdirsyncer`` will be your new vdirsyncer installation. To
update vdirsyncer to the latest version:: update vdirsyncer to the latest version::
pipsi upgrade vdirsyncer pipx upgrade vdirsyncer
If you're done with vdirsyncer, you can do:: If you're done with vdirsyncer, you can do::
pipsi uninstall vdirsyncer pipx uninstall vdirsyncer
and vdirsyncer will be uninstalled, including its dependencies. and vdirsyncer will be uninstalled, including its dependencies.
.. _virtualenv: https://virtualenv.readthedocs.io/ .. _virtualenv: https://virtualenv.readthedocs.io/
.. _pipsi: https://github.com/mitsuhiko/pipsi .. _pipx: https://github.com/pipxproject/pipx

View file

@ -5,10 +5,9 @@ Packaging guidelines
Thank you very much for packaging vdirsyncer! The following guidelines should Thank you very much for packaging vdirsyncer! The following guidelines should
help you to avoid some common pitfalls. help you to avoid some common pitfalls.
While they are called guidelines and therefore theoretically not mandatory, if If you find yourself needing to patch anything, or going in a different direction,
you consider going a different direction, please first open an issue or contact please open an issue so we can also address it in a way that works for everyone. Otherwise
me otherwise instead of just going ahead. These guidelines exist for my own we get bug reports for code or scenarios that don't exist in upstream vdirsyncer.
convenience too.
Obtaining the source code Obtaining the source code
========================= =========================
@ -17,8 +16,7 @@ The main distribution channel is `PyPI
<https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be <https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be
obtained there. We mirror the same package tarball and wheel as GitHub obtained there. We mirror the same package tarball and wheel as GitHub
releases. Please do not confuse these with the auto-generated GitHub "Source releases. Please do not confuse these with the auto-generated GitHub "Source
Code" tarball; that one contains useless junk and are more of a distraction Code" tarball. Those are missing some important metadata and your build will fail.
than anything else.
We give each release a tag in the git repo. If you want to get notified of new We give each release a tag in the git repo. If you want to get notified of new
releases, `GitHub's feed releases, `GitHub's feed
@ -39,13 +37,13 @@ Testing
======= =======
Everything testing-related goes through the ``Makefile`` in the root of the Everything testing-related goes through the ``Makefile`` in the root of the
repository or PyPI package. Trying to e.g. run ``py.test`` directly will repository or PyPI package. Trying to e.g. run ``pytest`` directly will
require a lot of environment variables to be set (for configuration) and you require a lot of environment variables to be set (for configuration) and you
probably don't want to deal with that. probably don't want to deal with that.
You can install the testing dependencies with:: You can install the all development dependencies with::
make install-test make install-dev
You probably don't want this since it will use pip to download the You probably don't want this since it will use pip to download the
dependencies. Alternatively you can find the testing dependencies in dependencies. Alternatively you can find the testing dependencies in

View file

@ -29,7 +29,7 @@ It's quite possible that the default "every fifteen minutes" interval isn't to
your liking. No default will suit everybody, but this is configurable by simply your liking. No default will suit everybody, but this is configurable by simply
running:: running::
systemctl --user edit vdirsyncer systemctl --user edit vdirsyncer.timer
This will open a blank editor, where you can override the timer by including:: This will open a blank editor, where you can override the timer by including::

View file

@ -99,7 +99,7 @@ collections for faster search and lookup.
The reason items' filenames don't contain any extra information is simple: The The reason items' filenames don't contain any extra information is simple: The
solutions presented induced duplication of data, where one duplicate might solutions presented induced duplication of data, where one duplicate might
become out of date because of bad implementations. As it stands right now, a become out of date because of bad implementations. As it stands right now, an
index format could be formalized separately though. index format could be formalized separately though.
vdirsyncer doesn't really have to bother about efficient item lookup, because vdirsyncer doesn't really have to bother about efficient item lookup, because

View file

@ -50,7 +50,7 @@ program chosen:
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other * Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
e.g. DAVDroid_ or the apps by dmfs_. e.g. DAVx⁵_ or the apps by dmfs_.
.. _DAVDroid: http://davdroid.bitfire.at/ .. _DAVx⁵: https://www.davx5.com/
.. _dmfs: https://dmfs.org/ .. _dmfs: https://dmfs.org/

View file

@ -3,35 +3,40 @@ ARG distrover
FROM $distro:$distrover FROM $distro:$distrover
ARG distro
ARG distrover
RUN apt-get update RUN apt-get update
RUN apt-get install -y build-essential fakeroot debhelper git RUN apt-get install -y build-essential fakeroot debhelper git
RUN apt-get install -y python3-all python3-pip RUN apt-get install -y python3-all python3-pip python3-venv
RUN apt-get install -y ruby ruby-dev RUN apt-get install -y ruby ruby-dev
RUN apt-get install -y python-all python-pip
RUN gem install fpm RUN gem install fpm package_cloud
RUN pip2 install virtualenv-tools RUN pip3 install virtualenv virtualenv-tools3
RUN pip3 install virtualenv
RUN virtualenv -p python3 /vdirsyncer/env/ RUN virtualenv -p python3 /vdirsyncer/env/
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
RUN pip3 uninstall -y virtualenv
RUN echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
RUN chmod +x /usr/local/bin/virtualenv
COPY . /vdirsyncer/vdirsyncer/ COPY . /vdirsyncer/vdirsyncer/
WORKDIR /vdirsyncer/vdirsyncer/ WORKDIR /vdirsyncer/vdirsyncer/
RUN mkdir /vdirsyncer/pkgs/ RUN mkdir /vdirsyncer/pkgs/
RUN basename *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version RUN basename *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
RUN (echo -n *.tar.gz; echo '[google]') | tee requirements.txt RUN (echo -n *.tar.gz; echo '[google]') | tee requirements.txt
RUN . /vdirsyncer/env/bin/activate; fpm -s virtualenv -t deb \ RUN fpm --verbose \
-n "vdirsyncer-latest" \ --input-type virtualenv \
-v "$(cat version)" \ --output-type deb \
--prefix /opt/venvs/vdirsyncer-latest \ --name "vdirsyncer-latest" \
requirements.txt --version "$(cat version)" \
--prefix /opt/venvs/vdirsyncer-latest \
--depends python3 \
requirements.txt
RUN mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/ RUN mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
WORKDIR /vdirsyncer/pkgs/ WORKDIR /vdirsyncer/pkgs/
RUN dpkg -i *.deb RUN dpkg -i *.deb
# Check that it works:
RUN LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version RUN LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version

View file

@ -1,82 +0,0 @@
#!/usr/bin/env python
import itertools
import json
python_versions = ["3.7", "3.8"]
cfg = {}
cfg['sudo'] = True
cfg['dist'] = 'bionic'
cfg['language'] = 'python'
cfg['cache'] = 'pip'
cfg['services'] = ['docker']
cfg['git'] = {
'submodules': False
}
cfg['branches'] = {
'only': ['master']
}
cfg['install'] = """
. scripts/travis-install.sh
make -e install-$BUILD
""".strip().splitlines()
cfg['script'] = ["make -e $BUILD"]
matrix = []
cfg['matrix'] = {'include': matrix, 'fast_finish': True}
matrix.append({
'python': python_versions[0],
'env': 'BUILD=style'
})
for python, requirements in itertools.product(
python_versions,
# XXX: Use `devel` here for recent python versions:
("release", "minimal")
):
dav_servers = ("radicale", "xandikos")
matrix.append({
'python': python,
'env': f"BUILD=test REQUIREMENTS={requirements}",
})
if python == python_versions[0] and requirements == "release":
dav_servers += ("fastmail",)
for dav_server in dav_servers:
job = {
'python': python,
'env': ("BUILD=test-storage "
f"DAV_SERVER={dav_server} "
f"REQUIREMENTS={requirements} ")
}
if dav_server in ("davical", "icloud"):
job['if'] = 'NOT (type IN (pull_request))'
matrix.append(job)
matrix.append({
'python': python_versions[0],
'env': ("BUILD=test "
"ETESYNC_TESTS=true "
"REQUIREMENTS=latest")
})
# matrix.append({
# 'language': 'generic',
# 'os': 'osx',
# 'env': 'BUILD=test'
# })
with open('.travis.yml', 'w') as output:
json.dump(cfg, output, sort_keys=True, indent=2)

View file

@ -1,19 +1,26 @@
#!/bin/sh #!/bin/sh
set -xe set -xe
distro=$1
distrover=$2
name=vdirsyncer-$distro-$distrover:latest
context="$(mktemp -d)"
python setup.py sdist -d "$context" DISTRO=$1
cp scripts/dpkg.Dockerfile "$context/Dockerfile" DISTROVER=$2
NAME="vdirsyncer-${DISTRO}-${DISTROVER}:latest"
CONTEXT="$(mktemp -d)"
python setup.py sdist -d "$CONTEXT"
# Build the package in a container with the right distro version.
docker build \ docker build \
--build-arg distro=$distro \ --build-arg distro=$DISTRO \
--build-arg distrover=$distrover \ --build-arg distrover=$DISTROVER \
-t $name \ -t $NAME \
"$context" -f scripts/dpkg.Dockerfile \
"$CONTEXT"
docker run $name tar -c -C /vdirsyncer pkgs | tar x -C "$context" # Push the package to packagecloud.
package_cloud push pimutils/vdirsyncer/$distro/$distrover $context/pkgs/*.deb # TODO: Use ~/.packagecloud for CI.
rm -rf "$context" docker run -e PACKAGECLOUD_TOKEN=$PACKAGECLOUD_TOKEN $NAME \
bash -xec "package_cloud push pimutils/vdirsyncer/$DISTRO/$DISTROVER *.deb"
rm -rf "$CONTEXT"

View file

@ -1,10 +0,0 @@
#!/bin/sh
# The OS X VM doesn't have any Python support at all
# See https://github.com/travis-ci/travis-ci/issues/2312
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
brew update
brew install python3
virtualenv -p python3 $HOME/osx-py3
. $HOME/osx-py3/bin/activate
fi

View file

@ -2,7 +2,6 @@
universal = 1 universal = 1
[tool:pytest] [tool:pytest]
norecursedirs = tests/storage/servers/*
addopts = addopts =
--tb=short --tb=short
--cov-config .coveragerc --cov-config .coveragerc
@ -11,12 +10,12 @@ addopts =
--no-cov-on-fail --no-cov-on-fail
[flake8] [flake8]
# E731: Use a def instead of lambda expr application-import-names = tests,vdirsyncer
# E743: Ambiguous function definition extend-ignore =
ignore = E731, E743 E203, # Black-incompatible colon spacing.
# E503: Line break occurred before a binary operator W503, # Line jump before binary operator.
extend-ignore = E203, W503 I100,
I202
max-line-length = 88 max-line-length = 88
select = C,E,F,W,B,B9 exclude = .eggs,build
exclude = .eggs, tests/storage/servers/owncloud/, tests/storage/servers/nextcloud/, tests/storage/servers/baikal/, build/ import-order-style = smarkets
application-package-names = tests,vdirsyncer

View file

@ -1,9 +1,9 @@
''' """
Vdirsyncer synchronizes calendars and contacts. Vdirsyncer synchronizes calendars and contacts.
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
how to package vdirsyncer. how to package vdirsyncer.
''' """
from setuptools import Command from setuptools import Command
from setuptools import find_packages from setuptools import find_packages
from setuptools import setup from setuptools import setup
@ -11,25 +11,21 @@ from setuptools import setup
requirements = [ requirements = [
# https://github.com/mitsuhiko/click/issues/200 # https://github.com/mitsuhiko/click/issues/200
'click>=5.0', "click>=5.0,<9.0",
'click-log>=0.3.0, <0.4.0', "click-log>=0.3.0, <0.4.0",
# https://github.com/pimutils/vdirsyncer/issues/478 # https://github.com/pimutils/vdirsyncer/issues/478
'click-threading>=0.2', "click-threading>=0.5",
"requests >=2.20.0",
'requests >=2.20.0',
# https://github.com/sigmavirus24/requests-toolbelt/pull/28 # https://github.com/sigmavirus24/requests-toolbelt/pull/28
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54 # And https://github.com/sigmavirus24/requests-toolbelt/issues/54
'requests_toolbelt >=0.4.0', "requests_toolbelt >=0.4.0",
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa # https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
'atomicwrites>=0.1.7' "atomicwrites>=0.1.7",
] ]
class PrintRequirements(Command): class PrintRequirements(Command):
description = 'Prints minimal requirements' description = "Prints minimal requirements"
user_options = [] user_options = []
def initialize_options(self): def initialize_options(self):
@ -43,53 +39,44 @@ class PrintRequirements(Command):
print(requirement.replace(">", "=").replace(" ", "")) print(requirement.replace(">", "=").replace(" ", ""))
with open('README.rst') as f: with open("README.rst") as f:
long_description = f.read() long_description = f.read()
setup( setup(
# General metadata # General metadata
name='vdirsyncer', name="vdirsyncer",
author='Markus Unterwaditzer', author="Markus Unterwaditzer",
author_email='markus@unterwaditzer.net', author_email="markus@unterwaditzer.net",
url='https://github.com/pimutils/vdirsyncer', url="https://github.com/pimutils/vdirsyncer",
description='Synchronize calendars and contacts', description="Synchronize calendars and contacts",
license='BSD', license="BSD",
long_description=long_description, long_description=long_description,
# Runtime dependencies # Runtime dependencies
install_requires=requirements, install_requires=requirements,
# Optional dependencies # Optional dependencies
extras_require={ extras_require={
'google': ['requests-oauthlib'], "google": ["requests-oauthlib"],
'etesync': ['etesync==0.5.2', 'django<2.0'] "etesync": ["etesync==0.5.2", "django<2.0"],
}, },
# Build dependencies # Build dependencies
setup_requires=['setuptools_scm != 1.12.0'], setup_requires=["setuptools_scm != 1.12.0"],
# Other # Other
packages=find_packages(exclude=['tests.*', 'tests']), packages=find_packages(exclude=["tests.*", "tests"]),
include_package_data=True, include_package_data=True,
cmdclass={ cmdclass={"minimal_requirements": PrintRequirements},
'minimal_requirements': PrintRequirements use_scm_version={"write_to": "vdirsyncer/version.py"},
}, entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:main"]},
use_scm_version={
'write_to': 'vdirsyncer/version.py'
},
entry_points={
'console_scripts': ['vdirsyncer = vdirsyncer.cli:main']
},
classifiers=[ classifiers=[
'Development Status :: 4 - Beta', "Development Status :: 4 - Beta",
'Environment :: Console', "Environment :: Console",
'License :: OSI Approved :: BSD License', "License :: OSI Approved :: BSD License",
'Operating System :: POSIX', "Operating System :: POSIX",
'Programming Language :: Python :: 3', "Programming Language :: Python :: 3",
'Programming Language :: Python :: 3.7', "Programming Language :: Python :: 3.7",
'Programming Language :: Python :: 3.8', "Programming Language :: Python :: 3.8",
'Topic :: Internet', "Programming Language :: Python :: 3.9",
'Topic :: Utilities', "Topic :: Internet",
"Topic :: Utilities",
], ],
) )

View file

@ -1,5 +1,4 @@
hypothesis>=5.0.0 hypothesis>=5.0.0,<7.0.0
pytest pytest
pytest-cov pytest-cov
pytest-localserver pytest-localserver
pytest-subtesthack

View file

@ -1,6 +1,6 @@
''' """
Test suite for vdirsyncer. Test suite for vdirsyncer.
''' """
import hypothesis.strategies as st import hypothesis.strategies as st
import urllib3.exceptions import urllib3.exceptions
@ -10,14 +10,14 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def blow_up(*a, **kw): def blow_up(*a, **kw):
raise AssertionError('Did not expect to be called.') raise AssertionError("Did not expect to be called.")
def assert_item_equals(a, b): def assert_item_equals(a, b):
assert normalize_item(a) == normalize_item(b) assert normalize_item(a) == normalize_item(b)
VCARD_TEMPLATE = '''BEGIN:VCARD VCARD_TEMPLATE = """BEGIN:VCARD
VERSION:3.0 VERSION:3.0
FN:Cyrus Daboo FN:Cyrus Daboo
N:Daboo;Cyrus;;; N:Daboo;Cyrus;;;
@ -31,9 +31,9 @@ TEL;TYPE=FAX:412 605 0705
URL;VALUE=URI:http://www.example.com URL;VALUE=URI:http://www.example.com
X-SOMETHING:{r} X-SOMETHING:{r}
UID:{uid} UID:{uid}
END:VCARD''' END:VCARD"""
TASK_TEMPLATE = '''BEGIN:VCALENDAR TASK_TEMPLATE = """BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//dmfs.org//mimedir.icalendar//EN PRODID:-//dmfs.org//mimedir.icalendar//EN
BEGIN:VTODO BEGIN:VTODO
@ -45,25 +45,30 @@ SUMMARY:Book: Kowlani - Tödlicher Staub
X-SOMETHING:{r} X-SOMETHING:{r}
UID:{uid} UID:{uid}
END:VTODO END:VTODO
END:VCALENDAR''' END:VCALENDAR"""
BARE_EVENT_TEMPLATE = '''BEGIN:VEVENT BARE_EVENT_TEMPLATE = """BEGIN:VEVENT
DTSTART:19970714T170000Z DTSTART:19970714T170000Z
DTEND:19970715T035959Z DTEND:19970715T035959Z
SUMMARY:Bastille Day Party SUMMARY:Bastille Day Party
X-SOMETHING:{r} X-SOMETHING:{r}
UID:{uid} UID:{uid}
END:VEVENT''' END:VEVENT"""
EVENT_TEMPLATE = '''BEGIN:VCALENDAR EVENT_TEMPLATE = (
"""BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//hacksw/handcal//NONSGML v1.0//EN PRODID:-//hacksw/handcal//NONSGML v1.0//EN
''' + BARE_EVENT_TEMPLATE + ''' """
END:VCALENDAR''' + BARE_EVENT_TEMPLATE
+ """
END:VCALENDAR"""
)
EVENT_WITH_TIMEZONE_TEMPLATE = '''BEGIN:VCALENDAR EVENT_WITH_TIMEZONE_TEMPLATE = (
"""BEGIN:VCALENDAR
BEGIN:VTIMEZONE BEGIN:VTIMEZONE
TZID:Europe/Rome TZID:Europe/Rome
X-LIC-LOCATION:Europe/Rome X-LIC-LOCATION:Europe/Rome
@ -82,26 +87,23 @@ DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD END:STANDARD
END:VTIMEZONE END:VTIMEZONE
''' + BARE_EVENT_TEMPLATE + ''' """
END:VCALENDAR''' + BARE_EVENT_TEMPLATE
+ """
END:VCALENDAR"""
)
SIMPLE_TEMPLATE = '''BEGIN:FOO SIMPLE_TEMPLATE = """BEGIN:FOO
UID:{uid} UID:{uid}
X-SOMETHING:{r} X-SOMETHING:{r}
HAHA:YES HAHA:YES
END:FOO''' END:FOO"""
printable_characters_strategy = st.text( printable_characters_strategy = st.text(
st.characters(blacklist_categories=( st.characters(blacklist_categories=("Cc", "Cs"))
'Cc', 'Cs'
))
) )
uid_strategy = st.text( uid_strategy = st.text(
st.characters(blacklist_categories=( st.characters(blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
'Zs', 'Zl', 'Zp',
'Cc', 'Cs'
)),
min_size=1
).filter(lambda x: x.strip() == x) ).filter(lambda x: x.strip() == x)

View file

@ -1,6 +1,6 @@
''' """
General-purpose fixtures for vdirsyncer's testsuite. General-purpose fixtures for vdirsyncer's testsuite.
''' """
import logging import logging
import os import os
@ -13,35 +13,42 @@ from hypothesis import Verbosity
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def setup_logging(): def setup_logging():
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG) click_log.basic_config("vdirsyncer").setLevel(logging.DEBUG)
try: try:
import pytest_benchmark import pytest_benchmark
except ImportError: except ImportError:
@pytest.fixture @pytest.fixture
def benchmark(): def benchmark():
return lambda x: x() return lambda x: x()
else: else:
del pytest_benchmark del pytest_benchmark
settings.register_profile("ci", settings( settings.register_profile(
max_examples=1000, "ci",
verbosity=Verbosity.verbose, settings(
suppress_health_check=[HealthCheck.too_slow], max_examples=1000,
)) verbosity=Verbosity.verbose,
settings.register_profile("deterministic", settings( suppress_health_check=[HealthCheck.too_slow],
derandomize=True, ),
suppress_health_check=HealthCheck.all(), )
)) settings.register_profile(
settings.register_profile("dev", settings( "deterministic",
suppress_health_check=[HealthCheck.too_slow] settings(
)) derandomize=True,
suppress_health_check=HealthCheck.all(),
),
)
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
if os.environ.get('DETERMINISTIC_TESTS', 'false').lower() == 'true': if os.environ.get("DETERMINISTIC_TESTS", "false").lower() == "true":
settings.load_profile("deterministic") settings.load_profile("deterministic")
elif os.environ.get('CI', 'false').lower() == 'true': elif os.environ.get("CI", "false").lower() == "true":
settings.load_profile("ci") settings.load_profile("ci")
else: else:
settings.load_profile("dev") settings.load_profile("dev")

View file

@ -4,14 +4,11 @@ import uuid
from urllib.parse import quote as urlquote from urllib.parse import quote as urlquote
from urllib.parse import unquote as urlunquote from urllib.parse import unquote as urlunquote
import hypothesis.strategies as st
import pytest import pytest
from hypothesis import given
from .. import assert_item_equals from .. import assert_item_equals
from .. import EVENT_TEMPLATE from .. import EVENT_TEMPLATE
from .. import normalize_item from .. import normalize_item
from .. import printable_characters_strategy
from .. import TASK_TEMPLATE from .. import TASK_TEMPLATE
from .. import VCARD_TEMPLATE from .. import VCARD_TEMPLATE
from vdirsyncer import exceptions from vdirsyncer import exceptions
@ -21,7 +18,8 @@ from vdirsyncer.vobject import Item
def get_server_mixin(server_name): def get_server_mixin(server_name):
from . import __name__ as base from . import __name__ as base
x = __import__(f'{base}.servers.{server_name}', fromlist=[''])
x = __import__(f"{base}.servers.{server_name}", fromlist=[""])
return x.ServerMixin return x.ServerMixin
@ -36,18 +34,18 @@ class StorageTests:
supports_collections = True supports_collections = True
supports_metadata = True supports_metadata = True
@pytest.fixture(params=['VEVENT', 'VTODO', 'VCARD']) @pytest.fixture(params=["VEVENT", "VTODO", "VCARD"])
def item_type(self, request): def item_type(self, request):
'''Parametrize with all supported item types.''' """Parametrize with all supported item types."""
return request.param return request.param
@pytest.fixture @pytest.fixture
def get_storage_args(self): def get_storage_args(self):
''' """
Return a function with the following properties: Return a function with the following properties:
:param collection: The name of the collection to create and use. :param collection: The name of the collection to create and use.
''' """
raise NotImplementedError() raise NotImplementedError()
@pytest.fixture @pytest.fixture
@ -57,9 +55,9 @@ class StorageTests:
@pytest.fixture @pytest.fixture
def get_item(self, item_type): def get_item(self, item_type):
template = { template = {
'VEVENT': EVENT_TEMPLATE, "VEVENT": EVENT_TEMPLATE,
'VTODO': TASK_TEMPLATE, "VTODO": TASK_TEMPLATE,
'VCARD': VCARD_TEMPLATE, "VCARD": VCARD_TEMPLATE,
}[item_type] }[item_type]
return lambda **kw: format_item(template, **kw) return lambda **kw: format_item(template, **kw)
@ -67,12 +65,12 @@ class StorageTests:
@pytest.fixture @pytest.fixture
def requires_collections(self): def requires_collections(self):
if not self.supports_collections: if not self.supports_collections:
pytest.skip('This storage does not support collections.') pytest.skip("This storage does not support collections.")
@pytest.fixture @pytest.fixture
def requires_metadata(self): def requires_metadata(self):
if not self.supports_metadata: if not self.supports_metadata:
pytest.skip('This storage does not support metadata.') pytest.skip("This storage does not support metadata.")
def test_generic(self, s, get_item): def test_generic(self, s, get_item):
items = [get_item() for i in range(1, 10)] items = [get_item() for i in range(1, 10)]
@ -98,7 +96,7 @@ class StorageTests:
href, etag = s.upload(get_item()) href, etag = s.upload(get_item())
if etag is None: if etag is None:
_, etag = s.get(href) _, etag = s.get(href)
(href2, item, etag2), = s.get_multi([href] * 2) ((href2, item, etag2),) = s.get_multi([href] * 2)
assert href2 == href assert href2 == href
assert etag2 == etag assert etag2 == etag
@ -131,7 +129,7 @@ class StorageTests:
def test_update_nonexisting(self, s, get_item): def test_update_nonexisting(self, s, get_item):
item = get_item() item = get_item()
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
s.update('huehue', item, '"123"') s.update("huehue", item, '"123"')
def test_wrong_etag(self, s, get_item): def test_wrong_etag(self, s, get_item):
item = get_item() item = get_item()
@ -148,7 +146,7 @@ class StorageTests:
def test_delete_nonexisting(self, s, get_item): def test_delete_nonexisting(self, s, get_item):
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
s.delete('1', '"123"') s.delete("1", '"123"')
def test_list(self, s, get_item): def test_list(self, s, get_item):
assert not list(s.list()) assert not list(s.list())
@ -158,10 +156,10 @@ class StorageTests:
assert list(s.list()) == [(href, etag)] assert list(s.list()) == [(href, etag)]
def test_has(self, s, get_item): def test_has(self, s, get_item):
assert not s.has('asd') assert not s.has("asd")
href, etag = s.upload(get_item()) href, etag = s.upload(get_item())
assert s.has(href) assert s.has(href)
assert not s.has('asd') assert not s.has("asd")
s.delete(href, etag) s.delete(href, etag)
assert not s.has(href) assert not s.has(href)
@ -174,8 +172,8 @@ class StorageTests:
info[href] = etag info[href] = etag
assert { assert {
href: etag for href, item, etag href: etag
in s.get_multi(href for href, etag in info.items()) for href, item, etag in s.get_multi(href for href, etag in info.items())
} == info } == info
def test_repr(self, s, get_storage_args): def test_repr(self, s, get_storage_args):
@ -185,80 +183,76 @@ class StorageTests:
def test_discover(self, requires_collections, get_storage_args, get_item): def test_discover(self, requires_collections, get_storage_args, get_item):
collections = set() collections = set()
for i in range(1, 5): for i in range(1, 5):
collection = f'test{i}' collection = f"test{i}"
s = self.storage_class(**get_storage_args(collection=collection)) s = self.storage_class(**get_storage_args(collection=collection))
assert not list(s.list()) assert not list(s.list())
s.upload(get_item()) s.upload(get_item())
collections.add(s.collection) collections.add(s.collection)
actual = { actual = {
c['collection'] for c in c["collection"]
self.storage_class.discover(**get_storage_args(collection=None)) for c in self.storage_class.discover(**get_storage_args(collection=None))
} }
assert actual >= collections assert actual >= collections
def test_create_collection(self, requires_collections, get_storage_args, def test_create_collection(self, requires_collections, get_storage_args, get_item):
get_item): if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
if getattr(self, 'dav_server', '') in \ pytest.skip("Manual cleanup would be necessary.")
('icloud', 'fastmail', 'davical'): if getattr(self, "dav_server", "") == "radicale":
pytest.skip('Manual cleanup would be necessary.')
if getattr(self, 'dav_server', '') == "radicale":
pytest.skip("Radicale does not support collection creation") pytest.skip("Radicale does not support collection creation")
args = get_storage_args(collection=None) args = get_storage_args(collection=None)
args['collection'] = 'test' args["collection"] = "test"
s = self.storage_class( s = self.storage_class(**self.storage_class.create_collection(**args))
**self.storage_class.create_collection(**args)
)
href = s.upload(get_item())[0] href = s.upload(get_item())[0]
assert href in {href for href, etag in s.list()} assert href in (href for href, etag in s.list())
def test_discover_collection_arg(self, requires_collections, def test_discover_collection_arg(self, requires_collections, get_storage_args):
get_storage_args): args = get_storage_args(collection="test2")
args = get_storage_args(collection='test2')
with pytest.raises(TypeError) as excinfo: with pytest.raises(TypeError) as excinfo:
list(self.storage_class.discover(**args)) list(self.storage_class.discover(**args))
assert 'collection argument must not be given' in str(excinfo.value) assert "collection argument must not be given" in str(excinfo.value)
def test_collection_arg(self, get_storage_args): def test_collection_arg(self, get_storage_args):
if self.storage_class.storage_name.startswith('etesync'): if self.storage_class.storage_name.startswith("etesync"):
pytest.skip('etesync uses UUIDs.') pytest.skip("etesync uses UUIDs.")
if self.supports_collections: if self.supports_collections:
s = self.storage_class(**get_storage_args(collection='test2')) s = self.storage_class(**get_storage_args(collection="test2"))
# Can't do stronger assertion because of radicale, which needs a # Can't do stronger assertion because of radicale, which needs a
# fileextension to guess the collection type. # fileextension to guess the collection type.
assert 'test2' in s.collection assert "test2" in s.collection
else: else:
with pytest.raises(ValueError): with pytest.raises(ValueError):
self.storage_class(collection='ayy', **get_storage_args()) self.storage_class(collection="ayy", **get_storage_args())
def test_case_sensitive_uids(self, s, get_item): def test_case_sensitive_uids(self, s, get_item):
if s.storage_name == 'filesystem': if s.storage_name == "filesystem":
pytest.skip('Behavior depends on the filesystem.') pytest.skip("Behavior depends on the filesystem.")
uid = str(uuid.uuid4()) uid = str(uuid.uuid4())
s.upload(get_item(uid=uid.upper())) s.upload(get_item(uid=uid.upper()))
s.upload(get_item(uid=uid.lower())) s.upload(get_item(uid=uid.lower()))
items = list(href for href, etag in s.list()) items = [href for href, etag in s.list()]
assert len(items) == 2 assert len(items) == 2
assert len(set(items)) == 2 assert len(set(items)) == 2
def test_specialchars(self, monkeypatch, requires_collections, def test_specialchars(
get_storage_args, get_item): self, monkeypatch, requires_collections, get_storage_args, get_item
if getattr(self, 'dav_server', '') == 'radicale': ):
pytest.skip('Radicale is fundamentally broken.') if getattr(self, "dav_server", "") == "radicale":
if getattr(self, 'dav_server', '') in ('icloud', 'fastmail'): pytest.skip("Radicale is fundamentally broken.")
pytest.skip('iCloud and FastMail reject this name.') if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
pytest.skip("iCloud and FastMail reject this name.")
monkeypatch.setattr('vdirsyncer.utils.generate_href', lambda x: x) monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
uid = 'test @ foo ät bar град сатану' uid = "test @ foo ät bar град сатану"
collection = 'test @ foo ät bar' collection = "test @ foo ät bar"
s = self.storage_class(**get_storage_args(collection=collection)) s = self.storage_class(**get_storage_args(collection=collection))
item = get_item(uid=uid) item = get_item(uid=uid)
@ -269,55 +263,66 @@ class StorageTests:
assert etag2 == etag assert etag2 == etag
assert_item_equals(item2, item) assert_item_equals(item2, item)
(_, etag3), = s.list() ((_, etag3),) = s.list()
assert etag2 == etag3 assert etag2 == etag3
# etesync uses UUIDs for collection names # etesync uses UUIDs for collection names
if self.storage_class.storage_name.startswith('etesync'): if self.storage_class.storage_name.startswith("etesync"):
return return
assert collection in urlunquote(s.collection) assert collection in urlunquote(s.collection)
if self.storage_class.storage_name.endswith('dav'): if self.storage_class.storage_name.endswith("dav"):
assert urlquote(uid, '/@:') in href assert urlquote(uid, "/@:") in href
def test_metadata(self, requires_metadata, s): def test_metadata(self, requires_metadata, s):
if not getattr(self, 'dav_server', ''): if not getattr(self, "dav_server", ""):
assert not s.get_meta('color') assert not s.get_meta("color")
assert not s.get_meta('displayname') assert not s.get_meta("displayname")
try: try:
s.set_meta('color', None) s.set_meta("color", None)
assert not s.get_meta('color') assert not s.get_meta("color")
s.set_meta('color', '#ff0000') s.set_meta("color", "#ff0000")
assert s.get_meta('color') == '#ff0000' assert s.get_meta("color") == "#ff0000"
except exceptions.UnsupportedMetadataError: except exceptions.UnsupportedMetadataError:
pass pass
for x in ('hello world', 'hello wörld'): for x in ("hello world", "hello wörld"):
s.set_meta('displayname', x) s.set_meta("displayname", x)
rv = s.get_meta('displayname') rv = s.get_meta("displayname")
assert rv == x assert rv == x
assert isinstance(rv, str) assert isinstance(rv, str)
@given(value=st.one_of( @pytest.mark.parametrize(
st.none(), "value",
printable_characters_strategy [
)) None,
"",
"Hello there!",
"Österreich",
"中国",
"한글",
"42a4ec99-b1c2-4859-b142-759112f2ca50",
"فلسطين",
],
)
def test_metadata_normalization(self, requires_metadata, s, value): def test_metadata_normalization(self, requires_metadata, s, value):
x = s.get_meta('displayname') x = s.get_meta("displayname")
assert x == normalize_meta_value(x) assert x == normalize_meta_value(x)
if not getattr(self, 'dav_server', None): if not getattr(self, "dav_server", None):
# ownCloud replaces "" with "unnamed" # ownCloud replaces "" with "unnamed"
s.set_meta('displayname', value) s.set_meta("displayname", value)
assert s.get_meta('displayname') == normalize_meta_value(value) assert s.get_meta("displayname") == normalize_meta_value(value)
def test_recurring_events(self, s, item_type): def test_recurring_events(self, s, item_type):
if item_type != 'VEVENT': if item_type != "VEVENT":
pytest.skip('This storage instance doesn\'t support iCalendar.') pytest.skip("This storage instance doesn't support iCalendar.")
uid = str(uuid.uuid4()) uid = str(uuid.uuid4())
item = Item(textwrap.dedent(''' item = Item(
textwrap.dedent(
"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
BEGIN:VEVENT BEGIN:VEVENT
@ -351,7 +356,11 @@ class StorageTests:
TRANSP:OPAQUE TRANSP:OPAQUE
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
'''.format(uid=uid)).strip()) """.format(
uid=uid
)
).strip()
)
href, etag = s.upload(item) href, etag = s.upload(item)

View file

@ -1,6 +1,82 @@
import contextlib
import subprocess
import time
import uuid import uuid
import pytest import pytest
import requests
def wait_for_container(url):
"""Wait for a container to initialise.
Polls a URL every 100ms until the server responds.
"""
# give the server 5 seconds to settle
for _ in range(50):
print(_)
try:
response = requests.get(url)
response.raise_for_status()
except requests.ConnectionError:
pass
else:
return
time.sleep(0.1)
pytest.exit(
"Server did not initialise in 5 seconds.\n"
"WARNING: There may be a stale docker container still running."
)
@contextlib.contextmanager
def dockerised_server(name, container_port, exposed_port):
"""Run a dockerised DAV server as a contenxt manager."""
container_id = None
url = f"http://127.0.0.1:{exposed_port}/"
try:
# Hint: This will block while the pull happends, and only return once
# the container has actually started.
output = subprocess.check_output(
[
"docker",
"run",
"--detach",
"--publish",
f"{exposed_port}:{container_port}",
f"whynothugo/vdirsyncer-devkit-{name}",
]
)
container_id = output.decode().strip()
wait_for_container(url)
yield url
finally:
if container_id:
subprocess.check_output(["docker", "kill", container_id])
@pytest.fixture(scope="session")
def baikal_server():
with dockerised_server("baikal", "80", "8002"):
yield
@pytest.fixture(scope="session")
def radicale_server():
with dockerised_server("radicale", "8001", "8001"):
yield
@pytest.fixture(scope="session")
def xandikos_server():
with dockerised_server("xandikos", "8000", "8000"):
yield
@pytest.fixture @pytest.fixture
@ -11,13 +87,13 @@ def slow_create_collection(request):
def delete_collections(): def delete_collections():
for s in to_delete: for s in to_delete:
s.session.request('DELETE', '') s.session.request("DELETE", "")
request.addfinalizer(delete_collections) request.addfinalizer(delete_collections)
def inner(cls, args, collection): def inner(cls, args, collection):
assert collection.startswith('test') assert collection.startswith("test")
collection += '-vdirsyncer-ci-' + str(uuid.uuid4()) collection += "-vdirsyncer-ci-" + str(uuid.uuid4())
args = cls.create_collection(collection, **args) args = cls.create_collection(collection, **args)
s = cls(**args) s = cls(**args)

View file

@ -11,26 +11,25 @@ from vdirsyncer import exceptions
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
dav_server = os.environ.get('DAV_SERVER', 'skip') dav_server = os.environ.get("DAV_SERVER", "skip")
ServerMixin = get_server_mixin(dav_server) ServerMixin = get_server_mixin(dav_server)
class DAVStorageTests(ServerMixin, StorageTests): class DAVStorageTests(ServerMixin, StorageTests):
dav_server = dav_server dav_server = dav_server
@pytest.mark.skipif(dav_server == 'radicale', @pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
reason='Radicale is very tolerant.')
def test_dav_broken_item(self, s): def test_dav_broken_item(self, s):
item = Item('HAHA:YES') item = Item("HAHA:YES")
with pytest.raises((exceptions.Error, requests.exceptions.HTTPError)): with pytest.raises((exceptions.Error, requests.exceptions.HTTPError)):
s.upload(item) s.upload(item)
assert not list(s.list()) assert not list(s.list())
def test_dav_empty_get_multi_performance(self, s, monkeypatch): def test_dav_empty_get_multi_performance(self, s, monkeypatch):
def breakdown(*a, **kw): def breakdown(*a, **kw):
raise AssertionError('Expected not to be called.') raise AssertionError("Expected not to be called.")
monkeypatch.setattr('requests.sessions.Session.request', breakdown) monkeypatch.setattr("requests.sessions.Session.request", breakdown)
try: try:
assert list(s.get_multi([])) == [] assert list(s.get_multi([])) == []
@ -39,12 +38,11 @@ class DAVStorageTests(ServerMixin, StorageTests):
monkeypatch.undo() monkeypatch.undo()
def test_dav_unicode_href(self, s, get_item, monkeypatch): def test_dav_unicode_href(self, s, get_item, monkeypatch):
if self.dav_server == 'radicale': if self.dav_server == "radicale":
pytest.skip('Radicale is unable to deal with unicode hrefs') pytest.skip("Radicale is unable to deal with unicode hrefs")
monkeypatch.setattr(s, '_get_href', monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
lambda item: item.ident + s.fileext) item = get_item(uid="град сатану" + str(uuid.uuid4()))
item = get_item(uid='град сатану' + str(uuid.uuid4()))
href, etag = s.upload(item) href, etag = s.upload(item)
item2, etag2 = s.get(href) item2, etag2 = s.get(href)
assert_item_equals(item, item2) assert_item_equals(item, item2)

View file

@ -17,10 +17,11 @@ from vdirsyncer.storage.dav import CalDAVStorage
class TestCalDAVStorage(DAVStorageTests): class TestCalDAVStorage(DAVStorageTests):
storage_class = CalDAVStorage storage_class = CalDAVStorage
@pytest.fixture(params=['VTODO', 'VEVENT']) @pytest.fixture(params=["VTODO", "VEVENT"])
def item_type(self, request): def item_type(self, request):
return request.param return request.param
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
def test_doesnt_accept_vcard(self, item_type, get_storage_args): def test_doesnt_accept_vcard(self, item_type, get_storage_args):
s = self.storage_class(item_types=(item_type,), **get_storage_args()) s = self.storage_class(item_types=(item_type,), **get_storage_args())
@ -32,15 +33,20 @@ class TestCalDAVStorage(DAVStorageTests):
# The `arg` param is not named `item_types` because that would hit # The `arg` param is not named `item_types` because that would hit
# https://bitbucket.org/pytest-dev/pytest/issue/745/ # https://bitbucket.org/pytest-dev/pytest/issue/745/
@pytest.mark.parametrize('arg,calls_num', [ @pytest.mark.parametrize(
(('VTODO',), 1), "arg,calls_num",
(('VEVENT',), 1), [
(('VTODO', 'VEVENT'), 2), (("VTODO",), 1),
(('VTODO', 'VEVENT', 'VJOURNAL'), 3), (("VEVENT",), 1),
((), 1) (("VTODO", "VEVENT"), 2),
]) (("VTODO", "VEVENT", "VJOURNAL"), 3),
def test_item_types_performance(self, get_storage_args, arg, calls_num, ((), 1),
monkeypatch): ],
)
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
def test_item_types_performance(
self, get_storage_args, arg, calls_num, monkeypatch
):
s = self.storage_class(item_types=arg, **get_storage_args()) s = self.storage_class(item_types=arg, **get_storage_args())
old_parse = s._parse_prop_responses old_parse = s._parse_prop_responses
calls = [] calls = []
@ -49,19 +55,23 @@ class TestCalDAVStorage(DAVStorageTests):
calls.append(None) calls.append(None)
return old_parse(*a, **kw) return old_parse(*a, **kw)
monkeypatch.setattr(s, '_parse_prop_responses', new_parse) monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
list(s.list()) list(s.list())
assert len(calls) == calls_num assert len(calls) == calls_num
@pytest.mark.xfail(dav_server == 'radicale', @pytest.mark.xfail(
reason='Radicale doesn\'t support timeranges.') dav_server == "radicale", reason="Radicale doesn't support timeranges."
)
def test_timerange_correctness(self, get_storage_args): def test_timerange_correctness(self, get_storage_args):
start_date = datetime.datetime(2013, 9, 10) start_date = datetime.datetime(2013, 9, 10)
end_date = datetime.datetime(2013, 9, 13) end_date = datetime.datetime(2013, 9, 13)
s = self.storage_class(start_date=start_date, end_date=end_date, s = self.storage_class(
**get_storage_args()) start_date=start_date, end_date=end_date, **get_storage_args()
)
too_old_item = format_item(dedent(''' too_old_item = format_item(
dedent(
"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//hacksw/handcal//NONSGML v1.0//EN PRODID:-//hacksw/handcal//NONSGML v1.0//EN
@ -73,9 +83,13 @@ class TestCalDAVStorage(DAVStorageTests):
UID:{r} UID:{r}
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
''').strip()) """
).strip()
)
too_new_item = format_item(dedent(''' too_new_item = format_item(
dedent(
"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//hacksw/handcal//NONSGML v1.0//EN PRODID:-//hacksw/handcal//NONSGML v1.0//EN
@ -87,9 +101,13 @@ class TestCalDAVStorage(DAVStorageTests):
UID:{r} UID:{r}
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
''').strip()) """
).strip()
)
good_item = format_item(dedent(''' good_item = format_item(
dedent(
"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//hacksw/handcal//NONSGML v1.0//EN PRODID:-//hacksw/handcal//NONSGML v1.0//EN
@ -101,13 +119,15 @@ class TestCalDAVStorage(DAVStorageTests):
UID:{r} UID:{r}
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
''').strip()) """
).strip()
)
s.upload(too_old_item) s.upload(too_old_item)
s.upload(too_new_item) s.upload(too_new_item)
expected_href, _ = s.upload(good_item) expected_href, _ = s.upload(good_item)
(actual_href, _), = s.list() ((actual_href, _),) = s.list()
assert actual_href == expected_href assert actual_href == expected_href
def test_invalid_resource(self, monkeypatch, get_storage_args): def test_invalid_resource(self, monkeypatch, get_storage_args):
@ -115,37 +135,38 @@ class TestCalDAVStorage(DAVStorageTests):
args = get_storage_args(collection=None) args = get_storage_args(collection=None)
def request(session, method, url, **kwargs): def request(session, method, url, **kwargs):
assert url == args['url'] assert url == args["url"]
calls.append(None) calls.append(None)
r = requests.Response() r = requests.Response()
r.status_code = 200 r.status_code = 200
r._content = b'Hello World.' r._content = b"Hello World."
return r return r
monkeypatch.setattr('requests.sessions.Session.request', request) monkeypatch.setattr("requests.sessions.Session.request", request)
with pytest.raises(ValueError): with pytest.raises(ValueError):
s = self.storage_class(**args) s = self.storage_class(**args)
list(s.list()) list(s.list())
assert len(calls) == 1 assert len(calls) == 1
@pytest.mark.skipif(dav_server == 'icloud', @pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
reason='iCloud only accepts VEVENT') @pytest.mark.skipif(
@pytest.mark.skipif(dav_server == 'fastmail', dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
reason='Fastmail has non-standard hadling of VTODOs.') )
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
def test_item_types_general(self, s): def test_item_types_general(self, s):
event = s.upload(format_item(EVENT_TEMPLATE))[0] event = s.upload(format_item(EVENT_TEMPLATE))[0]
task = s.upload(format_item(TASK_TEMPLATE))[0] task = s.upload(format_item(TASK_TEMPLATE))[0]
s.item_types = ('VTODO', 'VEVENT') s.item_types = ("VTODO", "VEVENT")
def hrefs(): def hrefs():
return {href for href, etag in s.list()} return {href for href, etag in s.list()}
assert hrefs() == {event, task} assert hrefs() == {event, task}
s.item_types = ('VTODO',) s.item_types = ("VTODO",)
assert hrefs() == {task} assert hrefs() == {task}
s.item_types = ('VEVENT',) s.item_types = ("VEVENT",)
assert hrefs() == {event} assert hrefs() == {event}
s.item_types = () s.item_types = ()
assert hrefs() == {event, task} assert hrefs() == {event, task}

View file

@ -7,6 +7,6 @@ from vdirsyncer.storage.dav import CardDAVStorage
class TestCardDAVStorage(DAVStorageTests): class TestCardDAVStorage(DAVStorageTests):
storage_class = CardDAVStorage storage_class = CardDAVStorage
@pytest.fixture(params=['VCARD']) @pytest.fixture(params=["VCARD"])
def item_type(self, request): def item_type(self, request):
return request.param return request.param

View file

@ -6,37 +6,41 @@ from vdirsyncer.storage.dav import _parse_xml
def test_xml_utilities(): def test_xml_utilities():
x = _parse_xml(b'''<?xml version="1.0" encoding="UTF-8" ?> x = _parse_xml(
<D:multistatus xmlns:D="DAV:"> b"""<?xml version="1.0" encoding="UTF-8" ?>
<D:response> <multistatus xmlns="DAV:">
<D:propstat> <response>
<D:status>HTTP/1.1 404 Not Found</D:status> <propstat>
<D:prop> <status>HTTP/1.1 404 Not Found</status>
<D:getcontenttype/> <prop>
</D:prop> <getcontenttype/>
</D:propstat> </prop>
<D:propstat> </propstat>
<D:prop> <propstat>
<D:resourcetype> <prop>
<D:collection/> <resourcetype>
</D:resourcetype> <collection/>
</D:prop> </resourcetype>
</D:propstat> </prop>
</D:response> </propstat>
</D:multistatus> </response>
''') </multistatus>
"""
)
response = x.find('{DAV:}response') response = x.find("{DAV:}response")
props = _merge_xml(response.findall('{DAV:}propstat/{DAV:}prop')) props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
assert props.find('{DAV:}resourcetype/{DAV:}collection') is not None assert props.find("{DAV:}resourcetype/{DAV:}collection") is not None
assert props.find('{DAV:}getcontenttype') is not None assert props.find("{DAV:}getcontenttype") is not None
@pytest.mark.parametrize('char', range(32)) @pytest.mark.parametrize("char", range(32))
def test_xml_specialchars(char): def test_xml_specialchars(char):
x = _parse_xml('<?xml version="1.0" encoding="UTF-8" ?>' x = _parse_xml(
'<foo>ye{}s\r\n' '<?xml version="1.0" encoding="UTF-8" ?>'
'hello</foo>'.format(chr(char)).encode('ascii')) "<foo>ye{}s\r\n"
"hello</foo>".format(chr(char)).encode("ascii")
)
if char in _BAD_XML_CHARS: if char in _BAD_XML_CHARS:
assert x.text == 'yes\nhello' assert x.text == "yes\nhello"

View file

@ -19,7 +19,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret! # SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'd7r(p-9=$3a@bbt%*+$p@4)cej13nzd0gmnt8+m0bitb=-umj#' SECRET_KEY = "d7r(p-9=$3a@bbt%*+$p@4)cej13nzd0gmnt8+m0bitb=-umj#"
# SECURITY WARNING: don't run with debug turned on in production! # SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True DEBUG = True
@ -30,56 +30,55 @@ ALLOWED_HOSTS = []
# Application definition # Application definition
INSTALLED_APPS = [ INSTALLED_APPS = [
'django.contrib.admin', "django.contrib.admin",
'django.contrib.auth', "django.contrib.auth",
'django.contrib.contenttypes', "django.contrib.contenttypes",
'django.contrib.sessions', "django.contrib.sessions",
'django.contrib.messages', "django.contrib.messages",
'django.contrib.staticfiles', "django.contrib.staticfiles",
'rest_framework', "rest_framework",
'rest_framework.authtoken', "rest_framework.authtoken",
'journal.apps.JournalConfig', "journal.apps.JournalConfig",
] ]
MIDDLEWARE = [ MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware', "django.middleware.security.SecurityMiddleware",
'django.contrib.sessions.middleware.SessionMiddleware', "django.contrib.sessions.middleware.SessionMiddleware",
'django.middleware.common.CommonMiddleware', "django.middleware.common.CommonMiddleware",
'django.middleware.csrf.CsrfViewMiddleware', "django.middleware.csrf.CsrfViewMiddleware",
'django.contrib.auth.middleware.AuthenticationMiddleware', "django.contrib.auth.middleware.AuthenticationMiddleware",
'django.contrib.messages.middleware.MessageMiddleware', "django.contrib.messages.middleware.MessageMiddleware",
'django.middleware.clickjacking.XFrameOptionsMiddleware', "django.middleware.clickjacking.XFrameOptionsMiddleware",
] ]
ROOT_URLCONF = 'etesync_server.urls' ROOT_URLCONF = "etesync_server.urls"
TEMPLATES = [ TEMPLATES = [
{ {
'BACKEND': 'django.template.backends.django.DjangoTemplates', "BACKEND": "django.template.backends.django.DjangoTemplates",
'DIRS': [], "DIRS": [],
'APP_DIRS': True, "APP_DIRS": True,
'OPTIONS': { "OPTIONS": {
'context_processors': [ "context_processors": [
'django.template.context_processors.debug', "django.template.context_processors.debug",
'django.template.context_processors.request', "django.template.context_processors.request",
'django.contrib.auth.context_processors.auth', "django.contrib.auth.context_processors.auth",
'django.contrib.messages.context_processors.messages', "django.contrib.messages.context_processors.messages",
], ],
}, },
}, },
] ]
WSGI_APPLICATION = 'etesync_server.wsgi.application' WSGI_APPLICATION = "etesync_server.wsgi.application"
# Database # Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases # https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = { DATABASES = {
'default': { "default": {
'ENGINE': 'django.db.backends.sqlite3', "ENGINE": "django.db.backends.sqlite3",
'NAME': os.environ.get('ETESYNC_DB_PATH', "NAME": os.environ.get("ETESYNC_DB_PATH", os.path.join(BASE_DIR, "db.sqlite3")),
os.path.join(BASE_DIR, 'db.sqlite3')),
} }
} }
@ -89,16 +88,16 @@ DATABASES = {
AUTH_PASSWORD_VALIDATORS = [ AUTH_PASSWORD_VALIDATORS = [
{ {
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa
}, },
{ {
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", # noqa
}, },
{ {
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", # noqa
}, },
{ {
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", # noqa
}, },
] ]
@ -106,9 +105,9 @@ AUTH_PASSWORD_VALIDATORS = [
# Internationalization # Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/ # https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us' LANGUAGE_CODE = "en-us"
TIME_ZONE = 'UTC' TIME_ZONE = "UTC"
USE_I18N = True USE_I18N = True
@ -120,4 +119,4 @@ USE_TZ = True
# Static files (CSS, JavaScript, Images) # Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/ # https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/' STATIC_URL = "/static/"

View file

@ -19,22 +19,19 @@ from journal import views
from rest_framework_nested import routers from rest_framework_nested import routers
router = routers.DefaultRouter() router = routers.DefaultRouter()
router.register(r'journals', views.JournalViewSet) router.register(r"journals", views.JournalViewSet)
router.register(r'journal/(?P<journal_uid>[^/]+)', views.EntryViewSet) router.register(r"journal/(?P<journal_uid>[^/]+)", views.EntryViewSet)
router.register(r'user', views.UserInfoViewSet) router.register(r"user", views.UserInfoViewSet)
journals_router = routers.NestedSimpleRouter(router, r'journals', journals_router = routers.NestedSimpleRouter(router, r"journals", lookup="journal")
lookup='journal') journals_router.register(r"members", views.MembersViewSet, base_name="journal-members")
journals_router.register(r'members', views.MembersViewSet, journals_router.register(r"entries", views.EntryViewSet, base_name="journal-entries")
base_name='journal-members')
journals_router.register(r'entries', views.EntryViewSet,
base_name='journal-entries')
urlpatterns = [ urlpatterns = [
url(r'^api/v1/', include(router.urls)), url(r"^api/v1/", include(router.urls)),
url(r'^api/v1/', include(journals_router.urls)), url(r"^api/v1/", include(journals_router.urls)),
] ]
# Adding this just for testing, this shouldn't be here normally # Adding this just for testing, this shouldn't be here normally
urlpatterns += url(r'^reset/$', views.reset, name='reset_debug'), urlpatterns += (url(r"^reset/$", views.reset, name="reset_debug"),)

View file

@ -10,24 +10,23 @@ from vdirsyncer.storage.etesync import EtesyncCalendars
from vdirsyncer.storage.etesync import EtesyncContacts from vdirsyncer.storage.etesync import EtesyncContacts
pytestmark = pytest.mark.skipif(os.getenv('ETESYNC_TESTS', '') != 'true', pytestmark = pytest.mark.skipif(
reason='etesync tests disabled') os.getenv("ETESYNC_TESTS", "") != "true", reason="etesync tests disabled"
)
@pytest.fixture(scope='session') @pytest.fixture(scope="session")
def etesync_app(tmpdir_factory): def etesync_app(tmpdir_factory):
sys.path.insert(0, os.path.join(os.path.dirname(__file__), sys.path.insert(0, os.path.join(os.path.dirname(__file__), "etesync_server"))
'etesync_server'))
db = tmpdir_factory.mktemp('etesync').join('etesync.sqlite') db = tmpdir_factory.mktemp("etesync").join("etesync.sqlite")
shutil.copy( shutil.copy(
os.path.join(os.path.dirname(__file__), 'etesync_server', os.path.join(os.path.dirname(__file__), "etesync_server", "db.sqlite3"), str(db)
'db.sqlite3'),
str(db)
) )
os.environ['ETESYNC_DB_PATH'] = str(db) os.environ["ETESYNC_DB_PATH"] = str(db)
from etesync_server.wsgi import application from etesync_server.wsgi import application
return application return application
@ -39,44 +38,44 @@ class EtesyncTests(StorageTests):
def get_storage_args(self, request, get_item, tmpdir, etesync_app): def get_storage_args(self, request, get_item, tmpdir, etesync_app):
import wsgi_intercept import wsgi_intercept
import wsgi_intercept.requests_intercept import wsgi_intercept.requests_intercept
wsgi_intercept.requests_intercept.install() wsgi_intercept.requests_intercept.install()
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 8000, wsgi_intercept.add_wsgi_intercept("127.0.0.1", 8000, lambda: etesync_app)
lambda: etesync_app)
def teardown(): def teardown():
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 8000) wsgi_intercept.remove_wsgi_intercept("127.0.0.1", 8000)
wsgi_intercept.requests_intercept.uninstall() wsgi_intercept.requests_intercept.uninstall()
request.addfinalizer(teardown) request.addfinalizer(teardown)
with open(os.path.join(os.path.dirname(__file__), with open(
'test@localhost/auth_token')) as f: os.path.join(os.path.dirname(__file__), "test@localhost/auth_token")
) as f:
token = f.read().strip() token = f.read().strip()
headers = {'Authorization': 'Token ' + token} headers = {"Authorization": "Token " + token}
r = requests.post('http://127.0.0.1:8000/reset/', headers=headers, r = requests.post(
allow_redirects=False) "http://127.0.0.1:8000/reset/", headers=headers, allow_redirects=False
)
assert r.status_code == 200 assert r.status_code == 200
def inner(collection='test'): def inner(collection="test"):
rv = { rv = {
'email': 'test@localhost', "email": "test@localhost",
'db_path': str(tmpdir.join('etesync.db')), "db_path": str(tmpdir.join("etesync.db")),
'secrets_dir': os.path.dirname(__file__), "secrets_dir": os.path.dirname(__file__),
'server_url': 'http://127.0.0.1:8000/' "server_url": "http://127.0.0.1:8000/",
} }
if collection is not None: if collection is not None:
rv = self.storage_class.create_collection( rv = self.storage_class.create_collection(collection=collection, **rv)
collection=collection,
**rv
)
return rv return rv
return inner return inner
class TestContacts(EtesyncTests): class TestContacts(EtesyncTests):
storage_class = EtesyncContacts storage_class = EtesyncContacts
@pytest.fixture(params=['VCARD']) @pytest.fixture(params=["VCARD"])
def item_type(self, request): def item_type(self, request):
return request.param return request.param
@ -84,6 +83,6 @@ class TestContacts(EtesyncTests):
class TestCalendars(EtesyncTests): class TestCalendars(EtesyncTests):
storage_class = EtesyncCalendars storage_class = EtesyncCalendars
@pytest.fixture(params=['VEVENT']) @pytest.fixture(params=["VEVENT"])
def item_type(self, request): def item_type(self, request):
return request.param return request.param

View file

@ -3,7 +3,7 @@ import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args(self, request, tmpdir, slow_create_collection): def get_storage_args(self, request, tmpdir, slow_create_collection, baikal_server):
def inner(collection="test"): def inner(collection="test"):
base_url = "http://127.0.0.1:8002/" base_url = "http://127.0.0.1:8002/"
args = { args = {
@ -12,10 +12,10 @@ class ServerMixin:
"password": "baikal", "password": "baikal",
} }
if self.storage_class.fileext == '.vcf': if self.storage_class.fileext == ".vcf":
args['url'] = base_url + "card.php/" args["url"] = base_url + "card.php/"
else: else:
args['url'] = base_url + "cal.php/" args["url"] = base_url + "cal.php/"
if collection is not None: if collection is not None:
args = slow_create_collection(self.storage_class, args, collection) args = slow_create_collection(self.storage_class, args, collection)

View file

@ -1,4 +0,0 @@
#!/bin/sh
docker-compose build baikal
docker-compose up -d baikal

View file

@ -6,43 +6,42 @@ import pytest
try: try:
caldav_args = { caldav_args = {
# Those credentials are configured through the Travis UI # Those credentials are configured through the Travis UI
'username': os.environ['DAVICAL_USERNAME'].strip(), "username": os.environ["DAVICAL_USERNAME"].strip(),
'password': os.environ['DAVICAL_PASSWORD'].strip(), "password": os.environ["DAVICAL_PASSWORD"].strip(),
'url': 'https://brutus.lostpackets.de/davical-test/caldav.php/', "url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
} }
except KeyError as e: except KeyError as e:
pytestmark = pytest.mark.skip('Missing envkey: {}'.format(str(e))) pytestmark = pytest.mark.skip("Missing envkey: {}".format(str(e)))
@pytest.mark.flaky(reruns=5) @pytest.mark.flaky(reruns=5)
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def davical_args(self): def davical_args(self):
if self.storage_class.fileext == '.ics': if self.storage_class.fileext == ".ics":
return dict(caldav_args) return dict(caldav_args)
elif self.storage_class.fileext == '.vcf': elif self.storage_class.fileext == ".vcf":
pytest.skip('No carddav') pytest.skip("No carddav")
else: else:
raise RuntimeError() raise RuntimeError()
@pytest.fixture @pytest.fixture
def get_storage_args(self, davical_args, request): def get_storage_args(self, davical_args, request):
def inner(collection='test'): def inner(collection="test"):
if collection is None: if collection is None:
return davical_args return davical_args
assert collection.startswith('test') assert collection.startswith("test")
for _ in range(4): for _ in range(4):
args = self.storage_class.create_collection( args = self.storage_class.create_collection(
collection + str(uuid.uuid4()), collection + str(uuid.uuid4()), **davical_args
**davical_args
) )
s = self.storage_class(**args) s = self.storage_class(**args)
if not list(s.list()): if not list(s.list()):
request.addfinalizer( request.addfinalizer(lambda: s.session.request("DELETE", ""))
lambda: s.session.request('DELETE', ''))
return args return args
raise RuntimeError('Failed to find free collection.') raise RuntimeError("Failed to find free collection.")
return inner return inner

View file

@ -4,29 +4,28 @@ import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args(self, item_type, slow_create_collection): def get_storage_args(self, item_type, slow_create_collection):
if item_type != 'VEVENT': if item_type != "VEVENT":
# iCloud collections can either be calendars or task lists. # iCloud collections can either be calendars or task lists.
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa # See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa
pytest.skip('iCloud doesn\'t support anything else than VEVENT') pytest.skip("iCloud doesn't support anything else than VEVENT")
def inner(collection='test'): def inner(collection="test"):
args = { args = {
'username': os.environ['ICLOUD_USERNAME'], "username": os.environ["ICLOUD_USERNAME"],
'password': os.environ['ICLOUD_PASSWORD'] "password": os.environ["ICLOUD_PASSWORD"],
} }
if self.storage_class.fileext == '.ics': if self.storage_class.fileext == ".ics":
args['url'] = 'https://caldav.icloud.com/' args["url"] = "https://caldav.icloud.com/"
elif self.storage_class.fileext == '.vcf': elif self.storage_class.fileext == ".vcf":
args['url'] = 'https://contacts.icloud.com/' args["url"] = "https://contacts.icloud.com/"
else: else:
raise RuntimeError() raise RuntimeError()
if collection is not None: if collection is not None:
args = slow_create_collection(self.storage_class, args, args = slow_create_collection(self.storage_class, args, collection)
collection)
return args return args
return inner return inner

View file

@ -1 +0,0 @@
mysteryshack

View file

@ -1,72 +0,0 @@
import os
import shutil
import subprocess
import time
import pytest
import requests
testserver_repo = os.path.dirname(__file__)
make_sh = os.path.abspath(os.path.join(testserver_repo, 'make.sh'))
def wait():
for i in range(100):
try:
requests.get('http://127.0.0.1:6767/', verify=False)
except Exception as e:
# Don't know exact exception class, don't care.
# Also, https://github.com/kennethreitz/requests/issues/2192
if 'connection refused' not in str(e).lower():
raise
time.sleep(2 ** i)
else:
return True
return False
class ServerMixin:
@pytest.fixture(scope='session')
def setup_mysteryshack_server(self, xprocess):
def preparefunc(cwd):
return wait, ['sh', make_sh, 'testserver']
subprocess.check_call(['sh', make_sh, 'testserver-config'])
xprocess.ensure('mysteryshack_server', preparefunc)
return subprocess.check_output([
os.path.join(
testserver_repo,
'mysteryshack/target/debug/mysteryshack'
),
'-c', '/tmp/mysteryshack/config',
'user',
'authorize',
'testuser',
'https://example.com',
self.storage_class.scope + ':rw'
]).strip().decode()
@pytest.fixture
def get_storage_args(self, monkeypatch, setup_mysteryshack_server):
from requests import Session
monkeypatch.setitem(os.environ, 'OAUTHLIB_INSECURE_TRANSPORT', 'true')
old_request = Session.request
def request(self, method, url, **kw):
url = url.replace('https://', 'http://')
return old_request(self, method, url, **kw)
monkeypatch.setattr(Session, 'request', request)
shutil.rmtree('/tmp/mysteryshack/testuser/data', ignore_errors=True)
shutil.rmtree('/tmp/mysteryshack/testuser/meta', ignore_errors=True)
def inner(**kw):
kw['account'] = 'testuser@127.0.0.1:6767'
kw['access_token'] = setup_mysteryshack_server
if self.storage_class.fileext == '.ics':
kw.setdefault('collection', 'test')
return kw
return inner

View file

@ -1,18 +0,0 @@
#!/bin/sh
set -ex
cd "$(dirname "$0")"
. ./variables.sh
if [ "$CI" = "true" ]; then
curl -sL https://static.rust-lang.org/rustup.sh -o ~/rust-installer/rustup.sh
sh ~/rust-installer/rustup.sh --prefix=~/rust --spec=stable -y --disable-sudo 2> /dev/null
fi
if [ ! -d mysteryshack ]; then
git clone https://github.com/untitaker/mysteryshack
fi
pip install pytest-xprocess
cd mysteryshack
make debug-build # such that first test doesn't hang too long w/o output

View file

@ -1,9 +0,0 @@
#!/bin/sh
set -e
# pytest-xprocess doesn't allow us to CD into a particular directory before
# launching a command, so we do it here.
cd "$(dirname "$0")"
. ./variables.sh
cd mysteryshack
exec make "$@"

View file

@ -1 +0,0 @@
export PATH="$PATH:$HOME/.cargo/bin/"

@ -1 +0,0 @@
Subproject commit a27144ddcf39a3283179a4f7ce1ab22b2e810205

@ -1 +0,0 @@
Subproject commit bb4fcc6f524467d58c95f1dcec8470fdfcd65adf

View file

@ -3,7 +3,13 @@ import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args(self, request, tmpdir, slow_create_collection): def get_storage_args(
self,
request,
tmpdir,
slow_create_collection,
radicale_server,
):
def inner(collection="test"): def inner(collection="test"):
url = "http://127.0.0.1:8001/" url = "http://127.0.0.1:8001/"
args = { args = {

View file

@ -1,4 +0,0 @@
#!/bin/sh
docker-compose build radicale
docker-compose up -d radicale

View file

@ -2,7 +2,6 @@ import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args(self): def get_storage_args(self):
pytest.skip('DAV tests disabled.') pytest.skip("DAV tests disabled.")

View file

@ -3,7 +3,13 @@ import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args(self, request, tmpdir, slow_create_collection): def get_storage_args(
self,
request,
tmpdir,
slow_create_collection,
xandikos_server,
):
def inner(collection="test"): def inner(collection="test"):
url = "http://127.0.0.1:8000/" url = "http://127.0.0.1:8000/"
args = {"url": url} args = {"url": url}

View file

@ -1,4 +0,0 @@
#!/bin/sh
docker-compose build xandikos
docker-compose up -d xandikos

View file

@ -12,72 +12,106 @@ class TestFilesystemStorage(StorageTests):
@pytest.fixture @pytest.fixture
def get_storage_args(self, tmpdir): def get_storage_args(self, tmpdir):
def inner(collection='test'): def inner(collection="test"):
rv = {'path': str(tmpdir), 'fileext': '.txt', 'collection': rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
collection}
if collection is not None: if collection is not None:
rv = self.storage_class.create_collection(**rv) rv = self.storage_class.create_collection(**rv)
return rv return rv
return inner return inner
def test_is_not_directory(self, tmpdir): def test_is_not_directory(self, tmpdir):
with pytest.raises(OSError): with pytest.raises(OSError):
f = tmpdir.join('hue') f = tmpdir.join("hue")
f.write('stub') f.write("stub")
self.storage_class(str(tmpdir) + '/hue', '.txt') self.storage_class(str(tmpdir) + "/hue", ".txt")
def test_broken_data(self, tmpdir): def test_broken_data(self, tmpdir):
s = self.storage_class(str(tmpdir), '.txt') s = self.storage_class(str(tmpdir), ".txt")
class BrokenItem: class BrokenItem:
raw = 'Ц, Ш, Л, ж, Д, З, Ю'.encode() raw = "Ц, Ш, Л, ж, Д, З, Ю".encode()
uid = 'jeezus' uid = "jeezus"
ident = uid ident = uid
with pytest.raises(TypeError): with pytest.raises(TypeError):
s.upload(BrokenItem) s.upload(BrokenItem)
assert not tmpdir.listdir() assert not tmpdir.listdir()
def test_ident_with_slash(self, tmpdir): def test_ident_with_slash(self, tmpdir):
s = self.storage_class(str(tmpdir), '.txt') s = self.storage_class(str(tmpdir), ".txt")
s.upload(Item('UID:a/b/c')) s.upload(Item("UID:a/b/c"))
item_file, = tmpdir.listdir() (item_file,) = tmpdir.listdir()
assert '/' not in item_file.basename and item_file.isfile() assert "/" not in item_file.basename and item_file.isfile()
def test_ignore_tmp_files(self, tmpdir):
"""Test that files with .tmp suffix beside .ics files are ignored."""
s = self.storage_class(str(tmpdir), ".ics")
s.upload(Item("UID:xyzxyz"))
(item_file,) = tmpdir.listdir()
item_file.copy(item_file.new(ext="tmp"))
assert len(tmpdir.listdir()) == 2
assert len(list(s.list())) == 1
def test_ignore_tmp_files_empty_fileext(self, tmpdir):
"""Test that files with .tmp suffix are ignored with empty fileext."""
s = self.storage_class(str(tmpdir), "")
s.upload(Item("UID:xyzxyz"))
(item_file,) = tmpdir.listdir()
item_file.copy(item_file.new(ext="tmp"))
assert len(tmpdir.listdir()) == 2
# assert False, tmpdir.listdir() # enable to see the created filename
assert len(list(s.list())) == 1
def test_ignore_files_typical_backup(self, tmpdir):
"""Test file-name ignorance with typical backup ending ~."""
ignorext = "~" # without dot
storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
storage.upload(Item("UID:xyzxyz"))
(item_file,) = tmpdir.listdir()
item_file.copy(item_file.new(basename=item_file.basename + ignorext))
assert len(tmpdir.listdir()) == 2
assert len(list(storage.list())) == 1
def test_too_long_uid(self, tmpdir): def test_too_long_uid(self, tmpdir):
s = self.storage_class(str(tmpdir), '.txt') storage = self.storage_class(str(tmpdir), ".txt")
item = Item('UID:' + 'hue' * 600) item = Item("UID:" + "hue" * 600)
href, etag = s.upload(item)
href, etag = storage.upload(item)
assert item.uid not in href assert item.uid not in href
def test_post_hook_inactive(self, tmpdir, monkeypatch): def test_post_hook_inactive(self, tmpdir, monkeypatch):
def check_call_mock(*args, **kwargs): def check_call_mock(*args, **kwargs):
raise AssertionError() raise AssertionError()
monkeypatch.setattr(subprocess, 'call', check_call_mock) monkeypatch.setattr(subprocess, "call", check_call_mock)
s = self.storage_class(str(tmpdir), '.txt', post_hook=None) s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
s.upload(Item('UID:a/b/c')) s.upload(Item("UID:a/b/c"))
def test_post_hook_active(self, tmpdir, monkeypatch): def test_post_hook_active(self, tmpdir, monkeypatch):
calls = [] calls = []
exe = 'foo' exe = "foo"
def check_call_mock(call, *args, **kwargs): def check_call_mock(call, *args, **kwargs):
calls.append(True) calls.append(True)
assert len(call) == 2 assert len(call) == 2
assert call[0] == exe assert call[0] == exe
monkeypatch.setattr(subprocess, 'call', check_call_mock) monkeypatch.setattr(subprocess, "call", check_call_mock)
s = self.storage_class(str(tmpdir), '.txt', post_hook=exe) s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
s.upload(Item('UID:a/b/c')) s.upload(Item("UID:a/b/c"))
assert calls assert calls
def test_ignore_git_dirs(self, tmpdir): def test_ignore_git_dirs(self, tmpdir):
tmpdir.mkdir('.git').mkdir('foo') tmpdir.mkdir(".git").mkdir("foo")
tmpdir.mkdir('a') tmpdir.mkdir("a")
tmpdir.mkdir('b') tmpdir.mkdir("b")
assert {c['collection'] for c assert {c["collection"] for c in self.storage_class.discover(str(tmpdir))} == {
in self.storage_class.discover(str(tmpdir))} == {'a', 'b'} "a",
"b",
}

View file

@ -8,42 +8,44 @@ from vdirsyncer.storage.http import prepare_auth
def test_list(monkeypatch): def test_list(monkeypatch):
collection_url = 'http://127.0.0.1/calendar/collection.ics' collection_url = "http://127.0.0.1/calendar/collection.ics"
items = [ items = [
('BEGIN:VEVENT\n' (
'SUMMARY:Eine Kurzinfo\n' "BEGIN:VEVENT\n"
'DESCRIPTION:Beschreibung des Termines\n' "SUMMARY:Eine Kurzinfo\n"
'END:VEVENT'), "DESCRIPTION:Beschreibung des Termines\n"
('BEGIN:VEVENT\n' "END:VEVENT"
'SUMMARY:Eine zweite Küèrzinfo\n' ),
'DESCRIPTION:Beschreibung des anderen Termines\n' (
'BEGIN:VALARM\n' "BEGIN:VEVENT\n"
'ACTION:AUDIO\n' "SUMMARY:Eine zweite Küèrzinfo\n"
'TRIGGER:19980403T120000\n' "DESCRIPTION:Beschreibung des anderen Termines\n"
'ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n' "BEGIN:VALARM\n"
'REPEAT:4\n' "ACTION:AUDIO\n"
'DURATION:PT1H\n' "TRIGGER:19980403T120000\n"
'END:VALARM\n' "ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n"
'END:VEVENT') "REPEAT:4\n"
"DURATION:PT1H\n"
"END:VALARM\n"
"END:VEVENT"
),
] ]
responses = [ responses = ["\n".join(["BEGIN:VCALENDAR"] + items + ["END:VCALENDAR"])] * 2
'\n'.join(['BEGIN:VCALENDAR'] + items + ['END:VCALENDAR'])
] * 2
def get(self, method, url, *a, **kw): def get(self, method, url, *a, **kw):
assert method == 'GET' assert method == "GET"
assert url == collection_url assert url == collection_url
r = Response() r = Response()
r.status_code = 200 r.status_code = 200
assert responses assert responses
r._content = responses.pop().encode('utf-8') r._content = responses.pop().encode("utf-8")
r.headers['Content-Type'] = 'text/calendar' r.headers["Content-Type"] = "text/calendar"
r.encoding = 'ISO-8859-1' r.encoding = "ISO-8859-1"
return r return r
monkeypatch.setattr('requests.sessions.Session.request', get) monkeypatch.setattr("requests.sessions.Session.request", get)
s = HttpStorage(url=collection_url) s = HttpStorage(url=collection_url)
@ -55,8 +57,9 @@ def test_list(monkeypatch):
assert etag2 == etag assert etag2 == etag
found_items[normalize_item(item)] = href found_items[normalize_item(item)] = href
expected = {normalize_item('BEGIN:VCALENDAR\n' + x + '\nEND:VCALENDAR') expected = {
for x in items} normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
}
assert set(found_items) == expected assert set(found_items) == expected
@ -68,7 +71,7 @@ def test_list(monkeypatch):
def test_readonly_param(): def test_readonly_param():
url = 'http://example.com/' url = "http://example.com/"
with pytest.raises(ValueError): with pytest.raises(ValueError):
HttpStorage(url=url, read_only=False) HttpStorage(url=url, read_only=False)
@ -78,43 +81,43 @@ def test_readonly_param():
def test_prepare_auth(): def test_prepare_auth():
assert prepare_auth(None, '', '') is None assert prepare_auth(None, "", "") is None
assert prepare_auth(None, 'user', 'pwd') == ('user', 'pwd') assert prepare_auth(None, "user", "pwd") == ("user", "pwd")
assert prepare_auth('basic', 'user', 'pwd') == ('user', 'pwd') assert prepare_auth("basic", "user", "pwd") == ("user", "pwd")
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
assert prepare_auth('basic', '', 'pwd') assert prepare_auth("basic", "", "pwd")
assert 'you need to specify username and password' in \ assert "you need to specify username and password" in str(excinfo.value).lower()
str(excinfo.value).lower()
from requests.auth import HTTPDigestAuth from requests.auth import HTTPDigestAuth
assert isinstance(prepare_auth('digest', 'user', 'pwd'),
HTTPDigestAuth) assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
prepare_auth('ladida', 'user', 'pwd') prepare_auth("ladida", "user", "pwd")
assert 'unknown authentication method' in str(excinfo.value).lower() assert "unknown authentication method" in str(excinfo.value).lower()
def test_prepare_auth_guess(monkeypatch): def test_prepare_auth_guess(monkeypatch):
import requests_toolbelt.auth.guess import requests_toolbelt.auth.guess
assert isinstance(prepare_auth('guess', 'user', 'pwd'), assert isinstance(
requests_toolbelt.auth.guess.GuessAuth) prepare_auth("guess", "user", "pwd"), requests_toolbelt.auth.guess.GuessAuth
)
monkeypatch.delattr(requests_toolbelt.auth.guess, 'GuessAuth') monkeypatch.delattr(requests_toolbelt.auth.guess, "GuessAuth")
with pytest.raises(UserError) as excinfo: with pytest.raises(UserError) as excinfo:
prepare_auth('guess', 'user', 'pwd') prepare_auth("guess", "user", "pwd")
assert 'requests_toolbelt is too old' in str(excinfo.value).lower() assert "requests_toolbelt is too old" in str(excinfo.value).lower()
def test_verify_false_disallowed(): def test_verify_false_disallowed():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
HttpStorage(url='http://example.com', verify=False) HttpStorage(url="http://example.com", verify=False)
assert 'forbidden' in str(excinfo.value).lower() assert "forbidden" in str(excinfo.value).lower()
assert 'consider setting verify_fingerprint' in str(excinfo.value).lower() assert "consider setting verify_fingerprint" in str(excinfo.value).lower()

View file

@ -8,13 +8,14 @@ from vdirsyncer.storage.singlefile import SingleFileStorage
class CombinedStorage(Storage): class CombinedStorage(Storage):
'''A subclass of HttpStorage to make testing easier. It supports writes via """A subclass of HttpStorage to make testing easier. It supports writes via
SingleFileStorage.''' SingleFileStorage."""
_repr_attributes = ('url', 'path')
storage_name = 'http_and_singlefile' _repr_attributes = ("url", "path")
storage_name = "http_and_singlefile"
def __init__(self, url, path, **kwargs): def __init__(self, url, path, **kwargs):
if kwargs.get('collection', None) is not None: if kwargs.get("collection", None) is not None:
raise ValueError() raise ValueError()
super().__init__(**kwargs) super().__init__(**kwargs)
@ -48,30 +49,30 @@ class TestHttpStorage(StorageTests):
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def setup_tmpdir(self, tmpdir, monkeypatch): def setup_tmpdir(self, tmpdir, monkeypatch):
self.tmpfile = str(tmpdir.ensure('collection.txt')) self.tmpfile = str(tmpdir.ensure("collection.txt"))
def _request(method, url, *args, **kwargs): def _request(method, url, *args, **kwargs):
assert method == 'GET' assert method == "GET"
assert url == 'http://localhost:123/collection.txt' assert url == "http://localhost:123/collection.txt"
assert 'vdirsyncer' in kwargs['headers']['User-Agent'] assert "vdirsyncer" in kwargs["headers"]["User-Agent"]
r = Response() r = Response()
r.status_code = 200 r.status_code = 200
try: try:
with open(self.tmpfile, 'rb') as f: with open(self.tmpfile, "rb") as f:
r._content = f.read() r._content = f.read()
except OSError: except OSError:
r._content = b'' r._content = b""
r.headers['Content-Type'] = 'text/calendar' r.headers["Content-Type"] = "text/calendar"
r.encoding = 'utf-8' r.encoding = "utf-8"
return r return r
monkeypatch.setattr(vdirsyncer.storage.http, 'request', _request) monkeypatch.setattr(vdirsyncer.storage.http, "request", _request)
@pytest.fixture @pytest.fixture
def get_storage_args(self): def get_storage_args(self):
def inner(collection=None): def inner(collection=None):
assert collection is None assert collection is None
return {'url': 'http://localhost:123/collection.txt', return {"url": "http://localhost:123/collection.txt", "path": self.tmpfile}
'path': self.tmpfile}
return inner return inner

View file

@ -11,10 +11,10 @@ class TestSingleFileStorage(StorageTests):
@pytest.fixture @pytest.fixture
def get_storage_args(self, tmpdir): def get_storage_args(self, tmpdir):
def inner(collection='test'): def inner(collection="test"):
rv = {'path': str(tmpdir.join('%s.txt')), rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
'collection': collection}
if collection is not None: if collection is not None:
rv = self.storage_class.create_collection(**rv) rv = self.storage_class.create_collection(**rv)
return rv return rv
return inner return inner

View file

@ -9,20 +9,24 @@ import vdirsyncer.cli as cli
class _CustomRunner: class _CustomRunner:
def __init__(self, tmpdir): def __init__(self, tmpdir):
self.tmpdir = tmpdir self.tmpdir = tmpdir
self.cfg = tmpdir.join('config') self.cfg = tmpdir.join("config")
self.runner = CliRunner() self.runner = CliRunner()
def invoke(self, args, env=None, **kwargs): def invoke(self, args, env=None, **kwargs):
env = env or {} env = env or {}
env.setdefault('VDIRSYNCER_CONFIG', str(self.cfg)) env.setdefault("VDIRSYNCER_CONFIG", str(self.cfg))
return self.runner.invoke(cli.app, args, env=env, **kwargs) return self.runner.invoke(cli.app, args, env=env, **kwargs)
def write_with_general(self, data): def write_with_general(self, data):
self.cfg.write(dedent(''' self.cfg.write(
dedent(
"""
[general] [general]
status_path = "{}/status/" status_path = "{}/status/"
''').format(str(self.tmpdir))) """
self.cfg.write(data, mode='a') ).format(str(self.tmpdir))
)
self.cfg.write(data, mode="a")
@pytest.fixture @pytest.fixture

View file

@ -15,16 +15,18 @@ invalid = object()
def read_config(tmpdir, monkeypatch): def read_config(tmpdir, monkeypatch):
def inner(cfg): def inner(cfg):
errors = [] errors = []
monkeypatch.setattr('vdirsyncer.cli.cli_logger.error', errors.append) monkeypatch.setattr("vdirsyncer.cli.cli_logger.error", errors.append)
f = io.StringIO(dedent(cfg.format(base=str(tmpdir)))) f = io.StringIO(dedent(cfg.format(base=str(tmpdir))))
rv = Config.from_fileobject(f) rv = Config.from_fileobject(f)
monkeypatch.undo() monkeypatch.undo()
return errors, rv return errors, rv
return inner return inner
def test_read_config(read_config): def test_read_config(read_config):
errors, c = read_config(''' errors, c = read_config(
"""
[general] [general]
status_path = "/tmp/status/" status_path = "/tmp/status/"
@ -42,25 +44,32 @@ def test_read_config(read_config):
[storage bob_b] [storage bob_b]
type = "carddav" type = "carddav"
''') """
)
assert c.general == {'status_path': '/tmp/status/'} assert c.general == {"status_path": "/tmp/status/"}
assert set(c.pairs) == {'bob'} assert set(c.pairs) == {"bob"}
bob = c.pairs['bob'] bob = c.pairs["bob"]
assert bob.collections is None assert bob.collections is None
assert c.storages == { assert c.storages == {
'bob_a': {'type': 'filesystem', 'path': '/tmp/contacts/', 'fileext': "bob_a": {
'.vcf', 'yesno': False, 'number': 42, "type": "filesystem",
'instance_name': 'bob_a'}, "path": "/tmp/contacts/",
'bob_b': {'type': 'carddav', 'instance_name': 'bob_b'} "fileext": ".vcf",
"yesno": False,
"number": 42,
"instance_name": "bob_a",
},
"bob_b": {"type": "carddav", "instance_name": "bob_b"},
} }
def test_missing_collections_param(read_config): def test_missing_collections_param(read_config):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
read_config(''' read_config(
"""
[general] [general]
status_path = "/tmp/status/" status_path = "/tmp/status/"
@ -73,27 +82,31 @@ def test_missing_collections_param(read_config):
[storage bob_b] [storage bob_b]
type = "lmao" type = "lmao"
''') """
)
assert 'collections parameter missing' in str(excinfo.value) assert "collections parameter missing" in str(excinfo.value)
def test_invalid_section_type(read_config): def test_invalid_section_type(read_config):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
read_config(''' read_config(
"""
[general] [general]
status_path = "/tmp/status/" status_path = "/tmp/status/"
[bogus] [bogus]
''') """
)
assert 'Unknown section' in str(excinfo.value) assert "Unknown section" in str(excinfo.value)
assert 'bogus' in str(excinfo.value) assert "bogus" in str(excinfo.value)
def test_missing_general_section(read_config): def test_missing_general_section(read_config):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
read_config(''' read_config(
"""
[pair my_pair] [pair my_pair]
a = "my_a" a = "my_a"
b = "my_b" b = "my_b"
@ -108,40 +121,46 @@ def test_missing_general_section(read_config):
type = "filesystem" type = "filesystem"
path = "{base}/path_b/" path = "{base}/path_b/"
fileext = ".txt" fileext = ".txt"
''') """
)
assert 'Invalid general section.' in str(excinfo.value) assert "Invalid general section." in str(excinfo.value)
def test_wrong_general_section(read_config): def test_wrong_general_section(read_config):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
read_config(''' read_config(
"""
[general] [general]
wrong = true wrong = true
''') """
)
assert 'Invalid general section.' in str(excinfo.value) assert "Invalid general section." in str(excinfo.value)
assert excinfo.value.problems == [ assert excinfo.value.problems == [
'general section doesn\'t take the parameters: wrong', "general section doesn't take the parameters: wrong",
'general section is missing the parameters: status_path' "general section is missing the parameters: status_path",
] ]
def test_invalid_storage_name(read_config): def test_invalid_storage_name(read_config):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
read_config(''' read_config(
"""
[general] [general]
status_path = "{base}/status/" status_path = "{base}/status/"
[storage foo.bar] [storage foo.bar]
''') """
)
assert 'invalid characters' in str(excinfo.value).lower() assert "invalid characters" in str(excinfo.value).lower()
def test_invalid_collections_arg(read_config): def test_invalid_collections_arg(read_config):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
read_config(''' read_config(
"""
[general] [general]
status_path = "/tmp/status/" status_path = "/tmp/status/"
@ -159,14 +178,16 @@ def test_invalid_collections_arg(read_config):
type = "filesystem" type = "filesystem"
path = "/tmp/bar/" path = "/tmp/bar/"
fileext = ".txt" fileext = ".txt"
''') """
)
assert 'Expected string' in str(excinfo.value) assert "Expected string" in str(excinfo.value)
def test_duplicate_sections(read_config): def test_duplicate_sections(read_config):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
read_config(''' read_config(
"""
[general] [general]
status_path = "/tmp/status/" status_path = "/tmp/status/"
@ -184,7 +205,8 @@ def test_duplicate_sections(read_config):
type = "filesystem" type = "filesystem"
path = "/tmp/bar/" path = "/tmp/bar/"
fileext = ".txt" fileext = ".txt"
''') """
)
assert 'Name "foobar" already used' in str(excinfo.value) assert 'Name "foobar" already used' in str(excinfo.value)

View file

@ -1,15 +1,16 @@
import json import json
from textwrap import dedent from textwrap import dedent
import hypothesis.strategies as st import pytest
from hypothesis import given
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.storage.base import Storage from vdirsyncer.storage.base import Storage
def test_discover_command(tmpdir, runner): def test_discover_command(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{0}/foo/" path = "{0}/foo/"
@ -24,50 +25,51 @@ def test_discover_command(tmpdir, runner):
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = ["from a"] collections = ["from a"]
''').format(str(tmpdir))) """
).format(str(tmpdir))
)
foo = tmpdir.mkdir('foo') foo = tmpdir.mkdir("foo")
bar = tmpdir.mkdir('bar') bar = tmpdir.mkdir("bar")
for x in 'abc': for x in "abc":
foo.mkdir(x) foo.mkdir(x)
bar.mkdir(x) bar.mkdir(x)
bar.mkdir('d') bar.mkdir("d")
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
foo.mkdir('d') foo.mkdir("d")
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
lines = result.output.splitlines() lines = result.output.splitlines()
assert 'Syncing foobar/a' in lines assert "Syncing foobar/a" in lines
assert 'Syncing foobar/b' in lines assert "Syncing foobar/b" in lines
assert 'Syncing foobar/c' in lines assert "Syncing foobar/c" in lines
assert 'Syncing foobar/d' not in result.output assert "Syncing foobar/d" not in result.output
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
assert 'Syncing foobar/a' in lines assert "Syncing foobar/a" in lines
assert 'Syncing foobar/b' in lines assert "Syncing foobar/b" in lines
assert 'Syncing foobar/c' in lines assert "Syncing foobar/c" in lines
assert 'Syncing foobar/d' in result.output assert "Syncing foobar/d" in result.output
# Check for redundant data that is already in the config. This avoids # Check for redundant data that is already in the config. This avoids
# copying passwords from the config too. # copying passwords from the config too.
assert 'fileext' not in tmpdir \ assert "fileext" not in tmpdir.join("status").join("foobar.collections").read()
.join('status') \
.join('foobar.collections') \
.read()
def test_discover_different_collection_names(tmpdir, runner): def test_discover_different_collection_names(tmpdir, runner):
foo = tmpdir.mkdir('foo') foo = tmpdir.mkdir("foo")
bar = tmpdir.mkdir('bar') bar = tmpdir.mkdir("bar")
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
@ -85,35 +87,39 @@ def test_discover_different_collection_names(tmpdir, runner):
["coll1", "coll_a1", "coll_b1"], ["coll1", "coll_a1", "coll_b1"],
"coll2" "coll2"
] ]
''').format(foo=str(foo), bar=str(bar))) """
).format(foo=str(foo), bar=str(bar))
)
result = runner.invoke(['discover'], input='y\n' * 6) result = runner.invoke(["discover"], input="y\n" * 6)
assert not result.exception assert not result.exception
coll_a1 = foo.join('coll_a1') coll_a1 = foo.join("coll_a1")
coll_b1 = bar.join('coll_b1') coll_b1 = bar.join("coll_b1")
assert coll_a1.exists() assert coll_a1.exists()
assert coll_b1.exists() assert coll_b1.exists()
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
foo_txt = coll_a1.join('foo.txt') foo_txt = coll_a1.join("foo.txt")
foo_txt.write('BEGIN:VCALENDAR\nUID:foo\nEND:VCALENDAR') foo_txt.write("BEGIN:VCALENDAR\nUID:foo\nEND:VCALENDAR")
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
assert foo_txt.exists() assert foo_txt.exists()
assert coll_b1.join('foo.txt').exists() assert coll_b1.join("foo.txt").exists()
def test_discover_direct_path(tmpdir, runner): def test_discover_direct_path(tmpdir, runner):
foo = tmpdir.join('foo') foo = tmpdir.join("foo")
bar = tmpdir.join('bar') bar = tmpdir.join("bar")
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
@ -128,12 +134,14 @@ def test_discover_direct_path(tmpdir, runner):
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = null collections = null
''').format(foo=str(foo), bar=str(bar))) """
).format(foo=str(foo), bar=str(bar))
)
result = runner.invoke(['discover'], input='y\n' * 2) result = runner.invoke(["discover"], input="y\n" * 2)
assert not result.exception assert not result.exception
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
assert foo.exists() assert foo.exists()
@ -141,7 +149,9 @@ def test_discover_direct_path(tmpdir, runner):
def test_null_collection_with_named_collection(tmpdir, runner): def test_null_collection_with_named_collection(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -155,43 +165,56 @@ def test_null_collection_with_named_collection(tmpdir, runner):
[storage bar] [storage bar]
type = "singlefile" type = "singlefile"
path = "{base}/bar.txt" path = "{base}/bar.txt"
'''.format(base=str(tmpdir)))) """.format(
base=str(tmpdir)
)
)
)
result = runner.invoke(['discover'], input='y\n' * 2) result = runner.invoke(["discover"], input="y\n" * 2)
assert not result.exception assert not result.exception
foo = tmpdir.join('foo') foo = tmpdir.join("foo")
foobaz = foo.join('baz') foobaz = foo.join("baz")
assert foo.exists() assert foo.exists()
assert foobaz.exists() assert foobaz.exists()
bar = tmpdir.join('bar.txt') bar = tmpdir.join("bar.txt")
assert bar.exists() assert bar.exists()
foobaz.join('lol.txt').write('BEGIN:VCARD\nUID:HAHA\nEND:VCARD') foobaz.join("lol.txt").write("BEGIN:VCARD\nUID:HAHA\nEND:VCARD")
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
assert 'HAHA' in bar.read() assert "HAHA" in bar.read()
@given(a_requires=st.booleans(), b_requires=st.booleans()) @pytest.mark.parametrize(
def test_collection_required(a_requires, b_requires, tmpdir, runner, "a_requires,b_requires",
monkeypatch): [
(True, True),
(True, False),
(False, True),
(False, False),
],
)
def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch):
class TestStorage(Storage): class TestStorage(Storage):
storage_name = 'test' storage_name = "test"
def __init__(self, require_collection, **kw): def __init__(self, require_collection, **kw):
if require_collection: if require_collection:
assert not kw.get('collection') assert not kw.get("collection")
raise exceptions.CollectionRequired() raise exceptions.CollectionRequired()
from vdirsyncer.cli.utils import storage_names from vdirsyncer.cli.utils import storage_names
monkeypatch.setitem(storage_names._storages, 'test', TestStorage)
runner.write_with_general(dedent(''' monkeypatch.setitem(storage_names._storages, "test", TestStorage)
runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -204,11 +227,15 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner,
[storage bar] [storage bar]
type = "test" type = "test"
require_collection = {b} require_collection = {b}
'''.format(a=json.dumps(a_requires), b=json.dumps(b_requires)))) """.format(
a=json.dumps(a_requires), b=json.dumps(b_requires)
)
)
)
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
if a_requires or b_requires: if a_requires or b_requires:
assert result.exception assert result.exception
assert \ assert (
'One or more storages don\'t support `collections = null`.' in \ "One or more storages don't support `collections = null`." in result.output
result.output )

View file

@ -2,7 +2,9 @@ from textwrap import dedent
def test_get_password_from_command(tmpdir, runner): def test_get_password_from_command(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -17,26 +19,30 @@ def test_get_password_from_command(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{base}/bar/"
fileext.fetch = ["prompt", "Fileext for bar"] fileext.fetch = ["prompt", "Fileext for bar"]
'''.format(base=str(tmpdir)))) """.format(
base=str(tmpdir)
)
)
)
foo = tmpdir.ensure('foo', dir=True) foo = tmpdir.ensure("foo", dir=True)
foo.ensure('a', dir=True) foo.ensure("a", dir=True)
foo.ensure('b', dir=True) foo.ensure("b", dir=True)
foo.ensure('c', dir=True) foo.ensure("c", dir=True)
bar = tmpdir.ensure('bar', dir=True) bar = tmpdir.ensure("bar", dir=True)
bar.ensure('a', dir=True) bar.ensure("a", dir=True)
bar.ensure('b', dir=True) bar.ensure("b", dir=True)
bar.ensure('c', dir=True) bar.ensure("c", dir=True)
result = runner.invoke(['discover'], input='.asdf\n') result = runner.invoke(["discover"], input=".asdf\n")
assert not result.exception assert not result.exception
status = tmpdir.join('status').join('foobar.collections').read() status = tmpdir.join("status").join("foobar.collections").read()
assert 'foo' in status assert "foo" in status
assert 'bar' in status assert "bar" in status
assert 'asdf' not in status assert "asdf" not in status
assert 'txt' not in status assert "txt" not in status
foo.join('a').join('foo.txt').write('BEGIN:VCARD\nUID:foo\nEND:VCARD') foo.join("a").join("foo.txt").write("BEGIN:VCARD\nUID:foo\nEND:VCARD")
result = runner.invoke(['sync'], input='.asdf\n') result = runner.invoke(["sync"], input=".asdf\n")
assert not result.exception assert not result.exception
assert [x.basename for x in bar.join('a').listdir()] == ['foo.asdf'] assert [x.basename for x in bar.join("a").listdir()] == ["foo.asdf"]

View file

@ -5,67 +5,72 @@ import pytest
@pytest.fixture @pytest.fixture
def storage(tmpdir, runner): def storage(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{base}/foo/"
fileext = ".txt" fileext = ".txt"
''').format(base=str(tmpdir))) """
).format(base=str(tmpdir))
)
return tmpdir.mkdir('foo') return tmpdir.mkdir("foo")
@pytest.mark.parametrize('collection', [None, "foocoll"]) @pytest.mark.parametrize("collection", [None, "foocoll"])
def test_basic(storage, runner, collection): def test_basic(storage, runner, collection):
if collection is not None: if collection is not None:
storage = storage.mkdir(collection) storage = storage.mkdir(collection)
collection_arg = f'foo/{collection}' collection_arg = f"foo/{collection}"
else: else:
collection_arg = 'foo' collection_arg = "foo"
argv = ['repair', collection_arg] argv = ["repair", collection_arg]
result = runner.invoke(argv, input='y') result = runner.invoke(argv, input="y")
assert not result.exception assert not result.exception
storage.join('item.txt').write('BEGIN:VCARD\nEND:VCARD') storage.join("item.txt").write("BEGIN:VCARD\nEND:VCARD")
storage.join('toobroken.txt').write('') storage.join("toobroken.txt").write("")
result = runner.invoke(argv, input='y') result = runner.invoke(argv, input="y")
assert not result.exception assert not result.exception
assert 'No UID' in result.output assert "No UID" in result.output
assert '\'toobroken.txt\' is malformed beyond repair' \ assert "'toobroken.txt' is malformed beyond repair" in result.output
in result.output (new_fname,) = [x for x in storage.listdir() if "toobroken" not in str(x)]
new_fname, = [x for x in storage.listdir() if 'toobroken' not in str(x)] assert "UID:" in new_fname.read()
assert 'UID:' in new_fname.read()
@pytest.mark.parametrize('repair_uids', [None, True, False]) @pytest.mark.parametrize("repair_uids", [None, True, False])
def test_repair_uids(storage, runner, repair_uids): def test_repair_uids(storage, runner, repair_uids):
f = storage.join('baduid.txt') f = storage.join("baduid.txt")
orig_f = 'BEGIN:VCARD\nUID:!!!!!\nEND:VCARD' orig_f = "BEGIN:VCARD\nUID:!!!!!\nEND:VCARD"
f.write(orig_f) f.write(orig_f)
if repair_uids is None: if repair_uids is None:
opt = [] opt = []
elif repair_uids: elif repair_uids:
opt = ['--repair-unsafe-uid'] opt = ["--repair-unsafe-uid"]
else: else:
opt = ['--no-repair-unsafe-uid'] opt = ["--no-repair-unsafe-uid"]
result = runner.invoke(['repair'] + opt + ['foo'], input='y') result = runner.invoke(["repair"] + opt + ["foo"], input="y")
assert not result.exception assert not result.exception
if repair_uids: if repair_uids:
assert 'UID or href is unsafe, assigning random UID' in result.output assert "UID or href is unsafe, assigning random UID" in result.output
assert not f.exists() assert not f.exists()
new_f, = storage.listdir() (new_f,) = storage.listdir()
s = new_f.read() s = new_f.read()
assert s.startswith('BEGIN:VCARD') assert s.startswith("BEGIN:VCARD")
assert s.endswith('END:VCARD') assert s.endswith("END:VCARD")
assert s != orig_f assert s != orig_f
else: else:
assert 'UID may cause problems, add --repair-unsafe-uid to repair.' \ assert (
"UID may cause problems, add --repair-unsafe-uid to repair."
in result.output in result.output
)
assert f.read() == orig_f assert f.read() == orig_f

View file

@ -2,14 +2,13 @@ import json
import sys import sys
from textwrap import dedent from textwrap import dedent
import hypothesis.strategies as st
import pytest import pytest
from hypothesis import example
from hypothesis import given
def test_simple_run(tmpdir, runner): def test_simple_run(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair my_pair] [pair my_pair]
a = "my_a" a = "my_a"
b = "my_b" b = "my_b"
@ -24,33 +23,37 @@ def test_simple_run(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{0}/path_b/" path = "{0}/path_b/"
fileext = ".txt" fileext = ".txt"
''').format(str(tmpdir))) """
).format(str(tmpdir))
)
tmpdir.mkdir('path_a') tmpdir.mkdir("path_a")
tmpdir.mkdir('path_b') tmpdir.mkdir("path_b")
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
tmpdir.join('path_a/haha.txt').write('UID:haha') tmpdir.join("path_a/haha.txt").write("UID:haha")
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert 'Copying (uploading) item haha to my_b' in result.output assert "Copying (uploading) item haha to my_b" in result.output
assert tmpdir.join('path_b/haha.txt').read() == 'UID:haha' assert tmpdir.join("path_b/haha.txt").read() == "UID:haha"
def test_sync_inexistant_pair(tmpdir, runner): def test_sync_inexistant_pair(tmpdir, runner):
runner.write_with_general("") runner.write_with_general("")
result = runner.invoke(['sync', 'foo']) result = runner.invoke(["sync", "foo"])
assert result.exception assert result.exception
assert 'pair foo does not exist.' in result.output.lower() assert "pair foo does not exist." in result.output.lower()
def test_debug_connections(tmpdir, runner): def test_debug_connections(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair my_pair] [pair my_pair]
a = "my_a" a = "my_a"
b = "my_b" b = "my_b"
@ -65,23 +68,27 @@ def test_debug_connections(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{0}/path_b/" path = "{0}/path_b/"
fileext = ".txt" fileext = ".txt"
''').format(str(tmpdir))) """
).format(str(tmpdir))
)
tmpdir.mkdir('path_a') tmpdir.mkdir("path_a")
tmpdir.mkdir('path_b') tmpdir.mkdir("path_b")
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['-vdebug', 'sync', '--max-workers=3']) result = runner.invoke(["-vdebug", "sync", "--max-workers=3"])
assert 'using 3 maximal workers' in result.output.lower() assert "using 3 maximal workers" in result.output.lower()
result = runner.invoke(['-vdebug', 'sync']) result = runner.invoke(["-vdebug", "sync"])
assert 'using 1 maximal workers' in result.output.lower() assert "using 1 maximal workers" in result.output.lower()
def test_empty_storage(tmpdir, runner): def test_empty_storage(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair my_pair] [pair my_pair]
a = "my_a" a = "my_a"
b = "my_b" b = "my_b"
@ -96,32 +103,35 @@ def test_empty_storage(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{0}/path_b/" path = "{0}/path_b/"
fileext = ".txt" fileext = ".txt"
''').format(str(tmpdir))) """
).format(str(tmpdir))
)
tmpdir.mkdir('path_a') tmpdir.mkdir("path_a")
tmpdir.mkdir('path_b') tmpdir.mkdir("path_b")
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
tmpdir.join('path_a/haha.txt').write('UID:haha') tmpdir.join("path_a/haha.txt").write("UID:haha")
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
tmpdir.join('path_b/haha.txt').remove() tmpdir.join("path_b/haha.txt").remove()
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
lines = result.output.splitlines() lines = result.output.splitlines()
assert lines[0] == 'Syncing my_pair' assert lines[0] == "Syncing my_pair"
assert lines[1].startswith('error: my_pair: ' assert lines[1].startswith(
'Storage "my_b" was completely emptied.') "error: my_pair: " 'Storage "my_b" was completely emptied.'
)
assert result.exception assert result.exception
def test_verbosity(tmpdir, runner): def test_verbosity(tmpdir, runner):
runner.write_with_general('') runner.write_with_general("")
result = runner.invoke(['--verbosity=HAHA', 'sync']) result = runner.invoke(["--verbosity=HAHA", "sync"])
assert result.exception assert result.exception
assert ( assert (
'invalid value for "--verbosity"' in result.output.lower() 'invalid value for "--verbosity"' in result.output.lower()
@ -130,13 +140,15 @@ def test_verbosity(tmpdir, runner):
def test_collections_cache_invalidation(tmpdir, runner): def test_collections_cache_invalidation(tmpdir, runner):
foo = tmpdir.mkdir('foo') foo = tmpdir.mkdir("foo")
bar = tmpdir.mkdir('bar') bar = tmpdir.mkdir("bar")
for x in 'abc': for x in "abc":
foo.mkdir(x) foo.mkdir(x)
bar.mkdir(x) bar.mkdir(x)
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{0}/foo/" path = "{0}/foo/"
@ -151,22 +163,26 @@ def test_collections_cache_invalidation(tmpdir, runner):
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = ["a", "b", "c"] collections = ["a", "b", "c"]
''').format(str(tmpdir))) """
).format(str(tmpdir))
)
foo.join('a/itemone.txt').write('UID:itemone') foo.join("a/itemone.txt").write("UID:itemone")
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
assert 'detected change in config file' not in result.output.lower() assert "detected change in config file" not in result.output.lower()
rv = bar.join('a').listdir() rv = bar.join("a").listdir()
assert len(rv) == 1 assert len(rv) == 1
assert rv[0].basename == 'itemone.txt' assert rv[0].basename == "itemone.txt"
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{0}/foo/" path = "{0}/foo/"
@ -181,32 +197,36 @@ def test_collections_cache_invalidation(tmpdir, runner):
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = ["a", "b", "c"] collections = ["a", "b", "c"]
''').format(str(tmpdir))) """
).format(str(tmpdir))
)
for entry in tmpdir.join('status').listdir(): for entry in tmpdir.join("status").listdir():
if not str(entry).endswith('.collections'): if not str(entry).endswith(".collections"):
entry.remove() entry.remove()
bar2 = tmpdir.mkdir('bar2') bar2 = tmpdir.mkdir("bar2")
for x in 'abc': for x in "abc":
bar2.mkdir(x) bar2.mkdir(x)
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert 'detected change in config file' in result.output.lower() assert "detected change in config file" in result.output.lower()
assert result.exception assert result.exception
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
rv = bar.join('a').listdir() rv = bar.join("a").listdir()
rv2 = bar2.join('a').listdir() rv2 = bar2.join("a").listdir()
assert len(rv) == len(rv2) == 1 assert len(rv) == len(rv2) == 1
assert rv[0].basename == rv2[0].basename == 'itemone.txt' assert rv[0].basename == rv2[0].basename == "itemone.txt"
def test_invalid_pairs_as_cli_arg(tmpdir, runner): def test_invalid_pairs_as_cli_arg(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{0}/foo/" path = "{0}/foo/"
@ -221,114 +241,117 @@ def test_invalid_pairs_as_cli_arg(tmpdir, runner):
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = ["a", "b", "c"] collections = ["a", "b", "c"]
''').format(str(tmpdir))) """
).format(str(tmpdir))
)
for base in ('foo', 'bar'): for base in ("foo", "bar"):
base = tmpdir.mkdir(base) base = tmpdir.mkdir(base)
for c in 'abc': for c in "abc":
base.mkdir(c) base.mkdir(c)
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['sync', 'foobar/d']) result = runner.invoke(["sync", "foobar/d"])
assert result.exception assert result.exception
assert 'pair foobar: collection "d" not found' in result.output.lower() assert 'pair foobar: collection "d" not found' in result.output.lower()
def test_multiple_pairs(tmpdir, runner): def test_multiple_pairs(tmpdir, runner):
def get_cfg(): def get_cfg():
for name_a, name_b in ('foo', 'bar'), ('bam', 'baz'): for name_a, name_b in ("foo", "bar"), ("bam", "baz"):
yield dedent(''' yield dedent(
"""
[pair {a}{b}] [pair {a}{b}]
a = "{a}" a = "{a}"
b = "{b}" b = "{b}"
collections = null collections = null
''').format(a=name_a, b=name_b) """
).format(a=name_a, b=name_b)
for name in name_a, name_b: for name in name_a, name_b:
yield dedent(''' yield dedent(
"""
[storage {name}] [storage {name}]
type = "filesystem" type = "filesystem"
path = "{path}" path = "{path}"
fileext = ".txt" fileext = ".txt"
''').format(name=name, path=str(tmpdir.mkdir(name))) """
).format(name=name, path=str(tmpdir.mkdir(name)))
runner.write_with_general(''.join(get_cfg())) runner.write_with_general("".join(get_cfg()))
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
assert set(result.output.splitlines()) > { assert set(result.output.splitlines()) > {
'Discovering collections for pair bambaz', "Discovering collections for pair bambaz",
'Discovering collections for pair foobar' "Discovering collections for pair foobar",
} }
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert not result.exception assert not result.exception
assert set(result.output.splitlines()) == { assert set(result.output.splitlines()) == {
'Syncing bambaz', "Syncing bambaz",
'Syncing foobar', "Syncing foobar",
} }
# XXX: https://github.com/pimutils/vdirsyncer/issues/617 # XXX: https://github.com/pimutils/vdirsyncer/issues/617
@pytest.mark.skipif(sys.platform == 'darwin', @pytest.mark.skipif(sys.platform == "darwin", reason="This test inexplicably fails")
reason='This test inexplicably fails') @pytest.mark.parametrize(
@given(collections=st.sets( "collections",
st.text( [
st.characters( ("a", "A"),
blacklist_characters=set( ("\ufffe",),
'./\x00' # Invalid chars on POSIX filesystems ("Hello there!",),
), ("Österreich",),
# Surrogates can't be encoded to utf-8 in Python ("中国", "x1"),
blacklist_categories={'Cs'} ("한글",),
), ("42a4ec99-b1c2-4859-b142-759112f2ca50",),
min_size=1, ("فلسطين",),
max_size=50 ],
), )
min_size=1 def test_create_collections(collections, tmpdir, runner):
))
@example(collections=['persönlich'])
@example(collections={'a', 'A'})
@example(collections={'\ufffe'})
def test_create_collections(subtest, collections):
@subtest runner.write_with_general(
def test_inner(tmpdir, runner): dedent(
runner.write_with_general(dedent(''' """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = {colls} collections = {colls}
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{base}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{base}/bar/"
fileext = ".txt" fileext = ".txt"
'''.format(base=str(tmpdir), colls=json.dumps(list(collections))))) """.format(
base=str(tmpdir), colls=json.dumps(list(collections))
result = runner.invoke( )
['discover'],
input='y\n' * 2 * (len(collections) + 1)
) )
assert not result.exception, result.output )
result = runner.invoke( result = runner.invoke(["discover"], input="y\n" * 2 * (len(collections) + 1))
['sync'] + ['foobar/' + x for x in collections] assert not result.exception, result.output
)
assert not result.exception, result.output
assert {x.basename for x in tmpdir.join('foo').listdir()} == \ result = runner.invoke(["sync"] + ["foobar/" + x for x in collections])
{x.basename for x in tmpdir.join('bar').listdir()} assert not result.exception, result.output
assert {x.basename for x in tmpdir.join("foo").listdir()} == {
x.basename for x in tmpdir.join("bar").listdir()
}
def test_ident_conflict(tmpdir, runner): def test_ident_conflict(tmpdir, runner):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -343,35 +366,51 @@ def test_ident_conflict(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{base}/bar/"
fileext = ".txt" fileext = ".txt"
'''.format(base=str(tmpdir)))) """.format(
base=str(tmpdir)
)
)
)
foo = tmpdir.mkdir('foo') foo = tmpdir.mkdir("foo")
tmpdir.mkdir('bar') tmpdir.mkdir("bar")
foo.join('one.txt').write('UID:1') foo.join("one.txt").write("UID:1")
foo.join('two.txt').write('UID:1') foo.join("two.txt").write("UID:1")
foo.join('three.txt').write('UID:1') foo.join("three.txt").write("UID:1")
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert not result.exception assert not result.exception
result = runner.invoke(['sync']) result = runner.invoke(["sync"])
assert result.exception assert result.exception
assert ('error: foobar: Storage "foo" contains multiple items with the ' assert (
'same UID or even content') in result.output 'error: foobar: Storage "foo" contains multiple items with the '
assert sorted([ "same UID or even content"
'one.txt' in result.output, ) in result.output
'two.txt' in result.output, assert (
'three.txt' in result.output, sorted(
]) == [False, True, True] [
"one.txt" in result.output,
"two.txt" in result.output,
"three.txt" in result.output,
]
)
== [False, True, True]
)
@pytest.mark.parametrize('existing,missing', [ @pytest.mark.parametrize(
('foo', 'bar'), "existing,missing",
('bar', 'foo'), [
]) ("foo", "bar"),
("bar", "foo"),
],
)
def test_unknown_storage(tmpdir, runner, existing, missing): def test_unknown_storage(tmpdir, runner, existing, missing):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -381,35 +420,42 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
type = "filesystem" type = "filesystem"
path = "{base}/{existing}/" path = "{base}/{existing}/"
fileext = ".txt" fileext = ".txt"
'''.format(base=str(tmpdir), existing=existing))) """.format(
base=str(tmpdir), existing=existing
)
)
)
tmpdir.mkdir(existing) tmpdir.mkdir(existing)
result = runner.invoke(['discover']) result = runner.invoke(["discover"])
assert result.exception assert result.exception
assert ( assert (
"Storage '{missing}' not found. " "Storage '{missing}' not found. "
"These are the configured storages: ['{existing}']" "These are the configured storages: ['{existing}']".format(
.format(missing=missing, existing=existing) missing=missing, existing=existing
)
) in result.output ) in result.output
@pytest.mark.parametrize('cmd', ['sync', 'metasync']) @pytest.mark.parametrize("cmd", ["sync", "metasync"])
def test_no_configured_pairs(tmpdir, runner, cmd): def test_no_configured_pairs(tmpdir, runner, cmd):
runner.write_with_general('') runner.write_with_general("")
result = runner.invoke([cmd]) result = runner.invoke([cmd])
assert result.output == 'critical: Nothing to do.\n' assert result.output == "critical: Nothing to do.\n"
assert result.exception.code == 5 assert result.exception.code == 5
@pytest.mark.parametrize('resolution,expect_foo,expect_bar', [ @pytest.mark.parametrize(
(['command', 'cp'], 'UID:lol\nfööcontent', 'UID:lol\nfööcontent') "resolution,expect_foo,expect_bar",
]) [(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, )
expect_bar): def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -425,28 +471,34 @@ def test_conflict_resolution(tmpdir, runner, resolution, expect_foo,
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
path = "{base}/bar" path = "{base}/bar"
'''.format(base=str(tmpdir), val=json.dumps(resolution)))) """.format(
base=str(tmpdir), val=json.dumps(resolution)
)
)
)
foo = tmpdir.join('foo') foo = tmpdir.join("foo")
bar = tmpdir.join('bar') bar = tmpdir.join("bar")
fooitem = foo.join('lol.txt').ensure() fooitem = foo.join("lol.txt").ensure()
fooitem.write('UID:lol\nfööcontent') fooitem.write("UID:lol\nfööcontent")
baritem = bar.join('lol.txt').ensure() baritem = bar.join("lol.txt").ensure()
baritem.write('UID:lol\nbööcontent') baritem.write("UID:lol\nbööcontent")
r = runner.invoke(['discover']) r = runner.invoke(["discover"])
assert not r.exception assert not r.exception
r = runner.invoke(['sync']) r = runner.invoke(["sync"])
assert not r.exception assert not r.exception
assert fooitem.read() == expect_foo assert fooitem.read() == expect_foo
assert baritem.read() == expect_bar assert baritem.read() == expect_bar
@pytest.mark.parametrize('partial_sync', ['error', 'ignore', 'revert', None]) @pytest.mark.parametrize("partial_sync", ["error", "ignore", "revert", None])
def test_partial_sync(tmpdir, runner, partial_sync): def test_partial_sync(tmpdir, runner, partial_sync):
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -463,58 +515,69 @@ def test_partial_sync(tmpdir, runner, partial_sync):
read_only = true read_only = true
fileext = ".txt" fileext = ".txt"
path = "{base}/bar" path = "{base}/bar"
'''.format( """.format(
partial_sync=(f'partial_sync = "{partial_sync}"\n' partial_sync=(
if partial_sync else ''), f'partial_sync = "{partial_sync}"\n' if partial_sync else ""
base=str(tmpdir) ),
))) base=str(tmpdir),
)
)
)
foo = tmpdir.mkdir('foo') foo = tmpdir.mkdir("foo")
bar = tmpdir.mkdir('bar') bar = tmpdir.mkdir("bar")
foo.join('other.txt').write('UID:other') foo.join("other.txt").write("UID:other")
bar.join('other.txt').write('UID:other') bar.join("other.txt").write("UID:other")
baritem = bar.join('lol.txt') baritem = bar.join("lol.txt")
baritem.write('UID:lol') baritem.write("UID:lol")
r = runner.invoke(['discover']) r = runner.invoke(["discover"])
assert not r.exception assert not r.exception
r = runner.invoke(['sync']) r = runner.invoke(["sync"])
assert not r.exception assert not r.exception
fooitem = foo.join('lol.txt') fooitem = foo.join("lol.txt")
fooitem.remove() fooitem.remove()
r = runner.invoke(['sync']) r = runner.invoke(["sync"])
if partial_sync == 'error': if partial_sync == "error":
assert r.exception assert r.exception
assert 'Attempted change' in r.output assert "Attempted change" in r.output
elif partial_sync == 'ignore': elif partial_sync == "ignore":
assert baritem.exists() assert baritem.exists()
r = runner.invoke(['sync']) r = runner.invoke(["sync"])
assert not r.exception assert not r.exception
assert baritem.exists() assert baritem.exists()
else: else:
assert baritem.exists() assert baritem.exists()
r = runner.invoke(['sync']) r = runner.invoke(["sync"])
assert not r.exception assert not r.exception
assert baritem.exists() assert baritem.exists()
assert fooitem.exists() assert fooitem.exists()
def test_fetch_only_necessary_params(tmpdir, runner): def test_fetch_only_necessary_params(tmpdir, runner):
fetched_file = tmpdir.join('fetched_flag') fetched_file = tmpdir.join("fetched_flag")
fetch_script = tmpdir.join('fetch_script') fetch_script = tmpdir.join("fetch_script")
fetch_script.write(dedent(''' fetch_script.write(
dedent(
"""
set -e set -e
touch "{}" touch "{}"
echo ".txt" echo ".txt"
'''.format(str(fetched_file)))) """.format(
str(fetched_file)
)
)
)
runner.write_with_general(dedent(''' runner.write_with_general(
dedent(
"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -539,7 +602,11 @@ def test_fetch_only_necessary_params(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{path}" path = "{path}"
fileext.fetch = ["command", "sh", "{script}"] fileext.fetch = ["command", "sh", "{script}"]
'''.format(path=str(tmpdir.mkdir('bogus')), script=str(fetch_script)))) """.format(
path=str(tmpdir.mkdir("bogus")), script=str(fetch_script)
)
)
)
def fetched(): def fetched():
try: try:
@ -548,18 +615,18 @@ def test_fetch_only_necessary_params(tmpdir, runner):
except Exception: except Exception:
return False return False
r = runner.invoke(['discover']) r = runner.invoke(["discover"])
assert not r.exception assert not r.exception
assert fetched() assert fetched()
r = runner.invoke(['sync', 'foobar']) r = runner.invoke(["sync", "foobar"])
assert not r.exception assert not r.exception
assert not fetched() assert not fetched()
r = runner.invoke(['sync']) r = runner.invoke(["sync"])
assert not r.exception assert not r.exception
assert fetched() assert fetched()
r = runner.invoke(['sync', 'bambar']) r = runner.invoke(["sync", "bambar"])
assert not r.exception assert not r.exception
assert fetched() assert fetched()

View file

@ -6,20 +6,20 @@ from vdirsyncer.cli.utils import storage_names
def test_handle_cli_error(capsys): def test_handle_cli_error(capsys):
try: try:
raise exceptions.InvalidResponse('ayy lmao') raise exceptions.InvalidResponse("ayy lmao")
except BaseException: except BaseException:
handle_cli_error() handle_cli_error()
out, err = capsys.readouterr() out, err = capsys.readouterr()
assert 'returned something vdirsyncer doesn\'t understand' in err assert "returned something vdirsyncer doesn't understand" in err
assert 'ayy lmao' in err assert "ayy lmao" in err
def test_storage_instance_from_config(monkeypatch): def test_storage_instance_from_config(monkeypatch):
def lol(**kw): def lol(**kw):
assert kw == {'foo': 'bar', 'baz': 1} assert kw == {"foo": "bar", "baz": 1}
return 'OK' return "OK"
monkeypatch.setitem(storage_names._storages, 'lol', lol) monkeypatch.setitem(storage_names._storages, "lol", lol)
config = {'type': 'lol', 'foo': 'bar', 'baz': 1} config = {"type": "lol", "foo": "bar", "baz": 1}
assert storage_instance_from_config(config) == 'OK' assert storage_instance_from_config(config) == "OK"

View file

@ -11,7 +11,7 @@ from vdirsyncer import utils
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def no_debug_output(request): def no_debug_output(request):
logger = click_log.basic_config('vdirsyncer') logger = click_log.basic_config("vdirsyncer")
logger.setLevel(logging.WARNING) logger.setLevel(logging.WARNING)
@ -19,49 +19,55 @@ def test_get_storage_init_args():
from vdirsyncer.storage.memory import MemoryStorage from vdirsyncer.storage.memory import MemoryStorage
all, required = utils.get_storage_init_args(MemoryStorage) all, required = utils.get_storage_init_args(MemoryStorage)
assert all == {'fileext', 'collection', 'read_only', 'instance_name'} assert all == {"fileext", "collection", "read_only", "instance_name"}
assert not required assert not required
def test_request_ssl(httpsserver): def test_request_ssl():
httpsserver.serve_content('') # we need to serve something
with pytest.raises(requests.exceptions.ConnectionError) as excinfo: with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
http.request('GET', httpsserver.url) http.request("GET", "https://self-signed.badssl.com/")
assert 'certificate verify failed' in str(excinfo.value) assert "certificate verify failed" in str(excinfo.value)
http.request('GET', httpsserver.url, verify=False) http.request("GET", "https://self-signed.badssl.com/", verify=False)
def _fingerprints_broken(): def _fingerprints_broken():
from pkg_resources import parse_version as ver from pkg_resources import parse_version as ver
broken_urllib3 = ver(requests.__version__) <= ver('2.5.1')
broken_urllib3 = ver(requests.__version__) <= ver("2.5.1")
return broken_urllib3 return broken_urllib3
@pytest.mark.skipif(_fingerprints_broken(), @pytest.mark.skipif(
reason='https://github.com/shazow/urllib3/issues/529') _fingerprints_broken(), reason="https://github.com/shazow/urllib3/issues/529"
@pytest.mark.parametrize('fingerprint', [ )
'94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA', @pytest.mark.parametrize(
'19:90:F7:23:94:F2:EF:AB:2B:64:2D:57:3D:25:95:2D' "fingerprint",
]) [
"94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA",
"19:90:F7:23:94:F2:EF:AB:2B:64:2D:57:3D:25:95:2D",
],
)
def test_request_ssl_fingerprints(httpsserver, fingerprint): def test_request_ssl_fingerprints(httpsserver, fingerprint):
httpsserver.serve_content('') # we need to serve something httpsserver.serve_content("") # we need to serve something
http.request('GET', httpsserver.url, verify=False, http.request("GET", httpsserver.url, verify=False, verify_fingerprint=fingerprint)
verify_fingerprint=fingerprint)
with pytest.raises(requests.exceptions.ConnectionError) as excinfo: with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
http.request('GET', httpsserver.url, http.request("GET", httpsserver.url, verify_fingerprint=fingerprint)
verify_fingerprint=fingerprint)
with pytest.raises(requests.exceptions.ConnectionError) as excinfo: with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
http.request('GET', httpsserver.url, verify=False, http.request(
verify_fingerprint=''.join(reversed(fingerprint))) "GET",
assert 'Fingerprints did not match' in str(excinfo.value) httpsserver.url,
verify=False,
verify_fingerprint="".join(reversed(fingerprint)),
)
assert "Fingerprints did not match" in str(excinfo.value)
def test_open_graphical_browser(monkeypatch): def test_open_graphical_browser(monkeypatch):
import webbrowser import webbrowser
# Just assert that this internal attribute still exists and behaves the way # Just assert that this internal attribute still exists and behaves the way
# expected # expected
if sys.version_info < (3, 7): if sys.version_info < (3, 7):
@ -69,9 +75,9 @@ def test_open_graphical_browser(monkeypatch):
else: else:
assert webbrowser._tryorder is None assert webbrowser._tryorder is None
monkeypatch.setattr('webbrowser._tryorder', []) monkeypatch.setattr("webbrowser._tryorder", [])
with pytest.raises(RuntimeError) as excinfo: with pytest.raises(RuntimeError) as excinfo:
utils.open_graphical_browser('http://example.com') utils.open_graphical_browser("http://example.com")
assert 'No graphical browser found' in str(excinfo.value) assert "No graphical browser found" in str(excinfo.value)

View file

@ -7,18 +7,20 @@ from vdirsyncer.vobject import Item
def test_conflict_resolution_command(): def test_conflict_resolution_command():
def check_call(command): def check_call(command):
command, a_tmp, b_tmp = command command, a_tmp, b_tmp = command
assert command == os.path.expanduser('~/command') assert command == os.path.expanduser("~/command")
with open(a_tmp) as f: with open(a_tmp) as f:
assert f.read() == a.raw assert f.read() == a.raw
with open(b_tmp) as f: with open(b_tmp) as f:
assert f.read() == b.raw assert f.read() == b.raw
with open(b_tmp, 'w') as f: with open(b_tmp, "w") as f:
f.write(a.raw) f.write(a.raw)
a = Item('UID:AAAAAAA') a = Item("UID:AAAAAAA")
b = Item('UID:BBBBBBB') b = Item("UID:BBBBBBB")
assert _resolve_conflict_via_command( assert (
a, b, ['~/command'], 'a', 'b', _resolve_conflict_via_command(
_check_call=check_call a, b, ["~/command"], "a", "b", _check_call=check_call
).raw == a.raw ).raw
== a.raw
)

View file

@ -6,74 +6,161 @@ from vdirsyncer.cli.discover import expand_collections
missing = object() missing = object()
@pytest.mark.parametrize('shortcuts,expected', [ @pytest.mark.parametrize(
(['from a'], [ "shortcuts,expected",
('c1', ({'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'}, [
{'type': 'fooboo', 'custom_arg': 'b1', 'collection': 'c1'})), (
('c2', ({'type': 'fooboo', 'custom_arg': 'a2', 'collection': 'c2'}, ["from a"],
{'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'})), [
('a3', ({'type': 'fooboo', 'custom_arg': 'a3', 'collection': 'a3'}, (
missing)) "c1",
]), (
(['from b'], [ {"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
('c1', ({'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'}, {"type": "fooboo", "custom_arg": "b1", "collection": "c1"},
{'type': 'fooboo', 'custom_arg': 'b1', 'collection': 'c1'})), ),
('c2', ({'type': 'fooboo', 'custom_arg': 'a2', 'collection': 'c2'}, ),
{'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'})), (
('b3', (missing, "c2",
{'type': 'fooboo', 'custom_arg': 'b3', 'collection': 'b3'})) (
]), {"type": "fooboo", "custom_arg": "a2", "collection": "c2"},
(['from a', 'from b'], [ {"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
('c1', ({'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'}, ),
{'type': 'fooboo', 'custom_arg': 'b1', 'collection': 'c1'})), ),
('c2', ({'type': 'fooboo', 'custom_arg': 'a2', 'collection': 'c2'}, (
{'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'})), "a3",
('a3', ({'type': 'fooboo', 'custom_arg': 'a3', 'collection': 'a3'}, (
missing)), {"type": "fooboo", "custom_arg": "a3", "collection": "a3"},
('b3', (missing, missing,
{'type': 'fooboo', 'custom_arg': 'b3', 'collection': 'b3'})) ),
]), ),
([['c12', 'c1', 'c2']], [ ],
('c12', ({'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'}, ),
{'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'})), (
]), ["from b"],
(None, [ [
(None, ({'type': 'fooboo', 'storage_side': 'a', 'collection': None}, (
{'type': 'fooboo', 'storage_side': 'b', 'collection': None})) "c1",
]), (
([None], [ {"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
(None, ({'type': 'fooboo', 'storage_side': 'a', 'collection': None}, {"type": "fooboo", "custom_arg": "b1", "collection": "c1"},
{'type': 'fooboo', 'storage_side': 'b', 'collection': None})) ),
]), ),
]) (
"c2",
(
{"type": "fooboo", "custom_arg": "a2", "collection": "c2"},
{"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
),
),
(
"b3",
(
missing,
{"type": "fooboo", "custom_arg": "b3", "collection": "b3"},
),
),
],
),
(
["from a", "from b"],
[
(
"c1",
(
{"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
{"type": "fooboo", "custom_arg": "b1", "collection": "c1"},
),
),
(
"c2",
(
{"type": "fooboo", "custom_arg": "a2", "collection": "c2"},
{"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
),
),
(
"a3",
(
{"type": "fooboo", "custom_arg": "a3", "collection": "a3"},
missing,
),
),
(
"b3",
(
missing,
{"type": "fooboo", "custom_arg": "b3", "collection": "b3"},
),
),
],
),
(
[["c12", "c1", "c2"]],
[
(
"c12",
(
{"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
{"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
),
),
],
),
(
None,
[
(
None,
(
{"type": "fooboo", "storage_side": "a", "collection": None},
{"type": "fooboo", "storage_side": "b", "collection": None},
),
)
],
),
(
[None],
[
(
None,
(
{"type": "fooboo", "storage_side": "a", "collection": None},
{"type": "fooboo", "storage_side": "b", "collection": None},
),
)
],
),
],
)
def test_expand_collections(shortcuts, expected): def test_expand_collections(shortcuts, expected):
config_a = { config_a = {"type": "fooboo", "storage_side": "a"}
'type': 'fooboo',
'storage_side': 'a'
}
config_b = { config_b = {"type": "fooboo", "storage_side": "b"}
'type': 'fooboo',
'storage_side': 'b'
}
def get_discovered_a(): def get_discovered_a():
return { return {
'c1': {'type': 'fooboo', 'custom_arg': 'a1', 'collection': 'c1'}, "c1": {"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
'c2': {'type': 'fooboo', 'custom_arg': 'a2', 'collection': 'c2'}, "c2": {"type": "fooboo", "custom_arg": "a2", "collection": "c2"},
'a3': {'type': 'fooboo', 'custom_arg': 'a3', 'collection': 'a3'} "a3": {"type": "fooboo", "custom_arg": "a3", "collection": "a3"},
} }
def get_discovered_b(): def get_discovered_b():
return { return {
'c1': {'type': 'fooboo', 'custom_arg': 'b1', 'collection': 'c1'}, "c1": {"type": "fooboo", "custom_arg": "b1", "collection": "c1"},
'c2': {'type': 'fooboo', 'custom_arg': 'b2', 'collection': 'c2'}, "c2": {"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
'b3': {'type': 'fooboo', 'custom_arg': 'b3', 'collection': 'b3'} "b3": {"type": "fooboo", "custom_arg": "b3", "collection": "b3"},
} }
assert sorted(expand_collections( assert (
shortcuts, sorted(
config_a, config_b, expand_collections(
get_discovered_a, get_discovered_b, shortcuts,
lambda config, collection: missing config_a,
)) == sorted(expected) config_b,
get_discovered_a,
get_discovered_b,
lambda config, collection: missing,
)
)
== sorted(expected)
)

View file

@ -1,3 +1,6 @@
from contextlib import contextmanager
from unittest.mock import patch
import hypothesis.strategies as st import hypothesis.strategies as st
import pytest import pytest
from hypothesis import given from hypothesis import given
@ -12,11 +15,23 @@ def mystrategy(monkeypatch):
def strategy(x): def strategy(x):
calls.append(x) calls.append(x)
return x return x
calls = [] calls = []
monkeypatch.setitem(STRATEGIES, 'mystrategy', strategy) monkeypatch.setitem(STRATEGIES, "mystrategy", strategy)
return calls return calls
@contextmanager
def dummy_strategy():
def strategy(x):
calls.append(x)
return x
calls = []
with patch.dict(STRATEGIES, {"mystrategy": strategy}):
yield calls
@pytest.fixture @pytest.fixture
def value_cache(monkeypatch): def value_cache(monkeypatch):
_cache = {} _cache = {}
@ -30,70 +45,59 @@ def value_cache(monkeypatch):
def get_context(*a, **kw): def get_context(*a, **kw):
return FakeContext() return FakeContext()
monkeypatch.setattr('click.get_current_context', get_context) monkeypatch.setattr("click.get_current_context", get_context)
return _cache return _cache
def test_key_conflict(monkeypatch, mystrategy): def test_key_conflict(monkeypatch, mystrategy):
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
expand_fetch_params({ expand_fetch_params({"foo": "bar", "foo.fetch": ["mystrategy", "baz"]})
'foo': 'bar',
'foo.fetch': ['mystrategy', 'baz']
})
assert 'Can\'t set foo.fetch and foo.' in str(excinfo.value) assert "Can't set foo.fetch and foo." in str(excinfo.value)
@given(s=st.text(), t=st.text(min_size=1)) @given(s=st.text(), t=st.text(min_size=1))
def test_fuzzing(s, t, mystrategy): def test_fuzzing(s, t):
config = expand_fetch_params({ with dummy_strategy():
f'{s}.fetch': ['mystrategy', t] config = expand_fetch_params({f"{s}.fetch": ["mystrategy", t]})
})
assert config[s] == t assert config[s] == t
@pytest.mark.parametrize('value', [ @pytest.mark.parametrize("value", [[], "lol", 42])
[],
'lol',
42
])
def test_invalid_fetch_value(mystrategy, value): def test_invalid_fetch_value(mystrategy, value):
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
expand_fetch_params({ expand_fetch_params({"foo.fetch": value})
'foo.fetch': value
})
assert 'Expected a list' in str(excinfo.value) or \ assert "Expected a list" in str(
'Expected list of length > 0' in str(excinfo.value) excinfo.value
) or "Expected list of length > 0" in str(excinfo.value)
def test_unknown_strategy(): def test_unknown_strategy():
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
expand_fetch_params({ expand_fetch_params({"foo.fetch": ["unreal", "asdf"]})
'foo.fetch': ['unreal', 'asdf']
})
assert 'Unknown strategy' in str(excinfo.value) assert "Unknown strategy" in str(excinfo.value)
def test_caching(monkeypatch, mystrategy, value_cache): def test_caching(monkeypatch, mystrategy, value_cache):
orig_cfg = {'foo.fetch': ['mystrategy', 'asdf']} orig_cfg = {"foo.fetch": ["mystrategy", "asdf"]}
rv = expand_fetch_params(orig_cfg) rv = expand_fetch_params(orig_cfg)
assert rv['foo'] == 'asdf' assert rv["foo"] == "asdf"
assert mystrategy == ['asdf'] assert mystrategy == ["asdf"]
assert len(value_cache) == 1 assert len(value_cache) == 1
rv = expand_fetch_params(orig_cfg) rv = expand_fetch_params(orig_cfg)
assert rv['foo'] == 'asdf' assert rv["foo"] == "asdf"
assert mystrategy == ['asdf'] assert mystrategy == ["asdf"]
assert len(value_cache) == 1 assert len(value_cache) == 1
value_cache.clear() value_cache.clear()
rv = expand_fetch_params(orig_cfg) rv = expand_fetch_params(orig_cfg)
assert rv['foo'] == 'asdf' assert rv["foo"] == "asdf"
assert mystrategy == ['asdf'] * 2 assert mystrategy == ["asdf"] * 2
assert len(value_cache) == 1 assert len(value_cache) == 1
@ -104,9 +108,9 @@ def test_failed_strategy(monkeypatch, value_cache):
calls.append(x) calls.append(x)
raise KeyboardInterrupt() raise KeyboardInterrupt()
monkeypatch.setitem(STRATEGIES, 'mystrategy', strategy) monkeypatch.setitem(STRATEGIES, "mystrategy", strategy)
orig_cfg = {'foo.fetch': ['mystrategy', 'asdf']} orig_cfg = {"foo.fetch": ["mystrategy", "asdf"]}
for _ in range(2): for _ in range(2):
with pytest.raises(KeyboardInterrupt): with pytest.raises(KeyboardInterrupt):
@ -118,9 +122,8 @@ def test_failed_strategy(monkeypatch, value_cache):
def test_empty_value(monkeypatch, mystrategy): def test_empty_value(monkeypatch, mystrategy):
with pytest.raises(exceptions.UserError) as excinfo: with pytest.raises(exceptions.UserError) as excinfo:
expand_fetch_params({ expand_fetch_params({"foo.fetch": ["mystrategy", ""]})
'foo.fetch': ['mystrategy', '']
})
assert 'Empty value for foo.fetch, this most likely indicates an error' \ assert "Empty value for foo.fetch, this most likely indicates an error" in str(
in str(excinfo.value) excinfo.value
)

View file

@ -1,42 +1,35 @@
import hypothesis.strategies as st import hypothesis.strategies as st
import pytest
from hypothesis import assume from hypothesis import assume
from hypothesis import given from hypothesis import given
from vdirsyncer.sync.status import SqliteStatus from vdirsyncer.sync.status import SqliteStatus
@pytest.fixture(params=[
SqliteStatus
])
def new_status(request):
return request.param
status_dict_strategy = st.dictionaries( status_dict_strategy = st.dictionaries(
st.text(), st.text(),
st.tuples(*( st.tuples(
st.fixed_dictionaries({ *(
'href': st.text(), st.fixed_dictionaries(
'hash': st.text(), {"href": st.text(), "hash": st.text(), "etag": st.text()}
'etag': st.text() )
}) for _ in range(2) for _ in range(2)
)) )
),
) )
@given(status_dict=status_dict_strategy) @given(status_dict=status_dict_strategy)
def test_legacy_status(new_status, status_dict): def test_legacy_status(status_dict):
hrefs_a = {meta_a['href'] for meta_a, meta_b in status_dict.values()} hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()}
hrefs_b = {meta_b['href'] for meta_a, meta_b in status_dict.values()} hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()}
assume(len(hrefs_a) == len(status_dict) == len(hrefs_b)) assume(len(hrefs_a) == len(status_dict) == len(hrefs_b))
status = new_status() status = SqliteStatus()
status.load_legacy_status(status_dict) status.load_legacy_status(status_dict)
assert dict(status.to_legacy_status()) == status_dict assert dict(status.to_legacy_status()) == status_dict
for ident, (meta_a, meta_b) in status_dict.items(): for ident, (meta_a, meta_b) in status_dict.items():
ident_a, meta2_a = status.get_by_href_a(meta_a['href']) ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
ident_b, meta2_b = status.get_by_href_b(meta_b['href']) ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
assert meta2_a.to_status() == meta_a assert meta2_a.to_status() == meta_a
assert meta2_b.to_status() == meta_b assert meta2_b.to_status() == meta_b
assert ident_a == ident_b == ident assert ident_a == ident_b == ident

View file

@ -22,7 +22,7 @@ from vdirsyncer.vobject import Item
def sync(a, b, status, *args, **kwargs): def sync(a, b, status, *args, **kwargs):
new_status = SqliteStatus(':memory:') new_status = SqliteStatus(":memory:")
new_status.load_legacy_status(status) new_status.load_legacy_status(status)
rv = _sync(a, b, new_status, *args, **kwargs) rv = _sync(a, b, new_status, *args, **kwargs)
status.clear() status.clear()
@ -41,7 +41,7 @@ def items(s):
def test_irrelevant_status(): def test_irrelevant_status():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {'1': ('1', 1234, '1.ics', 2345)} status = {"1": ("1", 1234, "1.ics", 2345)}
sync(a, b, status) sync(a, b, status)
assert not status assert not status
assert not items(a) assert not items(a)
@ -52,7 +52,7 @@ def test_missing_status():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item = Item('asdf') item = Item("asdf")
a.upload(item) a.upload(item)
b.upload(item) b.upload(item)
sync(a, b, status) sync(a, b, status)
@ -65,14 +65,14 @@ def test_missing_status_and_different_items():
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item1 = Item('UID:1\nhaha') item1 = Item("UID:1\nhaha")
item2 = Item('UID:1\nhoho') item2 = Item("UID:1\nhoho")
a.upload(item1) a.upload(item1)
b.upload(item2) b.upload(item2)
with pytest.raises(SyncConflict): with pytest.raises(SyncConflict):
sync(a, b, status) sync(a, b, status)
assert not status assert not status
sync(a, b, status, conflict_resolution='a wins') sync(a, b, status, conflict_resolution="a wins")
assert items(a) == items(b) == {item1.raw} assert items(a) == items(b) == {item1.raw}
@ -82,8 +82,8 @@ def test_read_only_and_prefetch():
b.read_only = True b.read_only = True
status = {} status = {}
item1 = Item('UID:1\nhaha') item1 = Item("UID:1\nhaha")
item2 = Item('UID:2\nhoho') item2 = Item("UID:2\nhoho")
a.upload(item1) a.upload(item1)
a.upload(item2) a.upload(item2)
@ -98,11 +98,11 @@ def test_partial_sync_error():
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
a.upload(Item('UID:0')) a.upload(Item("UID:0"))
b.read_only = True b.read_only = True
with pytest.raises(PartialSync): with pytest.raises(PartialSync):
sync(a, b, status, partial_sync='error') sync(a, b, status, partial_sync="error")
def test_partial_sync_ignore(): def test_partial_sync_ignore():
@ -110,17 +110,17 @@ def test_partial_sync_ignore():
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item0 = Item('UID:0\nhehe') item0 = Item("UID:0\nhehe")
a.upload(item0) a.upload(item0)
b.upload(item0) b.upload(item0)
b.read_only = True b.read_only = True
item1 = Item('UID:1\nhaha') item1 = Item("UID:1\nhaha")
a.upload(item1) a.upload(item1)
sync(a, b, status, partial_sync='ignore') sync(a, b, status, partial_sync="ignore")
sync(a, b, status, partial_sync='ignore') sync(a, b, status, partial_sync="ignore")
assert items(a) == {item0.raw, item1.raw} assert items(a) == {item0.raw, item1.raw}
assert items(b) == {item0.raw} assert items(b) == {item0.raw}
@ -131,69 +131,69 @@ def test_partial_sync_ignore2():
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href, etag = a.upload(Item('UID:0')) href, etag = a.upload(Item("UID:0"))
a.read_only = True a.read_only = True
sync(a, b, status, partial_sync='ignore', force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
assert items(b) == items(a) == {'UID:0'} assert items(b) == items(a) == {"UID:0"}
b.items.clear() b.items.clear()
sync(a, b, status, partial_sync='ignore', force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
sync(a, b, status, partial_sync='ignore', force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
assert items(a) == {'UID:0'} assert items(a) == {"UID:0"}
assert not b.items assert not b.items
a.read_only = False a.read_only = False
a.update(href, Item('UID:0\nupdated'), etag) a.update(href, Item("UID:0\nupdated"), etag)
a.read_only = True a.read_only = True
sync(a, b, status, partial_sync='ignore', force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
assert items(b) == items(a) == {'UID:0\nupdated'} assert items(b) == items(a) == {"UID:0\nupdated"}
def test_upload_and_update(): def test_upload_and_update():
a = MemoryStorage(fileext='.a') a = MemoryStorage(fileext=".a")
b = MemoryStorage(fileext='.b') b = MemoryStorage(fileext=".b")
status = {} status = {}
item = Item('UID:1') # new item 1 in a item = Item("UID:1") # new item 1 in a
a.upload(item) a.upload(item)
sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw} assert items(b) == items(a) == {item.raw}
item = Item('UID:1\nASDF:YES') # update of item 1 in b item = Item("UID:1\nASDF:YES") # update of item 1 in b
b.update('1.b', item, b.get('1.b')[1]) b.update("1.b", item, b.get("1.b")[1])
sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw} assert items(b) == items(a) == {item.raw}
item2 = Item('UID:2') # new item 2 in b item2 = Item("UID:2") # new item 2 in b
b.upload(item2) b.upload(item2)
sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw, item2.raw} assert items(b) == items(a) == {item.raw, item2.raw}
item2 = Item('UID:2\nASDF:YES') # update of item 2 in a item2 = Item("UID:2\nASDF:YES") # update of item 2 in a
a.update('2.a', item2, a.get('2.a')[1]) a.update("2.a", item2, a.get("2.a")[1])
sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw, item2.raw} assert items(b) == items(a) == {item.raw, item2.raw}
def test_deletion(): def test_deletion():
a = MemoryStorage(fileext='.a') a = MemoryStorage(fileext=".a")
b = MemoryStorage(fileext='.b') b = MemoryStorage(fileext=".b")
status = {} status = {}
item = Item('UID:1') item = Item("UID:1")
a.upload(item) a.upload(item)
item2 = Item('UID:2') item2 = Item("UID:2")
a.upload(item2) a.upload(item2)
sync(a, b, status) sync(a, b, status)
b.delete('1.b', b.get('1.b')[1]) b.delete("1.b", b.get("1.b")[1])
sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {item2.raw} assert items(a) == items(b) == {item2.raw}
a.upload(item) a.upload(item)
sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {item.raw, item2.raw} assert items(a) == items(b) == {item.raw, item2.raw}
a.delete('1.a', a.get('1.a')[1]) a.delete("1.a", a.get("1.a")[1])
sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {item2.raw} assert items(a) == items(b) == {item2.raw}
@ -203,38 +203,34 @@ def test_insert_hash():
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item = Item('UID:1') item = Item("UID:1")
href, etag = a.upload(item) href, etag = a.upload(item)
sync(a, b, status) sync(a, b, status)
for d in status['1']: for d in status["1"]:
del d['hash'] del d["hash"]
a.update(href, Item('UID:1\nHAHA:YES'), etag) a.update(href, Item("UID:1\nHAHA:YES"), etag)
sync(a, b, status) sync(a, b, status)
assert 'hash' in status['1'][0] and 'hash' in status['1'][1] assert "hash" in status["1"][0] and "hash" in status["1"][1]
def test_already_synced(): def test_already_synced():
a = MemoryStorage(fileext='.a') a = MemoryStorage(fileext=".a")
b = MemoryStorage(fileext='.b') b = MemoryStorage(fileext=".b")
item = Item('UID:1') item = Item("UID:1")
a.upload(item) a.upload(item)
b.upload(item) b.upload(item)
status = { status = {
'1': ({ "1": (
'href': '1.a', {"href": "1.a", "hash": item.hash, "etag": a.get("1.a")[1]},
'hash': item.hash, {"href": "1.b", "hash": item.hash, "etag": b.get("1.b")[1]},
'etag': a.get('1.a')[1] )
}, {
'href': '1.b',
'hash': item.hash,
'etag': b.get('1.b')[1]
})
} }
old_status = deepcopy(status) old_status = deepcopy(status)
a.update = b.update = a.upload = b.upload = \ a.update = b.update = a.upload = b.upload = lambda *a, **kw: pytest.fail(
lambda *a, **kw: pytest.fail('Method shouldn\'t have been called.') "Method shouldn't have been called."
)
for _ in (1, 2): for _ in (1, 2):
sync(a, b, status) sync(a, b, status)
@ -242,38 +238,38 @@ def test_already_synced():
assert items(a) == items(b) == {item.raw} assert items(a) == items(b) == {item.raw}
@pytest.mark.parametrize('winning_storage', 'ab') @pytest.mark.parametrize("winning_storage", "ab")
def test_conflict_resolution_both_etags_new(winning_storage): def test_conflict_resolution_both_etags_new(winning_storage):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item('UID:1') item = Item("UID:1")
href_a, etag_a = a.upload(item) href_a, etag_a = a.upload(item)
href_b, etag_b = b.upload(item) href_b, etag_b = b.upload(item)
status = {} status = {}
sync(a, b, status) sync(a, b, status)
assert status assert status
item_a = Item('UID:1\nitem a') item_a = Item("UID:1\nitem a")
item_b = Item('UID:1\nitem b') item_b = Item("UID:1\nitem b")
a.update(href_a, item_a, etag_a) a.update(href_a, item_a, etag_a)
b.update(href_b, item_b, etag_b) b.update(href_b, item_b, etag_b)
with pytest.raises(SyncConflict): with pytest.raises(SyncConflict):
sync(a, b, status) sync(a, b, status)
sync(a, b, status, conflict_resolution=f'{winning_storage} wins') sync(a, b, status, conflict_resolution=f"{winning_storage} wins")
assert items(a) == items(b) == { assert (
item_a.raw if winning_storage == 'a' else item_b.raw items(a) == items(b) == {item_a.raw if winning_storage == "a" else item_b.raw}
} )
def test_updated_and_deleted(): def test_updated_and_deleted():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
href_a, etag_a = a.upload(Item('UID:1')) href_a, etag_a = a.upload(Item("UID:1"))
status = {} status = {}
sync(a, b, status, force_delete=True) sync(a, b, status, force_delete=True)
(href_b, etag_b), = b.list() ((href_b, etag_b),) = b.list()
b.delete(href_b, etag_b) b.delete(href_b, etag_b)
updated = Item('UID:1\nupdated') updated = Item("UID:1\nupdated")
a.update(href_a, updated, etag_a) a.update(href_a, updated, etag_a)
sync(a, b, status, force_delete=True) sync(a, b, status, force_delete=True)
@ -283,35 +279,35 @@ def test_updated_and_deleted():
def test_conflict_resolution_invalid_mode(): def test_conflict_resolution_invalid_mode():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item_a = Item('UID:1\nitem a') item_a = Item("UID:1\nitem a")
item_b = Item('UID:1\nitem b') item_b = Item("UID:1\nitem b")
a.upload(item_a) a.upload(item_a)
b.upload(item_b) b.upload(item_b)
with pytest.raises(ValueError): with pytest.raises(ValueError):
sync(a, b, {}, conflict_resolution='yolo') sync(a, b, {}, conflict_resolution="yolo")
def test_conflict_resolution_new_etags_without_changes(): def test_conflict_resolution_new_etags_without_changes():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item('UID:1') item = Item("UID:1")
href_a, etag_a = a.upload(item) href_a, etag_a = a.upload(item)
href_b, etag_b = b.upload(item) href_b, etag_b = b.upload(item)
status = {'1': (href_a, 'BOGUS_a', href_b, 'BOGUS_b')} status = {"1": (href_a, "BOGUS_a", href_b, "BOGUS_b")}
sync(a, b, status) sync(a, b, status)
(ident, (status_a, status_b)), = status.items() ((ident, (status_a, status_b)),) = status.items()
assert ident == '1' assert ident == "1"
assert status_a['href'] == href_a assert status_a["href"] == href_a
assert status_a['etag'] == etag_a assert status_a["etag"] == etag_a
assert status_b['href'] == href_b assert status_b["href"] == href_b
assert status_b['etag'] == etag_b assert status_b["etag"] == etag_b
def test_uses_get_multi(monkeypatch): def test_uses_get_multi(monkeypatch):
def breakdown(*a, **kw): def breakdown(*a, **kw):
raise AssertionError('Expected use of get_multi') raise AssertionError("Expected use of get_multi")
get_multi_calls = [] get_multi_calls = []
@ -324,12 +320,12 @@ def test_uses_get_multi(monkeypatch):
item, etag = old_get(self, href) item, etag = old_get(self, href)
yield href, item, etag yield href, item, etag
monkeypatch.setattr(MemoryStorage, 'get', breakdown) monkeypatch.setattr(MemoryStorage, "get", breakdown)
monkeypatch.setattr(MemoryStorage, 'get_multi', get_multi) monkeypatch.setattr(MemoryStorage, "get_multi", get_multi)
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item('UID:1') item = Item("UID:1")
expected_href, etag = a.upload(item) expected_href, etag = a.upload(item)
sync(a, b, {}) sync(a, b, {})
@ -339,8 +335,8 @@ def test_uses_get_multi(monkeypatch):
def test_empty_storage_dataloss(): def test_empty_storage_dataloss():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
a.upload(Item('UID:1')) a.upload(Item("UID:1"))
a.upload(Item('UID:2')) a.upload(Item("UID:2"))
status = {} status = {}
sync(a, b, status) sync(a, b, status)
with pytest.raises(StorageEmpty): with pytest.raises(StorageEmpty):
@ -353,22 +349,22 @@ def test_empty_storage_dataloss():
def test_no_uids(): def test_no_uids():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
a.upload(Item('ASDF')) a.upload(Item("ASDF"))
b.upload(Item('FOOBAR')) b.upload(Item("FOOBAR"))
status = {} status = {}
sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {'ASDF', 'FOOBAR'} assert items(a) == items(b) == {"ASDF", "FOOBAR"}
def test_changed_uids(): def test_changed_uids():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
href_a, etag_a = a.upload(Item('UID:A-ONE')) href_a, etag_a = a.upload(Item("UID:A-ONE"))
href_b, etag_b = b.upload(Item('UID:B-ONE')) href_b, etag_b = b.upload(Item("UID:B-ONE"))
status = {} status = {}
sync(a, b, status) sync(a, b, status)
a.update(href_a, Item('UID:A-TWO'), etag_a) a.update(href_a, Item("UID:A-TWO"), etag_a)
sync(a, b, status) sync(a, b, status)
@ -383,71 +379,71 @@ def test_both_readonly():
def test_partial_sync_revert(): def test_partial_sync_revert():
a = MemoryStorage(instance_name='a') a = MemoryStorage(instance_name="a")
b = MemoryStorage(instance_name='b') b = MemoryStorage(instance_name="b")
status = {} status = {}
a.upload(Item('UID:1')) a.upload(Item("UID:1"))
b.upload(Item('UID:2')) b.upload(Item("UID:2"))
b.read_only = True b.read_only = True
sync(a, b, status, partial_sync='revert') sync(a, b, status, partial_sync="revert")
assert len(status) == 2 assert len(status) == 2
assert items(a) == {'UID:1', 'UID:2'} assert items(a) == {"UID:1", "UID:2"}
assert items(b) == {'UID:2'} assert items(b) == {"UID:2"}
sync(a, b, status, partial_sync='revert') sync(a, b, status, partial_sync="revert")
assert len(status) == 1 assert len(status) == 1
assert items(a) == {'UID:2'} assert items(a) == {"UID:2"}
assert items(b) == {'UID:2'} assert items(b) == {"UID:2"}
# Check that updates get reverted # Check that updates get reverted
a.items[next(iter(a.items))] = ('foo', Item('UID:2\nupdated')) a.items[next(iter(a.items))] = ("foo", Item("UID:2\nupdated"))
assert items(a) == {'UID:2\nupdated'} assert items(a) == {"UID:2\nupdated"}
sync(a, b, status, partial_sync='revert') sync(a, b, status, partial_sync="revert")
assert len(status) == 1 assert len(status) == 1
assert items(a) == {'UID:2\nupdated'} assert items(a) == {"UID:2\nupdated"}
sync(a, b, status, partial_sync='revert') sync(a, b, status, partial_sync="revert")
assert items(a) == {'UID:2'} assert items(a) == {"UID:2"}
# Check that deletions get reverted # Check that deletions get reverted
a.items.clear() a.items.clear()
sync(a, b, status, partial_sync='revert', force_delete=True) sync(a, b, status, partial_sync="revert", force_delete=True)
sync(a, b, status, partial_sync='revert', force_delete=True) sync(a, b, status, partial_sync="revert", force_delete=True)
assert items(a) == {'UID:2'} assert items(a) == {"UID:2"}
@pytest.mark.parametrize('sync_inbetween', (True, False)) @pytest.mark.parametrize("sync_inbetween", (True, False))
def test_ident_conflict(sync_inbetween): def test_ident_conflict(sync_inbetween):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href_a, etag_a = a.upload(Item('UID:aaa')) href_a, etag_a = a.upload(Item("UID:aaa"))
href_b, etag_b = a.upload(Item('UID:bbb')) href_b, etag_b = a.upload(Item("UID:bbb"))
if sync_inbetween: if sync_inbetween:
sync(a, b, status) sync(a, b, status)
a.update(href_a, Item('UID:xxx'), etag_a) a.update(href_a, Item("UID:xxx"), etag_a)
a.update(href_b, Item('UID:xxx'), etag_b) a.update(href_b, Item("UID:xxx"), etag_b)
with pytest.raises(IdentConflict): with pytest.raises(IdentConflict):
sync(a, b, status) sync(a, b, status)
def test_moved_href(): def test_moved_href():
''' """
Concrete application: ppl_ stores contact aliases in filenames, which means Concrete application: ppl_ stores contact aliases in filenames, which means
item's hrefs get changed. Vdirsyncer doesn't synchronize this data, but item's hrefs get changed. Vdirsyncer doesn't synchronize this data, but
also shouldn't do things like deleting and re-uploading to the server. also shouldn't do things like deleting and re-uploading to the server.
.. _ppl: http://ppladdressbook.org/ .. _ppl: http://ppladdressbook.org/
''' """
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href, etag = a.upload(Item('UID:haha')) href, etag = a.upload(Item("UID:haha"))
sync(a, b, status) sync(a, b, status)
b.items['lol'] = b.items.pop('haha') b.items["lol"] = b.items.pop("haha")
# The sync algorithm should prefetch `lol`, see that it's the same ident # The sync algorithm should prefetch `lol`, see that it's the same ident
# and not do anything else. # and not do anything else.
@ -457,8 +453,8 @@ def test_moved_href():
sync(a, b, status) sync(a, b, status)
assert len(status) == 1 assert len(status) == 1
assert items(a) == items(b) == {'UID:haha'} assert items(a) == items(b) == {"UID:haha"}
assert status['haha'][1]['href'] == 'lol' assert status["haha"][1]["href"] == "lol"
old_status = deepcopy(status) old_status = deepcopy(status)
# Further sync should be a noop. Not even prefetching should occur. # Further sync should be a noop. Not even prefetching should occur.
@ -466,39 +462,39 @@ def test_moved_href():
sync(a, b, status) sync(a, b, status)
assert old_status == status assert old_status == status
assert items(a) == items(b) == {'UID:haha'} assert items(a) == items(b) == {"UID:haha"}
def test_bogus_etag_change(): def test_bogus_etag_change():
'''Assert that sync algorithm is resilient against etag changes if content """Assert that sync algorithm is resilient against etag changes if content
didn\'t change. didn\'t change.
In this particular case we test a scenario where both etags have been In this particular case we test a scenario where both etags have been
updated, but only one side actually changed its item content. updated, but only one side actually changed its item content.
''' """
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href_a, etag_a = a.upload(Item('UID:ASDASD')) href_a, etag_a = a.upload(Item("UID:ASDASD"))
sync(a, b, status) sync(a, b, status)
assert len(status) == len(list(a.list())) == len(list(b.list())) == 1 assert len(status) == len(list(a.list())) == len(list(b.list())) == 1
(href_b, etag_b), = b.list() ((href_b, etag_b),) = b.list()
a.update(href_a, Item('UID:ASDASD'), etag_a) a.update(href_a, Item("UID:ASDASD"), etag_a)
b.update(href_b, Item('UID:ASDASD\nACTUALCHANGE:YES'), etag_b) b.update(href_b, Item("UID:ASDASD\nACTUALCHANGE:YES"), etag_b)
b.delete = b.update = b.upload = blow_up b.delete = b.update = b.upload = blow_up
sync(a, b, status) sync(a, b, status)
assert len(status) == 1 assert len(status) == 1
assert items(a) == items(b) == {'UID:ASDASD\nACTUALCHANGE:YES'} assert items(a) == items(b) == {"UID:ASDASD\nACTUALCHANGE:YES"}
def test_unicode_hrefs(): def test_unicode_hrefs():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href, etag = a.upload(Item('UID:äää')) href, etag = a.upload(Item("UID:äää"))
sync(a, b, status) sync(a, b, status)
@ -511,27 +507,27 @@ def action_failure(*a, **kw):
class SyncMachine(RuleBasedStateMachine): class SyncMachine(RuleBasedStateMachine):
Status = Bundle('status') Status = Bundle("status")
Storage = Bundle('storage') Storage = Bundle("storage")
@rule(target=Storage, @rule(target=Storage, flaky_etags=st.booleans(), null_etag_on_upload=st.booleans())
flaky_etags=st.booleans(),
null_etag_on_upload=st.booleans())
def newstorage(self, flaky_etags, null_etag_on_upload): def newstorage(self, flaky_etags, null_etag_on_upload):
s = MemoryStorage() s = MemoryStorage()
if flaky_etags: if flaky_etags:
def get(href): def get(href):
old_etag, item = s.items[href] old_etag, item = s.items[href]
etag = _random_string() etag = _random_string()
s.items[href] = etag, item s.items[href] = etag, item
return item, etag return item, etag
s.get = get s.get = get
if null_etag_on_upload: if null_etag_on_upload:
_old_upload = s.upload _old_upload = s.upload
_old_update = s.update _old_update = s.update
s.upload = lambda item: (_old_upload(item)[0], 'NULL') s.upload = lambda item: (_old_upload(item)[0], "NULL")
s.update = lambda h, i, e: _old_update(h, i, e) and 'NULL' s.update = lambda h, i, e: _old_update(h, i, e) and "NULL"
return s return s
@ -564,11 +560,9 @@ class SyncMachine(RuleBasedStateMachine):
def newstatus(self): def newstatus(self):
return {} return {}
@rule(storage=Storage, @rule(storage=Storage, uid=uid_strategy, etag=st.text())
uid=uid_strategy,
etag=st.text())
def upload(self, storage, uid, etag): def upload(self, storage, uid, etag):
item = Item(f'UID:{uid}') item = Item(f"UID:{uid}")
storage.items[uid] = (etag, item) storage.items[uid] = (etag, item)
@rule(storage=Storage, href=st.text()) @rule(storage=Storage, href=st.text())
@ -577,22 +571,31 @@ class SyncMachine(RuleBasedStateMachine):
@rule( @rule(
status=Status, status=Status,
a=Storage, b=Storage, a=Storage,
b=Storage,
force_delete=st.booleans(), force_delete=st.booleans(),
conflict_resolution=st.one_of((st.just('a wins'), st.just('b wins'))), conflict_resolution=st.one_of((st.just("a wins"), st.just("b wins"))),
with_error_callback=st.booleans(), with_error_callback=st.booleans(),
partial_sync=st.one_of(( partial_sync=st.one_of(
st.just('ignore'), st.just('revert'), st.just('error') (st.just("ignore"), st.just("revert"), st.just("error"))
)) ),
) )
def sync(self, status, a, b, force_delete, conflict_resolution, def sync(
with_error_callback, partial_sync): self,
status,
a,
b,
force_delete,
conflict_resolution,
with_error_callback,
partial_sync,
):
assume(a is not b) assume(a is not b)
old_items_a = items(a) old_items_a = items(a)
old_items_b = items(b) old_items_b = items(b)
a.instance_name = 'a' a.instance_name = "a"
b.instance_name = 'b' b.instance_name = "b"
errors = [] errors = []
@ -605,16 +608,20 @@ class SyncMachine(RuleBasedStateMachine):
# If one storage is read-only, double-sync because changes don't # If one storage is read-only, double-sync because changes don't
# get reverted immediately. # get reverted immediately.
for _ in range(2 if a.read_only or b.read_only else 1): for _ in range(2 if a.read_only or b.read_only else 1):
sync(a, b, status, sync(
force_delete=force_delete, a,
conflict_resolution=conflict_resolution, b,
error_callback=error_callback, status,
partial_sync=partial_sync) force_delete=force_delete,
conflict_resolution=conflict_resolution,
error_callback=error_callback,
partial_sync=partial_sync,
)
for e in errors: for e in errors:
raise e raise e
except PartialSync: except PartialSync:
assert partial_sync == 'error' assert partial_sync == "error"
except ActionIntentionallyFailed: except ActionIntentionallyFailed:
pass pass
except BothReadOnly: except BothReadOnly:
@ -629,49 +636,55 @@ class SyncMachine(RuleBasedStateMachine):
items_a = items(a) items_a = items(a)
items_b = items(b) items_b = items(b)
assert items_a == items_b or partial_sync == 'ignore' assert items_a == items_b or partial_sync == "ignore"
assert items_a == old_items_a or not a.read_only assert items_a == old_items_a or not a.read_only
assert items_b == old_items_b or not b.read_only assert items_b == old_items_b or not b.read_only
assert set(a.items) | set(b.items) == set(status) or \ assert (
partial_sync == 'ignore' set(a.items) | set(b.items) == set(status) or partial_sync == "ignore"
)
TestSyncMachine = SyncMachine.TestCase TestSyncMachine = SyncMachine.TestCase
@pytest.mark.parametrize('error_callback', [True, False]) @pytest.mark.parametrize("error_callback", [True, False])
def test_rollback(error_callback): def test_rollback(error_callback):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
a.items['0'] = ('', Item('UID:0')) a.items["0"] = ("", Item("UID:0"))
b.items['1'] = ('', Item('UID:1')) b.items["1"] = ("", Item("UID:1"))
b.upload = b.update = b.delete = action_failure b.upload = b.update = b.delete = action_failure
if error_callback: if error_callback:
errors = [] errors = []
sync(a, b, status=status, conflict_resolution='a wins', sync(
error_callback=errors.append) a,
b,
status=status,
conflict_resolution="a wins",
error_callback=errors.append,
)
assert len(errors) == 1 assert len(errors) == 1
assert isinstance(errors[0], ActionIntentionallyFailed) assert isinstance(errors[0], ActionIntentionallyFailed)
assert len(status) == 1 assert len(status) == 1
assert status['1'] assert status["1"]
else: else:
with pytest.raises(ActionIntentionallyFailed): with pytest.raises(ActionIntentionallyFailed):
sync(a, b, status=status, conflict_resolution='a wins') sync(a, b, status=status, conflict_resolution="a wins")
def test_duplicate_hrefs(): def test_duplicate_hrefs():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
a.list = lambda: [('a', 'a')] * 3 a.list = lambda: [("a", "a")] * 3
a.items['a'] = ('a', Item('UID:a')) a.items["a"] = ("a", Item("UID:a"))
status = {} status = {}
sync(a, b, status) sync(a, b, status)

View file

@ -2,13 +2,12 @@ from vdirsyncer import exceptions
def test_user_error_problems(): def test_user_error_problems():
e = exceptions.UserError('A few problems occurred', problems=[ e = exceptions.UserError(
'Problem one', "A few problems occurred",
'Problem two', problems=["Problem one", "Problem two", "Problem three"],
'Problem three' )
])
assert 'one' in str(e) assert "one" in str(e)
assert 'two' in str(e) assert "two" in str(e)
assert 'three' in str(e) assert "three" in str(e)
assert 'problems occurred' in str(e) assert "problems occurred" in str(e)

View file

@ -15,7 +15,7 @@ from vdirsyncer.storage.memory import MemoryStorage
def test_irrelevant_status(): def test_irrelevant_status():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {'foo': 'bar'} status = {"foo": "bar"}
metasync(a, b, status, keys=()) metasync(a, b, status, keys=())
assert not status assert not status
@ -26,24 +26,24 @@ def test_basic(monkeypatch):
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
a.set_meta('foo', 'bar') a.set_meta("foo", "bar")
metasync(a, b, status, keys=['foo']) metasync(a, b, status, keys=["foo"])
assert a.get_meta('foo') == b.get_meta('foo') == 'bar' assert a.get_meta("foo") == b.get_meta("foo") == "bar"
a.set_meta('foo', 'baz') a.set_meta("foo", "baz")
metasync(a, b, status, keys=['foo']) metasync(a, b, status, keys=["foo"])
assert a.get_meta('foo') == b.get_meta('foo') == 'baz' assert a.get_meta("foo") == b.get_meta("foo") == "baz"
monkeypatch.setattr(a, 'set_meta', blow_up) monkeypatch.setattr(a, "set_meta", blow_up)
monkeypatch.setattr(b, 'set_meta', blow_up) monkeypatch.setattr(b, "set_meta", blow_up)
metasync(a, b, status, keys=['foo']) metasync(a, b, status, keys=["foo"])
assert a.get_meta('foo') == b.get_meta('foo') == 'baz' assert a.get_meta("foo") == b.get_meta("foo") == "baz"
monkeypatch.undo() monkeypatch.undo()
monkeypatch.undo() monkeypatch.undo()
b.set_meta('foo', None) b.set_meta("foo", None)
metasync(a, b, status, keys=['foo']) metasync(a, b, status, keys=["foo"])
assert not a.get_meta('foo') and not b.get_meta('foo') assert not a.get_meta("foo") and not b.get_meta("foo")
@pytest.fixture @pytest.fixture
@ -51,12 +51,12 @@ def conflict_state(request):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
a.set_meta('foo', 'bar') a.set_meta("foo", "bar")
b.set_meta('foo', 'baz') b.set_meta("foo", "baz")
def cleanup(): def cleanup():
assert a.get_meta('foo') == 'bar' assert a.get_meta("foo") == "bar"
assert b.get_meta('foo') == 'baz' assert b.get_meta("foo") == "baz"
assert not status assert not status
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
@ -68,54 +68,61 @@ def test_conflict(conflict_state):
a, b, status = conflict_state a, b, status = conflict_state
with pytest.raises(MetaSyncConflict): with pytest.raises(MetaSyncConflict):
metasync(a, b, status, keys=['foo']) metasync(a, b, status, keys=["foo"])
def test_invalid_conflict_resolution(conflict_state): def test_invalid_conflict_resolution(conflict_state):
a, b, status = conflict_state a, b, status = conflict_state
with pytest.raises(UserError) as excinfo: with pytest.raises(UserError) as excinfo:
metasync(a, b, status, keys=['foo'], conflict_resolution='foo') metasync(a, b, status, keys=["foo"], conflict_resolution="foo")
assert 'Invalid conflict resolution setting' in str(excinfo.value) assert "Invalid conflict resolution setting" in str(excinfo.value)
def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch): def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch):
a, b, status = conflict_state a, b, status = conflict_state
warnings = [] warnings = []
monkeypatch.setattr(logger, 'warning', warnings.append) monkeypatch.setattr(logger, "warning", warnings.append)
with pytest.raises(MetaSyncConflict): with pytest.raises(MetaSyncConflict):
metasync(a, b, status, keys=['foo'], metasync(a, b, status, keys=["foo"], conflict_resolution=lambda *a, **kw: None)
conflict_resolution=lambda *a, **kw: None)
assert warnings == ['Custom commands don\'t work on metasync.'] assert warnings == ["Custom commands don't work on metasync."]
def test_conflict_same_content(): def test_conflict_same_content():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
a.set_meta('foo', 'bar') a.set_meta("foo", "bar")
b.set_meta('foo', 'bar') b.set_meta("foo", "bar")
metasync(a, b, status, keys=['foo']) metasync(a, b, status, keys=["foo"])
assert a.get_meta('foo') == b.get_meta('foo') == status['foo'] == 'bar' assert a.get_meta("foo") == b.get_meta("foo") == status["foo"] == "bar"
@pytest.mark.parametrize('wins', 'ab') @pytest.mark.parametrize("wins", "ab")
def test_conflict_x_wins(wins): def test_conflict_x_wins(wins):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
a.set_meta('foo', 'bar') a.set_meta("foo", "bar")
b.set_meta('foo', 'baz') b.set_meta("foo", "baz")
metasync(a, b, status, keys=['foo'], metasync(
conflict_resolution='a wins' if wins == 'a' else 'b wins') a,
b,
status,
keys=["foo"],
conflict_resolution="a wins" if wins == "a" else "b wins",
)
assert a.get_meta('foo') == b.get_meta('foo') == status['foo'] == ( assert (
'bar' if wins == 'a' else 'baz' a.get_meta("foo")
== b.get_meta("foo")
== status["foo"]
== ("bar" if wins == "a" else "baz")
) )
@ -125,33 +132,40 @@ metadata = st.dictionaries(keys, values)
@given( @given(
a=metadata, b=metadata, a=metadata,
status=metadata, keys=st.sets(keys), b=metadata,
conflict_resolution=st.just('a wins') | st.just('b wins') status=metadata,
keys=st.sets(keys),
conflict_resolution=st.just("a wins") | st.just("b wins"),
)
@example(
a={"0": "0"}, b={}, status={"0": "0"}, keys={"0"}, conflict_resolution="a wins"
)
@example(
a={"0": "0"},
b={"0": "1"},
status={"0": "0"},
keys={"0"},
conflict_resolution="a wins",
) )
@example(a={'0': '0'}, b={}, status={'0': '0'}, keys={'0'},
conflict_resolution='a wins')
@example(a={'0': '0'}, b={'0': '1'}, status={'0': '0'}, keys={'0'},
conflict_resolution='a wins')
def test_fuzzing(a, b, status, keys, conflict_resolution): def test_fuzzing(a, b, status, keys, conflict_resolution):
def _get_storage(m, instance_name): def _get_storage(m, instance_name):
s = MemoryStorage(instance_name=instance_name) s = MemoryStorage(instance_name=instance_name)
s.metadata = m s.metadata = m
return s return s
a = _get_storage(a, 'A') a = _get_storage(a, "A")
b = _get_storage(b, 'B') b = _get_storage(b, "B")
winning_storage = (a if conflict_resolution == 'a wins' else b) winning_storage = a if conflict_resolution == "a wins" else b
expected_values = {key: winning_storage.get_meta(key) expected_values = {
for key in keys key: winning_storage.get_meta(key) for key in keys if key not in status
if key not in status} }
metasync(a, b, status, metasync(a, b, status, keys=keys, conflict_resolution=conflict_resolution)
keys=keys, conflict_resolution=conflict_resolution)
for key in keys: for key in keys:
s = status.get(key, '') s = status.get(key, "")
assert a.get_meta(key) == b.get_meta(key) == s assert a.get_meta(key) == b.get_meta(key) == s
if expected_values.get(key, '') and s: if expected_values.get(key, "") and s:
assert s == expected_values[key] assert s == expected_values[key]

View file

@ -18,14 +18,8 @@ from vdirsyncer.vobject import Item
def test_repair_uids(uid): def test_repair_uids(uid):
s = MemoryStorage() s = MemoryStorage()
s.items = { s.items = {
'one': ( "one": ("asdf", Item(f"BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD")),
'asdf', "two": ("asdf", Item(f"BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD")),
Item(f'BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD')
),
'two': (
'asdf',
Item(f'BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD')
)
} }
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()] uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
@ -42,7 +36,7 @@ def test_repair_uids(uid):
@settings(suppress_health_check=HealthCheck.all()) @settings(suppress_health_check=HealthCheck.all())
def test_repair_unsafe_uids(uid): def test_repair_unsafe_uids(uid):
s = MemoryStorage() s = MemoryStorage()
item = Item(f'BEGIN:VCARD\nUID:{uid}\nEND:VCARD') item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
href, etag = s.upload(item) href, etag = s.upload(item)
assert s.get(href)[0].uid == uid assert s.get(href)[0].uid == uid
assert not href_safe(uid) assert not href_safe(uid)
@ -55,12 +49,11 @@ def test_repair_unsafe_uids(uid):
assert href_safe(newuid) assert href_safe(newuid)
@pytest.mark.parametrize('uid,href', [ @pytest.mark.parametrize(
('b@dh0mbr3', 'perfectly-fine'), "uid,href", [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
('perfectly-fine', 'b@dh0mbr3') )
])
def test_repair_unsafe_href(uid, href): def test_repair_unsafe_href(uid, href):
item = Item(f'BEGIN:VCARD\nUID:{uid}\nEND:VCARD') item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
new_item = repair_item(href, item, set(), True) new_item = repair_item(href, item, set(), True)
assert new_item.raw != item.raw assert new_item.raw != item.raw
assert new_item.uid != item.uid assert new_item.uid != item.uid
@ -68,18 +61,14 @@ def test_repair_unsafe_href(uid, href):
def test_repair_do_nothing(): def test_repair_do_nothing():
item = Item('BEGIN:VCARD\nUID:justfine\nEND:VCARD') item = Item("BEGIN:VCARD\nUID:justfine\nEND:VCARD")
assert repair_item('fine', item, set(), True) is item assert repair_item("fine", item, set(), True) is item
assert repair_item('@@@@/fine', item, set(), True) is item assert repair_item("@@@@/fine", item, set(), True) is item
@pytest.mark.parametrize('raw', [ @pytest.mark.parametrize(
'AYYY', "raw", ["AYYY", "", "@@@@", "BEGIN:VCARD", "BEGIN:FOO\nEND:FOO"]
'', )
'@@@@',
'BEGIN:VCARD',
'BEGIN:FOO\nEND:FOO'
])
def test_repair_irreparable(raw): def test_repair_irreparable(raw):
with pytest.raises(IrreparableItem): with pytest.raises(IrreparableItem):
repair_item('fine', Item(raw), set(), True) repair_item("fine", Item(raw), set(), True)

View file

@ -20,40 +20,35 @@ from tests import VCARD_TEMPLATE
_simple_split = [ _simple_split = [
VCARD_TEMPLATE.format(r=123, uid=123), VCARD_TEMPLATE.format(r=123, uid=123),
VCARD_TEMPLATE.format(r=345, uid=345), VCARD_TEMPLATE.format(r=345, uid=345),
VCARD_TEMPLATE.format(r=678, uid=678) VCARD_TEMPLATE.format(r=678, uid=678),
] ]
_simple_joined = '\r\n'.join( _simple_joined = "\r\n".join(
['BEGIN:VADDRESSBOOK'] ["BEGIN:VADDRESSBOOK"] + _simple_split + ["END:VADDRESSBOOK\r\n"]
+ _simple_split
+ ['END:VADDRESSBOOK\r\n']
) )
def test_split_collection_simple(benchmark): def test_split_collection_simple(benchmark):
given = benchmark(lambda: list(vobject.split_collection(_simple_joined))) given = benchmark(lambda: list(vobject.split_collection(_simple_joined)))
assert [normalize_item(item) for item in given] == \ assert [normalize_item(item) for item in given] == [
[normalize_item(item) for item in _simple_split] normalize_item(item) for item in _simple_split
]
assert [x.splitlines() for x in given] == \ assert [x.splitlines() for x in given] == [x.splitlines() for x in _simple_split]
[x.splitlines() for x in _simple_split]
def test_split_collection_multiple_wrappers(benchmark): def test_split_collection_multiple_wrappers(benchmark):
joined = '\r\n'.join( joined = "\r\n".join(
'BEGIN:VADDRESSBOOK\r\n' "BEGIN:VADDRESSBOOK\r\n" + x + "\r\nEND:VADDRESSBOOK\r\n" for x in _simple_split
+ x
+ '\r\nEND:VADDRESSBOOK\r\n'
for x in _simple_split
) )
given = benchmark(lambda: list(vobject.split_collection(joined))) given = benchmark(lambda: list(vobject.split_collection(joined)))
assert [normalize_item(item) for item in given] == \ assert [normalize_item(item) for item in given] == [
[normalize_item(item) for item in _simple_split] normalize_item(item) for item in _simple_split
]
assert [x.splitlines() for x in given] == \ assert [x.splitlines() for x in given] == [x.splitlines() for x in _simple_split]
[x.splitlines() for x in _simple_split]
def test_join_collection_simple(benchmark): def test_join_collection_simple(benchmark):
@ -63,8 +58,11 @@ def test_join_collection_simple(benchmark):
def test_join_collection_vevents(benchmark): def test_join_collection_vevents(benchmark):
actual = benchmark(lambda: vobject.join_collection([ actual = benchmark(
dedent(""" lambda: vobject.join_collection(
[
dedent(
"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:HUEHUE PRODID:HUEHUE
@ -75,10 +73,15 @@ def test_join_collection_vevents(benchmark):
VALUE:Event {} VALUE:Event {}
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
""").format(i) for i in range(3) """
])) ).format(i)
for i in range(3)
]
)
)
expected = dedent(""" expected = dedent(
"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:HUEHUE PRODID:HUEHUE
@ -95,7 +98,8 @@ def test_join_collection_vevents(benchmark):
VALUE:Event 2 VALUE:Event 2
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
""").lstrip() """
).lstrip()
assert actual.splitlines() == expected.splitlines() assert actual.splitlines() == expected.splitlines()
@ -103,34 +107,29 @@ def test_join_collection_vevents(benchmark):
def test_split_collection_timezones(): def test_split_collection_timezones():
items = [ items = [
BARE_EVENT_TEMPLATE.format(r=123, uid=123), BARE_EVENT_TEMPLATE.format(r=123, uid=123),
BARE_EVENT_TEMPLATE.format(r=345, uid=345) BARE_EVENT_TEMPLATE.format(r=345, uid=345),
] ]
timezone = ( timezone = (
'BEGIN:VTIMEZONE\r\n' "BEGIN:VTIMEZONE\r\n"
'TZID:/mozilla.org/20070129_1/Asia/Tokyo\r\n' "TZID:/mozilla.org/20070129_1/Asia/Tokyo\r\n"
'X-LIC-LOCATION:Asia/Tokyo\r\n' "X-LIC-LOCATION:Asia/Tokyo\r\n"
'BEGIN:STANDARD\r\n' "BEGIN:STANDARD\r\n"
'TZOFFSETFROM:+0900\r\n' "TZOFFSETFROM:+0900\r\n"
'TZOFFSETTO:+0900\r\n' "TZOFFSETTO:+0900\r\n"
'TZNAME:JST\r\n' "TZNAME:JST\r\n"
'DTSTART:19700101T000000\r\n' "DTSTART:19700101T000000\r\n"
'END:STANDARD\r\n' "END:STANDARD\r\n"
'END:VTIMEZONE' "END:VTIMEZONE"
) )
full = '\r\n'.join( full = "\r\n".join(["BEGIN:VCALENDAR"] + items + [timezone, "END:VCALENDAR"])
['BEGIN:VCALENDAR']
+ items
+ [timezone, 'END:VCALENDAR']
)
given = {normalize_item(item) given = {normalize_item(item) for item in vobject.split_collection(full)}
for item in vobject.split_collection(full)}
expected = { expected = {
normalize_item('\r\n'.join(( normalize_item(
'BEGIN:VCALENDAR', item, timezone, 'END:VCALENDAR' "\r\n".join(("BEGIN:VCALENDAR", item, timezone, "END:VCALENDAR"))
))) )
for item in items for item in items
} }
@ -138,32 +137,28 @@ def test_split_collection_timezones():
def test_split_contacts(): def test_split_contacts():
bare = '\r\n'.join([VCARD_TEMPLATE.format(r=x, uid=x) for x in range(4)]) bare = "\r\n".join([VCARD_TEMPLATE.format(r=x, uid=x) for x in range(4)])
with_wrapper = 'BEGIN:VADDRESSBOOK\r\n' + bare + '\nEND:VADDRESSBOOK\r\n' with_wrapper = "BEGIN:VADDRESSBOOK\r\n" + bare + "\nEND:VADDRESSBOOK\r\n"
for _ in (bare, with_wrapper): for _ in (bare, with_wrapper):
split = list(vobject.split_collection(bare)) split = list(vobject.split_collection(bare))
assert len(split) == 4 assert len(split) == 4
assert vobject.join_collection(split).splitlines() == \ assert vobject.join_collection(split).splitlines() == with_wrapper.splitlines()
with_wrapper.splitlines()
def test_hash_item(): def test_hash_item():
a = EVENT_TEMPLATE.format(r=1, uid=1) a = EVENT_TEMPLATE.format(r=1, uid=1)
b = '\n'.join(line for line in a.splitlines() b = "\n".join(line for line in a.splitlines() if "PRODID" not in line)
if 'PRODID' not in line)
assert vobject.hash_item(a) == vobject.hash_item(b) assert vobject.hash_item(a) == vobject.hash_item(b)
def test_multiline_uid(benchmark): def test_multiline_uid(benchmark):
a = ('BEGIN:FOO\r\n' a = "BEGIN:FOO\r\n" "UID:123456789abcd\r\n" " efgh\r\n" "END:FOO\r\n"
'UID:123456789abcd\r\n' assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh"
' efgh\r\n'
'END:FOO\r\n')
assert benchmark(lambda: vobject.Item(a).uid) == '123456789abcdefgh'
complex_uid_item = dedent(''' complex_uid_item = dedent(
"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
BEGIN:VTIMEZONE BEGIN:VTIMEZONE
TZID:Europe/Rome TZID:Europe/Rome
@ -199,99 +194,102 @@ complex_uid_item = dedent('''
TRANSP:OPAQUE TRANSP:OPAQUE
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
''').strip() """
).strip()
def test_multiline_uid_complex(benchmark): def test_multiline_uid_complex(benchmark):
assert benchmark(lambda: vobject.Item(complex_uid_item).uid) == ( assert benchmark(lambda: vobject.Item(complex_uid_item).uid) == (
'040000008200E00074C5B7101A82E008000000005' "040000008200E00074C5B7101A82E008000000005"
'0AAABEEF50DCF001000000062548482FA830A46B9' "0AAABEEF50DCF001000000062548482FA830A46B9"
'EA62114AC9F0EF' "EA62114AC9F0EF"
) )
def test_replace_multiline_uid(benchmark): def test_replace_multiline_uid(benchmark):
def inner(): def inner():
return vobject.Item(complex_uid_item).with_uid('a').uid return vobject.Item(complex_uid_item).with_uid("a").uid
assert benchmark(inner) == 'a' assert benchmark(inner) == "a"
@pytest.mark.parametrize('template', [EVENT_TEMPLATE, @pytest.mark.parametrize(
EVENT_WITH_TIMEZONE_TEMPLATE, "template", [EVENT_TEMPLATE, EVENT_WITH_TIMEZONE_TEMPLATE, VCARD_TEMPLATE]
VCARD_TEMPLATE]) )
@given(uid=st.one_of(st.none(), uid_strategy)) @given(uid=st.one_of(st.none(), uid_strategy))
def test_replace_uid(template, uid): def test_replace_uid(template, uid):
item = vobject.Item(template.format(r=123, uid=123)).with_uid(uid) item = vobject.Item(template.format(r=123, uid=123)).with_uid(uid)
assert item.uid == uid assert item.uid == uid
if uid: if uid:
assert item.raw.count(f'\nUID:{uid}') == 1 assert item.raw.count(f"\nUID:{uid}") == 1
else: else:
assert '\nUID:' not in item.raw assert "\nUID:" not in item.raw
def test_broken_item(): def test_broken_item():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
vobject._Component.parse('END:FOO') vobject._Component.parse("END:FOO")
assert 'Parsing error at line 1' in str(excinfo.value) assert "Parsing error at line 1" in str(excinfo.value)
item = vobject.Item('END:FOO') item = vobject.Item("END:FOO")
assert item.parsed is None assert item.parsed is None
def test_multiple_items(): def test_multiple_items():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
vobject._Component.parse([ vobject._Component.parse(
'BEGIN:FOO', [
'END:FOO', "BEGIN:FOO",
'BEGIN:FOO', "END:FOO",
'END:FOO', "BEGIN:FOO",
]) "END:FOO",
]
)
assert 'Found 2 components, expected one' in str(excinfo.value) assert "Found 2 components, expected one" in str(excinfo.value)
c1, c2 = vobject._Component.parse([ c1, c2 = vobject._Component.parse(
'BEGIN:FOO', [
'END:FOO', "BEGIN:FOO",
'BEGIN:FOO', "END:FOO",
'END:FOO', "BEGIN:FOO",
], multiple=True) "END:FOO",
assert c1.name == c2.name == 'FOO' ],
multiple=True,
)
assert c1.name == c2.name == "FOO"
def test_input_types(): def test_input_types():
lines = ['BEGIN:FOO', 'FOO:BAR', 'END:FOO'] lines = ["BEGIN:FOO", "FOO:BAR", "END:FOO"]
for x in (lines, '\r\n'.join(lines), '\r\n'.join(lines).encode('ascii')): for x in (lines, "\r\n".join(lines), "\r\n".join(lines).encode("ascii")):
c = vobject._Component.parse(x) c = vobject._Component.parse(x)
assert c.name == 'FOO' assert c.name == "FOO"
assert c.props == ['FOO:BAR'] assert c.props == ["FOO:BAR"]
assert not c.subcomponents assert not c.subcomponents
value_strategy = st.text( value_strategy = st.text(
st.characters(blacklist_categories=( st.characters(
'Zs', 'Zl', 'Zp', blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), blacklist_characters=":="
'Cc', 'Cs' ),
), blacklist_characters=':='), min_size=1,
min_size=1
).filter(lambda x: x.strip() == x) ).filter(lambda x: x.strip() == x)
class VobjectMachine(RuleBasedStateMachine): class VobjectMachine(RuleBasedStateMachine):
Unparsed = Bundle('unparsed') Unparsed = Bundle("unparsed")
Parsed = Bundle('parsed') Parsed = Bundle("parsed")
@rule(target=Unparsed, @rule(target=Unparsed, joined=st.booleans(), encoded=st.booleans())
joined=st.booleans(),
encoded=st.booleans())
def get_unparsed_lines(self, joined, encoded): def get_unparsed_lines(self, joined, encoded):
rv = ['BEGIN:FOO', 'FOO:YES', 'END:FOO'] rv = ["BEGIN:FOO", "FOO:YES", "END:FOO"]
if joined: if joined:
rv = '\r\n'.join(rv) rv = "\r\n".join(rv)
if encoded: if encoded:
rv = rv.encode('utf-8') rv = rv.encode("utf-8")
elif encoded: elif encoded:
assume(False) assume(False)
return rv return rv
@ -304,24 +302,24 @@ class VobjectMachine(RuleBasedStateMachine):
def serialize(self, parsed): def serialize(self, parsed):
return list(parsed.dump_lines()) return list(parsed.dump_lines())
@rule(c=Parsed, @rule(c=Parsed, key=uid_strategy, value=uid_strategy)
key=uid_strategy,
value=uid_strategy)
def add_prop(self, c, key, value): def add_prop(self, c, key, value):
c[key] = value c[key] = value
assert c[key] == value assert c[key] == value
assert key in c assert key in c
assert c.get(key) == value assert c.get(key) == value
dump = '\r\n'.join(c.dump_lines()) dump = "\r\n".join(c.dump_lines())
assert key in dump and value in dump assert key in dump and value in dump
@rule(c=Parsed, @rule(
key=uid_strategy, c=Parsed,
value=uid_strategy, key=uid_strategy,
params=st.lists(st.tuples(value_strategy, value_strategy))) value=uid_strategy,
params=st.lists(st.tuples(value_strategy, value_strategy)),
)
def add_prop_raw(self, c, key, value, params): def add_prop_raw(self, c, key, value, params):
params_str = ','.join(k + '=' + v for k, v in params) params_str = ",".join(k + "=" + v for k, v in params)
c.props.insert(0, f'{key};{params_str}:{value}') c.props.insert(0, f"{key};{params_str}:{value}")
assert c[key] == value assert c[key] == value
assert key in c assert key in c
assert c.get(key) == value assert c.get(key) == value
@ -330,7 +328,7 @@ class VobjectMachine(RuleBasedStateMachine):
def add_component(self, c, sub_c): def add_component(self, c, sub_c):
assume(sub_c is not c and sub_c not in c) assume(sub_c is not c and sub_c not in c)
c.subcomponents.append(sub_c) c.subcomponents.append(sub_c)
assert '\r\n'.join(sub_c.dump_lines()) in '\r\n'.join(c.dump_lines()) assert "\r\n".join(sub_c.dump_lines()) in "\r\n".join(c.dump_lines())
@rule(c=Parsed) @rule(c=Parsed)
def sanity_check(self, c): def sanity_check(self, c):
@ -342,14 +340,10 @@ TestVobjectMachine = VobjectMachine.TestCase
def test_component_contains(): def test_component_contains():
item = vobject._Component.parse([ item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"])
'BEGIN:FOO',
'FOO:YES',
'END:FOO'
])
assert 'FOO' in item assert "FOO" in item
assert 'BAZ' not in item assert "BAZ" not in item
with pytest.raises(ValueError): with pytest.raises(ValueError):
42 in item 42 in item # noqa: B015

View file

@ -1,26 +1,27 @@
''' """
Vdirsyncer synchronizes calendars and contacts. Vdirsyncer synchronizes calendars and contacts.
''' """
PROJECT_HOME = 'https://github.com/pimutils/vdirsyncer' PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
BUGTRACKER_HOME = PROJECT_HOME + '/issues' BUGTRACKER_HOME = PROJECT_HOME + "/issues"
DOCS_HOME = 'https://vdirsyncer.pimutils.org/en/stable' DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable"
try: try:
from .version import version as __version__ # noqa from .version import version as __version__ # noqa
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
raise ImportError( raise ImportError(
'Failed to find (autogenerated) version.py. ' "Failed to find (autogenerated) version.py. "
'This might be because you are installing from GitHub\'s tarballs, ' "This might be because you are installing from GitHub's tarballs, "
'use the PyPI ones.' "use the PyPI ones."
) )
def _check_python_version(): # pragma: no cover def _check_python_version(): # pragma: no cover
import sys import sys
if sys.version_info < (3, 7, 0): if sys.version_info < (3, 7, 0):
print('vdirsyncer requires at least Python 3.7.') print("vdirsyncer requires at least Python 3.7.")
sys.exit(1) sys.exit(1)

View file

@ -1,3 +1,4 @@
if __name__ == '__main__': if __name__ == "__main__":
from vdirsyncer.cli import app from vdirsyncer.cli import app
app() app()

View file

@ -10,7 +10,7 @@ from .. import BUGTRACKER_HOME
cli_logger = logging.getLogger(__name__) cli_logger = logging.getLogger(__name__)
click_log.basic_config('vdirsyncer') click_log.basic_config("vdirsyncer")
class AppContext: class AppContext:
@ -30,6 +30,7 @@ def catch_errors(f):
f(*a, **kw) f(*a, **kw)
except BaseException: except BaseException:
from .utils import handle_cli_error from .utils import handle_cli_error
handle_cli_error() handle_cli_error()
sys.exit(1) sys.exit(1)
@ -37,24 +38,26 @@ def catch_errors(f):
@click.group() @click.group()
@click_log.simple_verbosity_option('vdirsyncer') @click_log.simple_verbosity_option("vdirsyncer")
@click.version_option(version=__version__) @click.version_option(version=__version__)
@click.option('--config', '-c', metavar='FILE', help='Config file to use.') @click.option("--config", "-c", metavar="FILE", help="Config file to use.")
@pass_context @pass_context
@catch_errors @catch_errors
def app(ctx, config): def app(ctx, config):
''' """
Synchronize calendars and contacts Synchronize calendars and contacts
''' """
if sys.platform == 'win32': if sys.platform == "win32":
cli_logger.warning('Vdirsyncer currently does not support Windows. ' cli_logger.warning(
'You will likely encounter bugs. ' "Vdirsyncer currently does not support Windows. "
'See {}/535 for more information.' "You will likely encounter bugs. "
.format(BUGTRACKER_HOME)) "See {}/535 for more information.".format(BUGTRACKER_HOME)
)
if not ctx.config: if not ctx.config:
from .config import load_config from .config import load_config
ctx.config = load_config(config) ctx.config = load_config(config)
@ -62,40 +65,44 @@ main = app
def max_workers_callback(ctx, param, value): def max_workers_callback(ctx, param, value):
if value == 0 and logging.getLogger('vdirsyncer').level == logging.DEBUG: if value == 0 and logging.getLogger("vdirsyncer").level == logging.DEBUG:
value = 1 value = 1
cli_logger.debug(f'Using {value} maximal workers.') cli_logger.debug(f"Using {value} maximal workers.")
return value return value
def max_workers_option(default=0): def max_workers_option(default=0):
help = 'Use at most this many connections. ' help = "Use at most this many connections. "
if default == 0: if default == 0:
help += 'The default is 0, which means "as many as necessary". ' \ help += (
'With -vdebug enabled, the default is 1.' 'The default is 0, which means "as many as necessary". '
"With -vdebug enabled, the default is 1."
)
else: else:
help += f'The default is {default}.' help += f"The default is {default}."
return click.option( return click.option(
'--max-workers', default=default, type=click.IntRange(min=0, max=None), "--max-workers",
default=default,
type=click.IntRange(min=0, max=None),
callback=max_workers_callback, callback=max_workers_callback,
help=help help=help,
) )
def collections_arg_callback(ctx, param, value): def collections_arg_callback(ctx, param, value):
''' """
Expand the various CLI shortforms ("pair, pair/collection") to an iterable Expand the various CLI shortforms ("pair, pair/collection") to an iterable
of (pair, collections). of (pair, collections).
''' """
# XXX: Ugly! pass_context should work everywhere. # XXX: Ugly! pass_context should work everywhere.
config = ctx.find_object(AppContext).config config = ctx.find_object(AppContext).config
rv = {} rv = {}
for pair_and_collection in (value or config.pairs): for pair_and_collection in value or config.pairs:
pair, collection = pair_and_collection, None pair, collection = pair_and_collection, None
if '/' in pair: if "/" in pair:
pair, collection = pair.split('/') pair, collection = pair.split("/")
collections = rv.setdefault(pair, set()) collections = rv.setdefault(pair, set())
if collection: if collection:
@ -104,20 +111,25 @@ def collections_arg_callback(ctx, param, value):
return rv.items() return rv.items()
collections_arg = click.argument('collections', nargs=-1, collections_arg = click.argument(
callback=collections_arg_callback) "collections", nargs=-1, callback=collections_arg_callback
)
@app.command() @app.command()
@collections_arg @collections_arg
@click.option('--force-delete/--no-force-delete', @click.option(
help=('Do/Don\'t abort synchronization when all items are about ' "--force-delete/--no-force-delete",
'to be deleted from both sides.')) help=(
"Do/Don't abort synchronization when all items are about "
"to be deleted from both sides."
),
)
@max_workers_option() @max_workers_option()
@pass_context @pass_context
@catch_errors @catch_errors
def sync(ctx, collections, force_delete, max_workers): def sync(ctx, collections, force_delete, max_workers):
''' """
Synchronize the given collections or pairs. If no arguments are given, all Synchronize the given collections or pairs. If no arguments are given, all
will be synchronized. will be synchronized.
@ -136,7 +148,7 @@ def sync(ctx, collections, force_delete, max_workers):
\b \b
# Sync only "first_collection" from the pair "bob" # Sync only "first_collection" from the pair "bob"
vdirsyncer sync bob/first_collection vdirsyncer sync bob/first_collection
''' """
from .tasks import prepare_pair, sync_collection from .tasks import prepare_pair, sync_collection
from .utils import WorkerQueue from .utils import WorkerQueue
@ -144,11 +156,16 @@ def sync(ctx, collections, force_delete, max_workers):
with wq.join(): with wq.join():
for pair_name, collections in collections: for pair_name, collections in collections:
wq.put(functools.partial(prepare_pair, pair_name=pair_name, wq.put(
collections=collections, functools.partial(
config=ctx.config, prepare_pair,
force_delete=force_delete, pair_name=pair_name,
callback=sync_collection)) collections=collections,
config=ctx.config,
force_delete=force_delete,
callback=sync_collection,
)
)
wq.spawn_worker() wq.spawn_worker()
@ -158,11 +175,11 @@ def sync(ctx, collections, force_delete, max_workers):
@pass_context @pass_context
@catch_errors @catch_errors
def metasync(ctx, collections, max_workers): def metasync(ctx, collections, max_workers):
''' """
Synchronize metadata of the given collections or pairs. Synchronize metadata of the given collections or pairs.
See the `sync` command for usage. See the `sync` command for usage.
''' """
from .tasks import prepare_pair, metasync_collection from .tasks import prepare_pair, metasync_collection
from .utils import WorkerQueue from .utils import WorkerQueue
@ -170,59 +187,73 @@ def metasync(ctx, collections, max_workers):
with wq.join(): with wq.join():
for pair_name, collections in collections: for pair_name, collections in collections:
wq.put(functools.partial(prepare_pair, pair_name=pair_name, wq.put(
collections=collections, functools.partial(
config=ctx.config, prepare_pair,
callback=metasync_collection)) pair_name=pair_name,
collections=collections,
config=ctx.config,
callback=metasync_collection,
)
)
wq.spawn_worker() wq.spawn_worker()
@app.command() @app.command()
@click.argument('pairs', nargs=-1) @click.argument("pairs", nargs=-1)
@click.option( @click.option(
'--list/--no-list', default=True, "--list/--no-list",
default=True,
help=( help=(
'Whether to list all collections from both sides during discovery, ' "Whether to list all collections from both sides during discovery, "
'for debugging. This is slow and may crash for broken servers.' "for debugging. This is slow and may crash for broken servers."
) ),
) )
@max_workers_option(default=1) @max_workers_option(default=1)
@pass_context @pass_context
@catch_errors @catch_errors
def discover(ctx, pairs, max_workers, list): def discover(ctx, pairs, max_workers, list):
''' """
Refresh collection cache for the given pairs. Refresh collection cache for the given pairs.
''' """
from .tasks import discover_collections from .tasks import discover_collections
from .utils import WorkerQueue from .utils import WorkerQueue
config = ctx.config config = ctx.config
wq = WorkerQueue(max_workers) wq = WorkerQueue(max_workers)
with wq.join(): with wq.join():
for pair_name in (pairs or config.pairs): for pair_name in pairs or config.pairs:
pair = config.get_pair(pair_name) pair = config.get_pair(pair_name)
wq.put(functools.partial( wq.put(
discover_collections, functools.partial(
status_path=config.general['status_path'], discover_collections,
pair=pair, status_path=config.general["status_path"],
from_cache=False, pair=pair,
list_collections=list, from_cache=False,
)) list_collections=list,
)
)
wq.spawn_worker() wq.spawn_worker()
@app.command() @app.command()
@click.argument('collection') @click.argument("collection")
@click.option('--repair-unsafe-uid/--no-repair-unsafe-uid', default=False, @click.option(
help=('Some characters in item UIDs and URLs may cause problems ' "--repair-unsafe-uid/--no-repair-unsafe-uid",
'with buggy software. Adding this option will reassign ' default=False,
'new UIDs to those items. This is disabled by default, ' help=(
'which is equivalent to `--no-repair-unsafe-uid`.')) "Some characters in item UIDs and URLs may cause problems "
"with buggy software. Adding this option will reassign "
"new UIDs to those items. This is disabled by default, "
"which is equivalent to `--no-repair-unsafe-uid`."
),
)
@pass_context @pass_context
@catch_errors @catch_errors
def repair(ctx, collection, repair_unsafe_uid): def repair(ctx, collection, repair_unsafe_uid):
''' """
Repair a given collection. Repair a given collection.
Runs a few checks on the collection and applies some fixes to individual Runs a few checks on the collection and applies some fixes to individual
@ -234,12 +265,13 @@ def repair(ctx, collection, repair_unsafe_uid):
\b\bExamples: \b\bExamples:
# Repair the `foo` collection of the `calendars_local` storage # Repair the `foo` collection of the `calendars_local` storage
vdirsyncer repair calendars_local/foo vdirsyncer repair calendars_local/foo
''' """
from .tasks import repair_collection from .tasks import repair_collection
cli_logger.warning('This operation will take a very long time.') cli_logger.warning("This operation will take a very long time.")
cli_logger.warning('It\'s recommended to make a backup and ' cli_logger.warning(
'turn off other client\'s synchronization features.') "It's recommended to make a backup and "
click.confirm('Do you want to continue?', abort=True) "turn off other client's synchronization features."
repair_collection(ctx.config, collection, )
repair_unsafe_uid=repair_unsafe_uid) click.confirm("Do you want to continue?", abort=True)
repair_collection(ctx.config, collection, repair_unsafe_uid=repair_unsafe_uid)

View file

@ -14,19 +14,20 @@ from .fetchparams import expand_fetch_params
from .utils import storage_class_from_config from .utils import storage_class_from_config
GENERAL_ALL = frozenset(['status_path']) GENERAL_ALL = frozenset(["status_path"])
GENERAL_REQUIRED = frozenset(['status_path']) GENERAL_REQUIRED = frozenset(["status_path"])
SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, '_')) SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, "_"))
def validate_section_name(name, section_type): def validate_section_name(name, section_type):
invalid = set(name) - SECTION_NAME_CHARS invalid = set(name) - SECTION_NAME_CHARS
if invalid: if invalid:
chars_display = ''.join(sorted(SECTION_NAME_CHARS)) chars_display = "".join(sorted(SECTION_NAME_CHARS))
raise exceptions.UserError( raise exceptions.UserError(
'The {}-section "{}" contains invalid characters. Only ' 'The {}-section "{}" contains invalid characters. Only '
'the following characters are allowed for storage and ' "the following characters are allowed for storage and "
'pair names:\n{}'.format(section_type, name, chars_display)) "pair names:\n{}".format(section_type, name, chars_display)
)
def _validate_general_section(general_config): def _validate_general_section(general_config):
@ -35,18 +36,21 @@ def _validate_general_section(general_config):
problems = [] problems = []
if invalid: if invalid:
problems.append('general section doesn\'t take the parameters: {}' problems.append(
.format(', '.join(invalid))) "general section doesn't take the parameters: {}".format(", ".join(invalid))
)
if missing: if missing:
problems.append('general section is missing the parameters: {}' problems.append(
.format(', '.join(missing))) "general section is missing the parameters: {}".format(", ".join(missing))
)
if problems: if problems:
raise exceptions.UserError( raise exceptions.UserError(
'Invalid general section. Copy the example ' "Invalid general section. Copy the example "
'config from the repository and edit it: {}' "config from the repository and edit it: {}".format(PROJECT_HOME),
.format(PROJECT_HOME), problems=problems) problems=problems,
)
def _validate_collections_param(collections): def _validate_collections_param(collections):
@ -54,7 +58,7 @@ def _validate_collections_param(collections):
return return
if not isinstance(collections, list): if not isinstance(collections, list):
raise ValueError('`collections` parameter must be a list or `null`.') raise ValueError("`collections` parameter must be a list or `null`.")
collection_names = set() collection_names = set()
@ -64,7 +68,7 @@ def _validate_collections_param(collections):
collection_name = collection collection_name = collection
elif isinstance(collection, list): elif isinstance(collection, list):
e = ValueError( e = ValueError(
'Expected list of format ' "Expected list of format "
'["config_name", "storage_a_name", "storage_b_name"]' '["config_name", "storage_a_name", "storage_b_name"]'
) )
if len(collection) != 3: if len(collection) != 3:
@ -79,14 +83,15 @@ def _validate_collections_param(collections):
collection_name = collection[0] collection_name = collection[0]
else: else:
raise ValueError('Expected string or list of three strings.') raise ValueError("Expected string or list of three strings.")
if collection_name in collection_names: if collection_name in collection_names:
raise ValueError('Duplicate value.') raise ValueError("Duplicate value.")
collection_names.add(collection_name) collection_names.add(collection_name)
except ValueError as e: except ValueError as e:
raise ValueError('`collections` parameter, position {i}: {e}' raise ValueError(
.format(i=i, e=str(e))) "`collections` parameter, position {i}: {e}".format(i=i, e=str(e))
)
class _ConfigReader: class _ConfigReader:
@ -106,39 +111,38 @@ class _ConfigReader:
raise ValueError(f'Name "{name}" already used.') raise ValueError(f'Name "{name}" already used.')
self._seen_names.add(name) self._seen_names.add(name)
if section_type == 'general': if section_type == "general":
if self._general: if self._general:
raise ValueError('More than one general section.') raise ValueError("More than one general section.")
self._general = options self._general = options
elif section_type == 'storage': elif section_type == "storage":
self._storages[name] = options self._storages[name] = options
elif section_type == 'pair': elif section_type == "pair":
self._pairs[name] = options self._pairs[name] = options
else: else:
raise ValueError('Unknown section type.') raise ValueError("Unknown section type.")
def parse(self): def parse(self):
for section in self._parser.sections(): for section in self._parser.sections():
if ' ' in section: if " " in section:
section_type, name = section.split(' ', 1) section_type, name = section.split(" ", 1)
else: else:
section_type = name = section section_type = name = section
try: try:
self._parse_section( self._parse_section(
section_type, name, section_type,
dict(_parse_options(self._parser.items(section), name,
section=section)) dict(_parse_options(self._parser.items(section), section=section)),
) )
except ValueError as e: except ValueError as e:
raise exceptions.UserError( raise exceptions.UserError('Section "{}": {}'.format(section, str(e)))
'Section "{}": {}'.format(section, str(e)))
_validate_general_section(self._general) _validate_general_section(self._general)
if getattr(self._file, 'name', None): if getattr(self._file, "name", None):
self._general['status_path'] = os.path.join( self._general["status_path"] = os.path.join(
os.path.dirname(self._file.name), os.path.dirname(self._file.name),
expand_path(self._general['status_path']) expand_path(self._general["status_path"]),
) )
return self._general, self._pairs, self._storages return self._general, self._pairs, self._storages
@ -149,8 +153,7 @@ def _parse_options(items, section=None):
try: try:
yield key, json.loads(value) yield key, json.loads(value)
except ValueError as e: except ValueError as e:
raise ValueError('Section "{}", option "{}": {}' raise ValueError('Section "{}", option "{}": {}'.format(section, key, e))
.format(section, key, e))
class Config: class Config:
@ -158,14 +161,14 @@ class Config:
self.general = general self.general = general
self.storages = storages self.storages = storages
for name, options in storages.items(): for name, options in storages.items():
options['instance_name'] = name options["instance_name"] = name
self.pairs = {} self.pairs = {}
for name, options in pairs.items(): for name, options in pairs.items():
try: try:
self.pairs[name] = PairConfig(self, name, options) self.pairs[name] = PairConfig(self, name, options)
except ValueError as e: except ValueError as e:
raise exceptions.UserError(f'Pair {name}: {e}') raise exceptions.UserError(f"Pair {name}: {e}")
@classmethod @classmethod
def from_fileobject(cls, f): def from_fileobject(cls, f):
@ -175,21 +178,21 @@ class Config:
@classmethod @classmethod
def from_filename_or_environment(cls, fname=None): def from_filename_or_environment(cls, fname=None):
if fname is None: if fname is None:
fname = os.environ.get('VDIRSYNCER_CONFIG', None) fname = os.environ.get("VDIRSYNCER_CONFIG", None)
if fname is None: if fname is None:
fname = expand_path('~/.vdirsyncer/config') fname = expand_path("~/.vdirsyncer/config")
if not os.path.exists(fname): if not os.path.exists(fname):
xdg_config_dir = os.environ.get('XDG_CONFIG_HOME', xdg_config_dir = os.environ.get(
expand_path('~/.config/')) "XDG_CONFIG_HOME", expand_path("~/.config/")
fname = os.path.join(xdg_config_dir, 'vdirsyncer/config') )
fname = os.path.join(xdg_config_dir, "vdirsyncer/config")
try: try:
with open(fname) as f: with open(fname) as f:
return cls.from_fileobject(f) return cls.from_fileobject(f)
except Exception as e: except Exception as e:
raise exceptions.UserError( raise exceptions.UserError(
'Error during reading config {}: {}' "Error during reading config {}: {}".format(fname, e)
.format(fname, e)
) )
def get_storage_args(self, storage_name): def get_storage_args(self, storage_name):
@ -197,9 +200,10 @@ class Config:
args = self.storages[storage_name] args = self.storages[storage_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
'Storage {!r} not found. ' "Storage {!r} not found. "
'These are the configured storages: {}' "These are the configured storages: {}".format(
.format(storage_name, list(self.storages)) storage_name, list(self.storages)
)
) )
else: else:
return expand_fetch_params(args) return expand_fetch_params(args)
@ -215,50 +219,53 @@ class PairConfig:
def __init__(self, full_config, name, options): def __init__(self, full_config, name, options):
self._config = full_config self._config = full_config
self.name = name self.name = name
self.name_a = options.pop('a') self.name_a = options.pop("a")
self.name_b = options.pop('b') self.name_b = options.pop("b")
self._partial_sync = options.pop('partial_sync', None) self._partial_sync = options.pop("partial_sync", None)
self.metadata = options.pop('metadata', None) or () self.metadata = options.pop("metadata", None) or ()
self.conflict_resolution = \ self.conflict_resolution = self._process_conflict_resolution_param(
self._process_conflict_resolution_param( options.pop("conflict_resolution", None)
options.pop('conflict_resolution', None)) )
try: try:
self.collections = options.pop('collections') self.collections = options.pop("collections")
except KeyError: except KeyError:
raise ValueError( raise ValueError(
'collections parameter missing.\n\n' "collections parameter missing.\n\n"
'As of 0.9.0 this parameter has no default anymore. ' "As of 0.9.0 this parameter has no default anymore. "
'Set `collections = null` explicitly in your pair config.' "Set `collections = null` explicitly in your pair config."
) )
else: else:
_validate_collections_param(self.collections) _validate_collections_param(self.collections)
if options: if options:
raise ValueError('Unknown options: {}'.format(', '.join(options))) raise ValueError("Unknown options: {}".format(", ".join(options)))
def _process_conflict_resolution_param(self, conflict_resolution): def _process_conflict_resolution_param(self, conflict_resolution):
if conflict_resolution in (None, 'a wins', 'b wins'): if conflict_resolution in (None, "a wins", "b wins"):
return conflict_resolution return conflict_resolution
elif isinstance(conflict_resolution, list) and \ elif (
len(conflict_resolution) > 1 and \ isinstance(conflict_resolution, list)
conflict_resolution[0] == 'command': and len(conflict_resolution) > 1
and conflict_resolution[0] == "command"
):
def resolve(a, b): def resolve(a, b):
a_name = self.config_a['instance_name'] a_name = self.config_a["instance_name"]
b_name = self.config_b['instance_name'] b_name = self.config_b["instance_name"]
command = conflict_resolution[1:] command = conflict_resolution[1:]
def inner(): def inner():
return _resolve_conflict_via_command(a, b, command, a_name, return _resolve_conflict_via_command(a, b, command, a_name, b_name)
b_name)
ui_worker = get_ui_worker() ui_worker = get_ui_worker()
return ui_worker.put(inner) return ui_worker.put(inner)
return resolve return resolve
else: else:
raise ValueError('Invalid value for `conflict_resolution`.') raise ValueError("Invalid value for `conflict_resolution`.")
# The following parameters are lazily evaluated because evaluating # The following parameters are lazily evaluated because evaluating
# self.config_a would expand all `x.fetch` parameters. This is costly and # self.config_a would expand all `x.fetch` parameters. This is costly and
@ -282,21 +289,23 @@ class PairConfig:
cls_a, _ = storage_class_from_config(self.config_a) cls_a, _ = storage_class_from_config(self.config_a)
cls_b, _ = storage_class_from_config(self.config_b) cls_b, _ = storage_class_from_config(self.config_b)
if not cls_a.read_only and \ if (
not self.config_a.get('read_only', False) and \ not cls_a.read_only
not cls_b.read_only and \ and not self.config_a.get("read_only", False)
not self.config_b.get('read_only', False): and not cls_b.read_only
and not self.config_b.get("read_only", False)
):
raise exceptions.UserError( raise exceptions.UserError(
'`partial_sync` is only effective if one storage is ' "`partial_sync` is only effective if one storage is "
'read-only. Use `read_only = true` in exactly one storage ' "read-only. Use `read_only = true` in exactly one storage "
'section.' "section."
) )
if partial_sync is None: if partial_sync is None:
partial_sync = 'revert' partial_sync = "revert"
if partial_sync not in ('ignore', 'revert', 'error'): if partial_sync not in ("ignore", "revert", "error"):
raise exceptions.UserError('Invalid value for `partial_sync`.') raise exceptions.UserError("Invalid value for `partial_sync`.")
return partial_sync return partial_sync
@ -314,8 +323,7 @@ class CollectionConfig:
load_config = Config.from_filename_or_environment load_config = Config.from_filename_or_environment
def _resolve_conflict_via_command(a, b, command, a_name, b_name, def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=None):
_check_call=None):
import tempfile import tempfile
import shutil import shutil
@ -324,14 +332,14 @@ def _resolve_conflict_via_command(a, b, command, a_name, b_name,
from ..vobject import Item from ..vobject import Item
dir = tempfile.mkdtemp(prefix='vdirsyncer-conflict.') dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.")
try: try:
a_tmp = os.path.join(dir, a_name) a_tmp = os.path.join(dir, a_name)
b_tmp = os.path.join(dir, b_name) b_tmp = os.path.join(dir, b_name)
with open(a_tmp, 'w') as f: with open(a_tmp, "w") as f:
f.write(a.raw) f.write(a.raw)
with open(b_tmp, 'w') as f: with open(b_tmp, "w") as f:
f.write(b.raw) f.write(b.raw)
command[0] = expand_path(command[0]) command[0] = expand_path(command[0])
@ -343,8 +351,7 @@ def _resolve_conflict_via_command(a, b, command, a_name, b_name,
new_b = f.read() new_b = f.read()
if new_a != new_b: if new_a != new_b:
raise exceptions.UserError('The two files are not completely ' raise exceptions.UserError("The two files are not completely " "equal.")
'equal.')
return Item(new_a) return Item(new_a)
finally: finally:
shutil.rmtree(dir) shutil.rmtree(dir)

View file

@ -22,19 +22,21 @@ logger = logging.getLogger(__name__)
def _get_collections_cache_key(pair): def _get_collections_cache_key(pair):
m = hashlib.sha256() m = hashlib.sha256()
j = json.dumps([ j = json.dumps(
DISCOVERY_CACHE_VERSION, [
pair.collections, DISCOVERY_CACHE_VERSION,
pair.config_a, pair.collections,
pair.config_b, pair.config_a,
], sort_keys=True) pair.config_b,
m.update(j.encode('utf-8')) ],
sort_keys=True,
)
m.update(j.encode("utf-8"))
return m.hexdigest() return m.hexdigest()
def collections_for_pair(status_path, pair, from_cache=True, def collections_for_pair(status_path, pair, from_cache=True, list_collections=False):
list_collections=False): """Determine all configured collections for a given pair. Takes care of
'''Determine all configured collections for a given pair. Takes care of
shortcut expansion and result caching. shortcut expansion and result caching.
:param status_path: The path to the status directory. :param status_path: The path to the status directory.
@ -42,55 +44,62 @@ def collections_for_pair(status_path, pair, from_cache=True,
discover and save to cache. discover and save to cache.
:returns: iterable of (collection, (a_args, b_args)) :returns: iterable of (collection, (a_args, b_args))
''' """
cache_key = _get_collections_cache_key(pair) cache_key = _get_collections_cache_key(pair)
if from_cache: if from_cache:
rv = load_status(status_path, pair.name, data_type='collections') rv = load_status(status_path, pair.name, data_type="collections")
if rv and rv.get('cache_key', None) == cache_key: if rv and rv.get("cache_key", None) == cache_key:
return list(_expand_collections_cache( return list(
rv['collections'], pair.config_a, pair.config_b _expand_collections_cache(
)) rv["collections"], pair.config_a, pair.config_b
)
)
elif rv: elif rv:
raise exceptions.UserError('Detected change in config file, ' raise exceptions.UserError(
'please run `vdirsyncer discover {}`.' "Detected change in config file, "
.format(pair.name)) "please run `vdirsyncer discover {}`.".format(pair.name)
)
else: else:
raise exceptions.UserError('Please run `vdirsyncer discover {}` ' raise exceptions.UserError(
' before synchronization.' "Please run `vdirsyncer discover {}` "
.format(pair.name)) " before synchronization.".format(pair.name)
)
logger.info('Discovering collections for pair {}' .format(pair.name)) logger.info("Discovering collections for pair {}".format(pair.name))
a_discovered = _DiscoverResult(pair.config_a) a_discovered = _DiscoverResult(pair.config_a)
b_discovered = _DiscoverResult(pair.config_b) b_discovered = _DiscoverResult(pair.config_b)
if list_collections: if list_collections:
_print_collections(pair.config_a['instance_name'], _print_collections(pair.config_a["instance_name"], a_discovered.get_self)
a_discovered.get_self) _print_collections(pair.config_b["instance_name"], b_discovered.get_self)
_print_collections(pair.config_b['instance_name'],
b_discovered.get_self)
# We have to use a list here because the special None/null value would get # We have to use a list here because the special None/null value would get
# mangled to string (because JSON objects always have string keys). # mangled to string (because JSON objects always have string keys).
rv = list(expand_collections( rv = list(
shortcuts=pair.collections, expand_collections(
config_a=pair.config_a, shortcuts=pair.collections,
config_b=pair.config_b, config_a=pair.config_a,
get_a_discovered=a_discovered.get_self, config_b=pair.config_b,
get_b_discovered=b_discovered.get_self, get_a_discovered=a_discovered.get_self,
_handle_collection_not_found=handle_collection_not_found get_b_discovered=b_discovered.get_self,
)) _handle_collection_not_found=handle_collection_not_found,
)
)
_sanity_check_collections(rv) _sanity_check_collections(rv)
save_status(status_path, pair.name, data_type='collections', save_status(
data={ status_path,
'collections': list( pair.name,
_compress_collections_cache(rv, pair.config_a, data_type="collections",
pair.config_b) data={
), "collections": list(
'cache_key': cache_key _compress_collections_cache(rv, pair.config_a, pair.config_b)
}) ),
"cache_key": cache_key,
},
)
return rv return rv
@ -141,25 +150,31 @@ class _DiscoverResult:
except Exception: except Exception:
return handle_storage_init_error(self._cls, self._config) return handle_storage_init_error(self._cls, self._config)
else: else:
storage_type = self._config['type'] storage_type = self._config["type"]
rv = {} rv = {}
for args in discovered: for args in discovered:
args['type'] = storage_type args["type"] = storage_type
rv[args['collection']] = args rv[args["collection"]] = args
return rv return rv
def expand_collections(shortcuts, config_a, config_b, get_a_discovered, def expand_collections(
get_b_discovered, _handle_collection_not_found): shortcuts,
config_a,
config_b,
get_a_discovered,
get_b_discovered,
_handle_collection_not_found,
):
handled_collections = set() handled_collections = set()
if shortcuts is None: if shortcuts is None:
shortcuts = [None] shortcuts = [None]
for shortcut in shortcuts: for shortcut in shortcuts:
if shortcut == 'from a': if shortcut == "from a":
collections = get_a_discovered() collections = get_a_discovered()
elif shortcut == 'from b': elif shortcut == "from b":
collections = get_b_discovered() collections = get_b_discovered()
else: else:
collections = [shortcut] collections = [shortcut]
@ -175,22 +190,21 @@ def expand_collections(shortcuts, config_a, config_b, get_a_discovered,
handled_collections.add(collection) handled_collections.add(collection)
a_args = _collection_from_discovered( a_args = _collection_from_discovered(
get_a_discovered, collection_a, config_a, get_a_discovered, collection_a, config_a, _handle_collection_not_found
_handle_collection_not_found
) )
b_args = _collection_from_discovered( b_args = _collection_from_discovered(
get_b_discovered, collection_b, config_b, get_b_discovered, collection_b, config_b, _handle_collection_not_found
_handle_collection_not_found
) )
yield collection, (a_args, b_args) yield collection, (a_args, b_args)
def _collection_from_discovered(get_discovered, collection, config, def _collection_from_discovered(
_handle_collection_not_found): get_discovered, collection, config, _handle_collection_not_found
):
if collection is None: if collection is None:
args = dict(config) args = dict(config)
args['collection'] = None args["collection"] = None
return args return args
try: try:
@ -209,26 +223,31 @@ def _print_collections(instance_name, get_discovered):
# UserError), we don't even know if the storage supports discovery # UserError), we don't even know if the storage supports discovery
# properly. So we can't abort. # properly. So we can't abort.
import traceback import traceback
logger.debug(''.join(traceback.format_tb(sys.exc_info()[2])))
logger.warning('Failed to discover collections for {}, use `-vdebug` ' logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
'to see the full traceback.'.format(instance_name)) logger.warning(
"Failed to discover collections for {}, use `-vdebug` "
"to see the full traceback.".format(instance_name)
)
return return
logger.info(f'{instance_name}:') logger.info(f"{instance_name}:")
for args in discovered.values(): for args in discovered.values():
collection = args['collection'] collection = args["collection"]
if collection is None: if collection is None:
continue continue
args['instance_name'] = instance_name args["instance_name"] = instance_name
try: try:
storage = storage_instance_from_config(args, create=False) storage = storage_instance_from_config(args, create=False)
displayname = storage.get_meta('displayname') displayname = storage.get_meta("displayname")
except Exception: except Exception:
displayname = '' displayname = ""
logger.info(' - {}{}'.format( logger.info(
json.dumps(collection), " - {}{}".format(
f' ("{displayname}")' json.dumps(collection),
if displayname and displayname != collection f' ("{displayname}")'
else '' if displayname and displayname != collection
)) else "",
)
)

View file

@ -7,7 +7,7 @@ from .. import exceptions
from ..utils import expand_path from ..utils import expand_path
from ..utils import synchronized from ..utils import synchronized
SUFFIX = '.fetch' SUFFIX = ".fetch"
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -18,9 +18,9 @@ def expand_fetch_params(config):
if not key.endswith(SUFFIX): if not key.endswith(SUFFIX):
continue continue
newkey = key[:-len(SUFFIX)] newkey = key[: -len(SUFFIX)]
if newkey in config: if newkey in config:
raise ValueError(f'Can\'t set {key} and {newkey}.') raise ValueError(f"Can't set {key} and {newkey}.")
config[newkey] = _fetch_value(config[key], key) config[newkey] = _fetch_value(config[key], key)
del config[key] del config[key]
@ -30,10 +30,11 @@ def expand_fetch_params(config):
@synchronized() @synchronized()
def _fetch_value(opts, key): def _fetch_value(opts, key):
if not isinstance(opts, list): if not isinstance(opts, list):
raise ValueError('Invalid value for {}: Expected a list, found {!r}.' raise ValueError(
.format(key, opts)) "Invalid value for {}: Expected a list, found {!r}.".format(key, opts)
)
if not opts: if not opts:
raise ValueError('Expected list of length > 0.') raise ValueError("Expected list of length > 0.")
try: try:
ctx = click.get_current_context().find_object(AppContext) ctx = click.get_current_context().find_object(AppContext)
@ -46,7 +47,7 @@ def _fetch_value(opts, key):
cache_key = tuple(opts) cache_key = tuple(opts)
if cache_key in password_cache: if cache_key in password_cache:
rv = password_cache[cache_key] rv = password_cache[cache_key]
logger.debug(f'Found cached value for {opts!r}.') logger.debug(f"Found cached value for {opts!r}.")
if isinstance(rv, BaseException): if isinstance(rv, BaseException):
raise rv raise rv
return rv return rv
@ -55,10 +56,9 @@ def _fetch_value(opts, key):
try: try:
strategy_fn = STRATEGIES[strategy] strategy_fn = STRATEGIES[strategy]
except KeyError: except KeyError:
raise exceptions.UserError(f'Unknown strategy: {strategy}') raise exceptions.UserError(f"Unknown strategy: {strategy}")
logger.debug('Fetching value for {} with {} strategy.' logger.debug("Fetching value for {} with {} strategy.".format(key, strategy))
.format(key, strategy))
try: try:
rv = strategy_fn(*opts[1:]) rv = strategy_fn(*opts[1:])
except (click.Abort, KeyboardInterrupt) as e: except (click.Abort, KeyboardInterrupt) as e:
@ -66,22 +66,25 @@ def _fetch_value(opts, key):
raise raise
else: else:
if not rv: if not rv:
raise exceptions.UserError('Empty value for {}, this most likely ' raise exceptions.UserError(
'indicates an error.' "Empty value for {}, this most likely "
.format(key)) "indicates an error.".format(key)
)
password_cache[cache_key] = rv password_cache[cache_key] = rv
return rv return rv
def _strategy_command(*command): def _strategy_command(*command):
import subprocess import subprocess
command = (expand_path(command[0]),) + command[1:] command = (expand_path(command[0]),) + command[1:]
try: try:
stdout = subprocess.check_output(command, universal_newlines=True) stdout = subprocess.check_output(command, universal_newlines=True)
return stdout.strip('\n') return stdout.strip("\n")
except OSError as e: except OSError as e:
raise exceptions.UserError('Failed to execute command: {}\n{}' raise exceptions.UserError(
.format(' '.join(command), str(e))) "Failed to execute command: {}\n{}".format(" ".join(command), str(e))
)
def _strategy_prompt(text): def _strategy_prompt(text):
@ -89,6 +92,6 @@ def _strategy_prompt(text):
STRATEGIES = { STRATEGIES = {
'command': _strategy_command, "command": _strategy_command,
'prompt': _strategy_prompt, "prompt": _strategy_prompt,
} }

View file

@ -19,28 +19,30 @@ from .utils import save_status
def prepare_pair(wq, pair_name, collections, config, callback, **kwargs): def prepare_pair(wq, pair_name, collections, config, callback, **kwargs):
pair = config.get_pair(pair_name) pair = config.get_pair(pair_name)
all_collections = dict(collections_for_pair( all_collections = dict(
status_path=config.general['status_path'], pair=pair collections_for_pair(status_path=config.general["status_path"], pair=pair)
)) )
# spawn one worker less because we can reuse the current one # spawn one worker less because we can reuse the current one
new_workers = -1 new_workers = -1
for collection_name in (collections or all_collections): for collection_name in collections or all_collections:
try: try:
config_a, config_b = all_collections[collection_name] config_a, config_b = all_collections[collection_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
'Pair {}: Collection {} not found. These are the ' "Pair {}: Collection {} not found. These are the "
'configured collections:\n{}' "configured collections:\n{}".format(
.format(pair_name, pair_name, json.dumps(collection_name), list(all_collections)
json.dumps(collection_name), )
list(all_collections))) )
new_workers += 1 new_workers += 1
collection = CollectionConfig(pair, collection_name, config_a, collection = CollectionConfig(pair, collection_name, config_a, config_b)
config_b) wq.put(
wq.put(functools.partial(callback, collection=collection, functools.partial(
general=config.general, **kwargs)) callback, collection=collection, general=config.general, **kwargs
)
)
for _ in range(new_workers): for _ in range(new_workers):
wq.spawn_worker() wq.spawn_worker()
@ -51,7 +53,7 @@ def sync_collection(wq, collection, general, force_delete):
status_name = get_status_name(pair.name, collection.name) status_name = get_status_name(pair.name, collection.name)
try: try:
cli_logger.info(f'Syncing {status_name}') cli_logger.info(f"Syncing {status_name}")
a = storage_instance_from_config(collection.config_a) a = storage_instance_from_config(collection.config_a)
b = storage_instance_from_config(collection.config_b) b = storage_instance_from_config(collection.config_b)
@ -63,14 +65,17 @@ def sync_collection(wq, collection, general, force_delete):
sync_failed = True sync_failed = True
handle_cli_error(status_name, e) handle_cli_error(status_name, e)
with manage_sync_status(general['status_path'], pair.name, with manage_sync_status(
collection.name) as status: general["status_path"], pair.name, collection.name
) as status:
sync.sync( sync.sync(
a, b, status, a,
b,
status,
conflict_resolution=pair.conflict_resolution, conflict_resolution=pair.conflict_resolution,
force_delete=force_delete, force_delete=force_delete,
error_callback=error_callback, error_callback=error_callback,
partial_sync=pair.partial_sync partial_sync=pair.partial_sync,
) )
if sync_failed: if sync_failed:
@ -87,62 +92,76 @@ def discover_collections(wq, pair, **kwargs):
collections = list(c for c, (a, b) in rv) collections = list(c for c, (a, b) in rv)
if collections == [None]: if collections == [None]:
collections = None collections = None
cli_logger.info('Saved for {}: collections = {}' cli_logger.info(
.format(pair.name, json.dumps(collections))) "Saved for {}: collections = {}".format(pair.name, json.dumps(collections))
)
def repair_collection(config, collection, repair_unsafe_uid): def repair_collection(config, collection, repair_unsafe_uid):
from ..repair import repair_storage from ..repair import repair_storage
storage_name, collection = collection, None storage_name, collection = collection, None
if '/' in storage_name: if "/" in storage_name:
storage_name, collection = storage_name.split('/') storage_name, collection = storage_name.split("/")
config = config.get_storage_args(storage_name) config = config.get_storage_args(storage_name)
storage_type = config['type'] storage_type = config["type"]
if collection is not None: if collection is not None:
cli_logger.info('Discovering collections (skipping cache).') cli_logger.info("Discovering collections (skipping cache).")
cls, config = storage_class_from_config(config) cls, config = storage_class_from_config(config)
for config in cls.discover(**config): for config in cls.discover(**config):
if config['collection'] == collection: if config["collection"] == collection:
break break
else: else:
raise exceptions.UserError( raise exceptions.UserError(
'Couldn\'t find collection {} for storage {}.' "Couldn't find collection {} for storage {}.".format(
.format(collection, storage_name) collection, storage_name
)
) )
config['type'] = storage_type config["type"] = storage_type
storage = storage_instance_from_config(config) storage = storage_instance_from_config(config)
cli_logger.info(f'Repairing {storage_name}/{collection}') cli_logger.info(f"Repairing {storage_name}/{collection}")
cli_logger.warning('Make sure no other program is talking to the server.') cli_logger.warning("Make sure no other program is talking to the server.")
repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid) repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid)
def metasync_collection(wq, collection, general): def metasync_collection(wq, collection, general):
from ..metasync import metasync from ..metasync import metasync
pair = collection.pair pair = collection.pair
status_name = get_status_name(pair.name, collection.name) status_name = get_status_name(pair.name, collection.name)
try: try:
cli_logger.info(f'Metasyncing {status_name}') cli_logger.info(f"Metasyncing {status_name}")
status = load_status(general['status_path'], pair.name, status = (
collection.name, data_type='metadata') or {} load_status(
general["status_path"], pair.name, collection.name, data_type="metadata"
)
or {}
)
a = storage_instance_from_config(collection.config_a) a = storage_instance_from_config(collection.config_a)
b = storage_instance_from_config(collection.config_b) b = storage_instance_from_config(collection.config_b)
metasync( metasync(
a, b, status, a,
b,
status,
conflict_resolution=pair.conflict_resolution, conflict_resolution=pair.conflict_resolution,
keys=pair.metadata keys=pair.metadata,
) )
except BaseException: except BaseException:
handle_cli_error(status_name) handle_cli_error(status_name)
raise JobFailed() raise JobFailed()
save_status(general['status_path'], pair.name, collection.name, save_status(
data_type='metadata', data=status) general["status_path"],
pair.name,
collection.name,
data_type="metadata",
data=status,
)

View file

@ -31,15 +31,15 @@ STATUS_DIR_PERMISSIONS = 0o700
class _StorageIndex: class _StorageIndex:
def __init__(self): def __init__(self):
self._storages = dict( self._storages = dict(
caldav='vdirsyncer.storage.dav.CalDAVStorage', caldav="vdirsyncer.storage.dav.CalDAVStorage",
carddav='vdirsyncer.storage.dav.CardDAVStorage', carddav="vdirsyncer.storage.dav.CardDAVStorage",
filesystem='vdirsyncer.storage.filesystem.FilesystemStorage', filesystem="vdirsyncer.storage.filesystem.FilesystemStorage",
http='vdirsyncer.storage.http.HttpStorage', http="vdirsyncer.storage.http.HttpStorage",
singlefile='vdirsyncer.storage.singlefile.SingleFileStorage', singlefile="vdirsyncer.storage.singlefile.SingleFileStorage",
google_calendar='vdirsyncer.storage.google.GoogleCalendarStorage', google_calendar="vdirsyncer.storage.google.GoogleCalendarStorage",
google_contacts='vdirsyncer.storage.google.GoogleContactsStorage', google_contacts="vdirsyncer.storage.google.GoogleContactsStorage",
etesync_calendars='vdirsyncer.storage.etesync.EtesyncCalendars', etesync_calendars="vdirsyncer.storage.etesync.EtesyncCalendars",
etesync_contacts='vdirsyncer.storage.etesync.EtesyncContacts' etesync_contacts="vdirsyncer.storage.etesync.EtesyncContacts",
) )
def __getitem__(self, name): def __getitem__(self, name):
@ -47,7 +47,7 @@ class _StorageIndex:
if not isinstance(item, str): if not isinstance(item, str):
return item return item
modname, clsname = item.rsplit('.', 1) modname, clsname = item.rsplit(".", 1)
mod = importlib.import_module(modname) mod = importlib.import_module(modname)
self._storages[name] = rv = getattr(mod, clsname) self._storages[name] = rv = getattr(mod, clsname)
assert rv.storage_name == name assert rv.storage_name == name
@ -63,12 +63,12 @@ class JobFailed(RuntimeError):
def handle_cli_error(status_name=None, e=None): def handle_cli_error(status_name=None, e=None):
''' """
Print a useful error message for the current exception. Print a useful error message for the current exception.
This is supposed to catch all exceptions, and should never raise any This is supposed to catch all exceptions, and should never raise any
exceptions itself. exceptions itself.
''' """
try: try:
if e is not None: if e is not None:
@ -80,101 +80,104 @@ def handle_cli_error(status_name=None, e=None):
except StorageEmpty as e: except StorageEmpty as e:
cli_logger.error( cli_logger.error(
'{status_name}: Storage "{name}" was completely emptied. If you ' '{status_name}: Storage "{name}" was completely emptied. If you '
'want to delete ALL entries on BOTH sides, then use ' "want to delete ALL entries on BOTH sides, then use "
'`vdirsyncer sync --force-delete {status_name}`. ' "`vdirsyncer sync --force-delete {status_name}`. "
'Otherwise delete the files for {status_name} in your status ' "Otherwise delete the files for {status_name} in your status "
'directory.'.format( "directory.".format(
name=e.empty_storage.instance_name, name=e.empty_storage.instance_name, status_name=status_name
status_name=status_name
) )
) )
except PartialSync as e: except PartialSync as e:
cli_logger.error( cli_logger.error(
'{status_name}: Attempted change on {storage}, which is read-only' "{status_name}: Attempted change on {storage}, which is read-only"
'. Set `partial_sync` in your pair section to `ignore` to ignore ' ". Set `partial_sync` in your pair section to `ignore` to ignore "
'those changes, or `revert` to revert them on the other side.' "those changes, or `revert` to revert them on the other side.".format(
.format(status_name=status_name, storage=e.storage) status_name=status_name, storage=e.storage
)
) )
except SyncConflict as e: except SyncConflict as e:
cli_logger.error( cli_logger.error(
'{status_name}: One item changed on both sides. Resolve this ' "{status_name}: One item changed on both sides. Resolve this "
'conflict manually, or by setting the `conflict_resolution` ' "conflict manually, or by setting the `conflict_resolution` "
'parameter in your config file.\n' "parameter in your config file.\n"
'See also {docs}/config.html#pair-section\n' "See also {docs}/config.html#pair-section\n"
'Item ID: {e.ident}\n' "Item ID: {e.ident}\n"
'Item href on side A: {e.href_a}\n' "Item href on side A: {e.href_a}\n"
'Item href on side B: {e.href_b}\n' "Item href on side B: {e.href_b}\n".format(
.format(status_name=status_name, e=e, docs=DOCS_HOME) status_name=status_name, e=e, docs=DOCS_HOME
)
) )
except IdentConflict as e: except IdentConflict as e:
cli_logger.error( cli_logger.error(
'{status_name}: Storage "{storage.instance_name}" contains ' '{status_name}: Storage "{storage.instance_name}" contains '
'multiple items with the same UID or even content. Vdirsyncer ' "multiple items with the same UID or even content. Vdirsyncer "
'will now abort the synchronization of this collection, because ' "will now abort the synchronization of this collection, because "
'the fix for this is not clear; It could be the result of a badly ' "the fix for this is not clear; It could be the result of a badly "
'behaving server. You can try running:\n\n' "behaving server. You can try running:\n\n"
' vdirsyncer repair {storage.instance_name}\n\n' " vdirsyncer repair {storage.instance_name}\n\n"
'But make sure to have a backup of your data in some form. The ' "But make sure to have a backup of your data in some form. The "
'offending hrefs are:\n\n{href_list}\n' "offending hrefs are:\n\n{href_list}\n".format(
.format(status_name=status_name, status_name=status_name,
storage=e.storage, storage=e.storage,
href_list='\n'.join(map(repr, e.hrefs))) href_list="\n".join(map(repr, e.hrefs)),
)
) )
except (click.Abort, KeyboardInterrupt, JobFailed): except (click.Abort, KeyboardInterrupt, JobFailed):
pass pass
except exceptions.PairNotFound as e: except exceptions.PairNotFound as e:
cli_logger.error( cli_logger.error(
'Pair {pair_name} does not exist. Please check your ' "Pair {pair_name} does not exist. Please check your "
'configuration file and make sure you\'ve typed the pair name ' "configuration file and make sure you've typed the pair name "
'correctly'.format(pair_name=e.pair_name) "correctly".format(pair_name=e.pair_name)
) )
except exceptions.InvalidResponse as e: except exceptions.InvalidResponse as e:
cli_logger.error( cli_logger.error(
'The server returned something vdirsyncer doesn\'t understand. ' "The server returned something vdirsyncer doesn't understand. "
'Error message: {!r}\n' "Error message: {!r}\n"
'While this is most likely a serverside problem, the vdirsyncer ' "While this is most likely a serverside problem, the vdirsyncer "
'devs are generally interested in such bugs. Please report it in ' "devs are generally interested in such bugs. Please report it in "
'the issue tracker at {}' "the issue tracker at {}".format(e, BUGTRACKER_HOME)
.format(e, BUGTRACKER_HOME)
) )
except exceptions.CollectionRequired: except exceptions.CollectionRequired:
cli_logger.error( cli_logger.error(
'One or more storages don\'t support `collections = null`. ' "One or more storages don't support `collections = null`. "
'You probably want to set `collections = ["from a", "from b"]`.' 'You probably want to set `collections = ["from a", "from b"]`.'
) )
except Exception as e: except Exception as e:
tb = sys.exc_info()[2] tb = sys.exc_info()[2]
import traceback import traceback
tb = traceback.format_tb(tb) tb = traceback.format_tb(tb)
if status_name: if status_name:
msg = f'Unknown error occurred for {status_name}' msg = f"Unknown error occurred for {status_name}"
else: else:
msg = 'Unknown error occurred' msg = "Unknown error occurred"
msg += f': {e}\nUse `-vdebug` to see the full traceback.' msg += f": {e}\nUse `-vdebug` to see the full traceback."
cli_logger.error(msg) cli_logger.error(msg)
cli_logger.debug(''.join(tb)) cli_logger.debug("".join(tb))
def get_status_name(pair, collection): def get_status_name(pair, collection):
if collection is None: if collection is None:
return pair return pair
return pair + '/' + collection return pair + "/" + collection
def get_status_path(base_path, pair, collection=None, data_type=None): def get_status_path(base_path, pair, collection=None, data_type=None):
assert data_type is not None assert data_type is not None
status_name = get_status_name(pair, collection) status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name)) path = expand_path(os.path.join(base_path, status_name))
if os.path.isfile(path) and data_type == 'items': if os.path.isfile(path) and data_type == "items":
new_path = path + '.items' new_path = path + ".items"
# XXX: Legacy migration # XXX: Legacy migration
cli_logger.warning('Migrating statuses: Renaming {} to {}' cli_logger.warning(
.format(path, new_path)) "Migrating statuses: Renaming {} to {}".format(path, new_path)
)
os.rename(path, new_path) os.rename(path, new_path)
path += '.' + data_type path += "." + data_type
return path return path
@ -205,20 +208,20 @@ def prepare_status_path(path):
@contextlib.contextmanager @contextlib.contextmanager
def manage_sync_status(base_path, pair_name, collection_name): def manage_sync_status(base_path, pair_name, collection_name):
path = get_status_path(base_path, pair_name, collection_name, 'items') path = get_status_path(base_path, pair_name, collection_name, "items")
status = None status = None
legacy_status = None legacy_status = None
try: try:
# XXX: Legacy migration # XXX: Legacy migration
with open(path, 'rb') as f: with open(path, "rb") as f:
if f.read(1) == b'{': if f.read(1) == b"{":
f.seek(0) f.seek(0)
legacy_status = dict(json.load(f)) legacy_status = dict(json.load(f))
except (OSError, ValueError): except (OSError, ValueError):
pass pass
if legacy_status is not None: if legacy_status is not None:
cli_logger.warning('Migrating legacy status to sqlite') cli_logger.warning("Migrating legacy status to sqlite")
os.remove(path) os.remove(path)
status = SqliteStatus(path) status = SqliteStatus(path)
status.load_legacy_status(legacy_status) status.load_legacy_status(legacy_status)
@ -233,10 +236,10 @@ def save_status(base_path, pair, collection=None, data_type=None, data=None):
assert data_type is not None assert data_type is not None
assert data is not None assert data is not None
status_name = get_status_name(pair, collection) status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name)) + '.' + data_type path = expand_path(os.path.join(base_path, status_name)) + "." + data_type
prepare_status_path(path) prepare_status_path(path)
with atomic_write(path, mode='w', overwrite=True) as f: with atomic_write(path, mode="w", overwrite=True) as f:
json.dump(data, f) json.dump(data, f)
os.chmod(path, STATUS_PERMISSIONS) os.chmod(path, STATUS_PERMISSIONS)
@ -244,20 +247,19 @@ def save_status(base_path, pair, collection=None, data_type=None, data=None):
def storage_class_from_config(config): def storage_class_from_config(config):
config = dict(config) config = dict(config)
storage_name = config.pop('type') storage_name = config.pop("type")
try: try:
cls = storage_names[storage_name] cls = storage_names[storage_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(f"Unknown storage type: {storage_name}")
f'Unknown storage type: {storage_name}')
return cls, config return cls, config
def storage_instance_from_config(config, create=True): def storage_instance_from_config(config, create=True):
''' """
:param config: A configuration dictionary to pass as kwargs to the class :param config: A configuration dictionary to pass as kwargs to the class
corresponding to config['type'] corresponding to config['type']
''' """
cls, new_config = storage_class_from_config(config) cls, new_config = storage_class_from_config(config)
@ -266,7 +268,8 @@ def storage_instance_from_config(config, create=True):
except exceptions.CollectionNotFound as e: except exceptions.CollectionNotFound as e:
if create: if create:
config = handle_collection_not_found( config = handle_collection_not_found(
config, config.get('collection', None), e=str(e)) config, config.get("collection", None), e=str(e)
)
return storage_instance_from_config(config, create=False) return storage_instance_from_config(config, create=False)
else: else:
raise raise
@ -276,7 +279,7 @@ def storage_instance_from_config(config, create=True):
def handle_storage_init_error(cls, config): def handle_storage_init_error(cls, config):
e = sys.exc_info()[1] e = sys.exc_info()[1]
if not isinstance(e, TypeError) or '__init__' not in repr(e): if not isinstance(e, TypeError) or "__init__" not in repr(e):
raise raise
all, required = get_storage_init_args(cls) all, required = get_storage_init_args(cls)
@ -288,30 +291,34 @@ def handle_storage_init_error(cls, config):
if missing: if missing:
problems.append( problems.append(
'{} storage requires the parameters: {}' "{} storage requires the parameters: {}".format(
.format(cls.storage_name, ', '.join(missing))) cls.storage_name, ", ".join(missing)
)
)
if invalid: if invalid:
problems.append( problems.append(
'{} storage doesn\'t take the parameters: {}' "{} storage doesn't take the parameters: {}".format(
.format(cls.storage_name, ', '.join(invalid))) cls.storage_name, ", ".join(invalid)
)
)
if not problems: if not problems:
raise e raise e
raise exceptions.UserError( raise exceptions.UserError(
'Failed to initialize {}'.format(config['instance_name']), "Failed to initialize {}".format(config["instance_name"]), problems=problems
problems=problems
) )
class WorkerQueue: class WorkerQueue:
''' """
A simple worker-queue setup. A simple worker-queue setup.
Note that workers quit if queue is empty. That means you have to first put Note that workers quit if queue is empty. That means you have to first put
things into the queue before spawning the worker! things into the queue before spawning the worker!
''' """
def __init__(self, max_workers): def __init__(self, max_workers):
self._queue = queue.Queue() self._queue = queue.Queue()
self._workers = [] self._workers = []
@ -369,7 +376,7 @@ class WorkerQueue:
if not self._workers: if not self._workers:
# Ugly hack, needed because ui_worker is not running. # Ugly hack, needed because ui_worker is not running.
click.echo = _echo click.echo = _echo
cli_logger.critical('Nothing to do.') cli_logger.critical("Nothing to do.")
sys.exit(5) sys.exit(5)
ui_worker.run() ui_worker.run()
@ -381,8 +388,9 @@ class WorkerQueue:
tasks_done = next(self.num_done_tasks) tasks_done = next(self.num_done_tasks)
if tasks_failed > 0: if tasks_failed > 0:
cli_logger.error('{} out of {} tasks failed.' cli_logger.error(
.format(tasks_failed, tasks_done)) "{} out of {} tasks failed.".format(tasks_failed, tasks_done)
)
sys.exit(1) sys.exit(1)
def put(self, f): def put(self, f):
@ -392,25 +400,30 @@ class WorkerQueue:
def assert_permissions(path, wanted): def assert_permissions(path, wanted):
permissions = os.stat(path).st_mode & 0o777 permissions = os.stat(path).st_mode & 0o777
if permissions > wanted: if permissions > wanted:
cli_logger.warning('Correcting permissions of {} from {:o} to {:o}' cli_logger.warning(
.format(path, permissions, wanted)) "Correcting permissions of {} from {:o} to {:o}".format(
path, permissions, wanted
)
)
os.chmod(path, wanted) os.chmod(path, wanted)
def handle_collection_not_found(config, collection, e=None): def handle_collection_not_found(config, collection, e=None):
storage_name = config.get('instance_name', None) storage_name = config.get("instance_name", None)
cli_logger.warning('{}No collection {} found for storage {}.' cli_logger.warning(
.format(f'{e}\n' if e else '', "{}No collection {} found for storage {}.".format(
json.dumps(collection), storage_name)) f"{e}\n" if e else "", json.dumps(collection), storage_name
)
)
if click.confirm('Should vdirsyncer attempt to create it?'): if click.confirm("Should vdirsyncer attempt to create it?"):
storage_type = config['type'] storage_type = config["type"]
cls, config = storage_class_from_config(config) cls, config = storage_class_from_config(config)
config['collection'] = collection config["collection"] = collection
try: try:
args = cls.create_collection(**config) args = cls.create_collection(**config)
args['type'] = storage_type args["type"] = storage_type
return args return args
except NotImplementedError as e: except NotImplementedError as e:
cli_logger.error(e) cli_logger.error(e)
@ -418,5 +431,5 @@ def handle_collection_not_found(config, collection, e=None):
raise exceptions.UserError( raise exceptions.UserError(
'Unable to find or create collection "{collection}" for ' 'Unable to find or create collection "{collection}" for '
'storage "{storage}". Please create the collection ' 'storage "{storage}". Please create the collection '
'yourself.'.format(collection=collection, "yourself.".format(collection=collection, storage=storage_name)
storage=storage_name)) )

View file

@ -1,80 +1,81 @@
''' """
Contains exception classes used by vdirsyncer. Not all exceptions are here, Contains exception classes used by vdirsyncer. Not all exceptions are here,
only the most commonly used ones. only the most commonly used ones.
''' """
class Error(Exception): class Error(Exception):
'''Baseclass for all errors.''' """Baseclass for all errors."""
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
for key, value in kwargs.items(): for key, value in kwargs.items():
if getattr(self, key, object()) is not None: # pragma: no cover if getattr(self, key, object()) is not None: # pragma: no cover
raise TypeError(f'Invalid argument: {key}') raise TypeError(f"Invalid argument: {key}")
setattr(self, key, value) setattr(self, key, value)
super().__init__(*args) super().__init__(*args)
class UserError(Error, ValueError): class UserError(Error, ValueError):
'''Wrapper exception to be used to signify the traceback should not be """Wrapper exception to be used to signify the traceback should not be
shown to the user.''' shown to the user."""
problems = None problems = None
def __str__(self): def __str__(self):
msg = Error.__str__(self) msg = Error.__str__(self)
for problem in self.problems or (): for problem in self.problems or ():
msg += f'\n - {problem}' msg += f"\n - {problem}"
return msg return msg
class CollectionNotFound(Error): class CollectionNotFound(Error):
'''Collection not found''' """Collection not found"""
class PairNotFound(Error): class PairNotFound(Error):
'''Pair not found''' """Pair not found"""
pair_name = None pair_name = None
class PreconditionFailed(Error): class PreconditionFailed(Error):
''' """
- The item doesn't exist although it should - The item doesn't exist although it should
- The item exists although it shouldn't - The item exists although it shouldn't
- The etags don't match. - The etags don't match.
Due to CalDAV we can't actually say which error it is. Due to CalDAV we can't actually say which error it is.
This error may indicate race conditions. This error may indicate race conditions.
''' """
class NotFoundError(PreconditionFailed): class NotFoundError(PreconditionFailed):
'''Item not found''' """Item not found"""
class AlreadyExistingError(PreconditionFailed): class AlreadyExistingError(PreconditionFailed):
'''Item already exists.''' """Item already exists."""
existing_href = None existing_href = None
class WrongEtagError(PreconditionFailed): class WrongEtagError(PreconditionFailed):
'''Wrong etag''' """Wrong etag"""
class ReadOnlyError(Error): class ReadOnlyError(Error):
'''Storage is read-only.''' """Storage is read-only."""
class InvalidResponse(Error, ValueError): class InvalidResponse(Error, ValueError):
'''The backend returned an invalid result.''' """The backend returned an invalid result."""
class UnsupportedMetadataError(Error, NotImplementedError): class UnsupportedMetadataError(Error, NotImplementedError):
'''The storage doesn't support this type of metadata.''' """The storage doesn't support this type of metadata."""
class CollectionRequired(Error): class CollectionRequired(Error):
'''`collection = null` is not allowed.''' """`collection = null` is not allowed."""

View file

@ -9,22 +9,23 @@ from .utils import expand_path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
USERAGENT = f'vdirsyncer/{__version__}' USERAGENT = f"vdirsyncer/{__version__}"
def _detect_faulty_requests(): # pragma: no cover def _detect_faulty_requests(): # pragma: no cover
text = ( text = (
'Error during import: {e}\n\n' "Error during import: {e}\n\n"
'If you have installed vdirsyncer from a distro package, please file ' "If you have installed vdirsyncer from a distro package, please file "
'a bug against that package, not vdirsyncer.\n\n' "a bug against that package, not vdirsyncer.\n\n"
'Consult {d}/problems.html#requests-related-importerrors' "Consult {d}/problems.html#requests-related-importerrors"
'-based-distributions on how to work around this.' "-based-distributions on how to work around this."
) )
try: try:
from requests_toolbelt.auth.guess import GuessAuth # noqa from requests_toolbelt.auth.guess import GuessAuth # noqa
except ImportError as e: except ImportError as e:
import sys import sys
print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr) print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr)
sys.exit(1) sys.exit(1)
@ -35,28 +36,30 @@ del _detect_faulty_requests
def prepare_auth(auth, username, password): def prepare_auth(auth, username, password):
if username and password: if username and password:
if auth == 'basic' or auth is None: if auth == "basic" or auth is None:
return (username, password) return (username, password)
elif auth == 'digest': elif auth == "digest":
from requests.auth import HTTPDigestAuth from requests.auth import HTTPDigestAuth
return HTTPDigestAuth(username, password) return HTTPDigestAuth(username, password)
elif auth == 'guess': elif auth == "guess":
try: try:
from requests_toolbelt.auth.guess import GuessAuth from requests_toolbelt.auth.guess import GuessAuth
except ImportError: except ImportError:
raise exceptions.UserError( raise exceptions.UserError(
'Your version of requests_toolbelt is too ' "Your version of requests_toolbelt is too "
'old for `guess` authentication. At least ' "old for `guess` authentication. At least "
'version 0.4.0 is required.' "version 0.4.0 is required."
) )
else: else:
return GuessAuth(username, password) return GuessAuth(username, password)
else: else:
raise exceptions.UserError('Unknown authentication method: {}' raise exceptions.UserError("Unknown authentication method: {}".format(auth))
.format(auth))
elif auth: elif auth:
raise exceptions.UserError('You need to specify username and password ' raise exceptions.UserError(
'for {} authentication.'.format(auth)) "You need to specify username and password "
"for {} authentication.".format(auth)
)
else: else:
return None return None
@ -65,24 +68,26 @@ def prepare_verify(verify, verify_fingerprint):
if isinstance(verify, (str, bytes)): if isinstance(verify, (str, bytes)):
verify = expand_path(verify) verify = expand_path(verify)
elif not isinstance(verify, bool): elif not isinstance(verify, bool):
raise exceptions.UserError('Invalid value for verify ({}), ' raise exceptions.UserError(
'must be a path to a PEM-file or boolean.' "Invalid value for verify ({}), "
.format(verify)) "must be a path to a PEM-file or boolean.".format(verify)
)
if verify_fingerprint is not None: if verify_fingerprint is not None:
if not isinstance(verify_fingerprint, (bytes, str)): if not isinstance(verify_fingerprint, (bytes, str)):
raise exceptions.UserError('Invalid value for verify_fingerprint ' raise exceptions.UserError(
'({}), must be a string or null.' "Invalid value for verify_fingerprint "
.format(verify_fingerprint)) "({}), must be a string or null.".format(verify_fingerprint)
)
elif not verify: elif not verify:
raise exceptions.UserError( raise exceptions.UserError(
'Disabling all SSL validation is forbidden. Consider setting ' "Disabling all SSL validation is forbidden. Consider setting "
'verify_fingerprint if you have a broken or self-signed cert.' "verify_fingerprint if you have a broken or self-signed cert."
) )
return { return {
'verify': verify, "verify": verify,
'verify_fingerprint': verify_fingerprint, "verify_fingerprint": verify_fingerprint,
} }
@ -95,22 +100,24 @@ def prepare_client_cert(cert):
def _install_fingerprint_adapter(session, fingerprint): def _install_fingerprint_adapter(session, fingerprint):
prefix = 'https://' prefix = "https://"
try: try:
from requests_toolbelt.adapters.fingerprint import \ from requests_toolbelt.adapters.fingerprint import FingerprintAdapter
FingerprintAdapter
except ImportError: except ImportError:
raise RuntimeError('`verify_fingerprint` can only be used with ' raise RuntimeError(
'requests-toolbelt versions >= 0.4.0') "`verify_fingerprint` can only be used with "
"requests-toolbelt versions >= 0.4.0"
)
if not isinstance(session.adapters[prefix], FingerprintAdapter): if not isinstance(session.adapters[prefix], FingerprintAdapter):
fingerprint_adapter = FingerprintAdapter(fingerprint) fingerprint_adapter = FingerprintAdapter(fingerprint)
session.mount(prefix, fingerprint_adapter) session.mount(prefix, fingerprint_adapter)
def request(method, url, session=None, latin1_fallback=True, def request(
verify_fingerprint=None, **kwargs): method, url, session=None, latin1_fallback=True, verify_fingerprint=None, **kwargs
''' ):
"""
Wrapper method for requests, to ease logging and mocking. Parameters should Wrapper method for requests, to ease logging and mocking. Parameters should
be the same as for ``requests.request``, except: be the same as for ``requests.request``, except:
@ -123,7 +130,7 @@ def request(method, url, session=None, latin1_fallback=True,
autodetection (usually ending up with utf8) instead of plainly falling autodetection (usually ending up with utf8) instead of plainly falling
back to this silly default. See back to this silly default. See
https://github.com/kennethreitz/requests/issues/2042 https://github.com/kennethreitz/requests/issues/2042
''' """
if session is None: if session is None:
session = requests.Session() session = requests.Session()
@ -131,25 +138,28 @@ def request(method, url, session=None, latin1_fallback=True,
if verify_fingerprint is not None: if verify_fingerprint is not None:
_install_fingerprint_adapter(session, verify_fingerprint) _install_fingerprint_adapter(session, verify_fingerprint)
session.hooks = dict(response=_fix_redirects) session.hooks = {"response": _fix_redirects}
func = session.request func = session.request
logger.debug(f'{method} {url}') logger.debug("=" * 20)
logger.debug(kwargs.get('headers', {})) logger.debug(f"{method} {url}")
logger.debug(kwargs.get('data', None)) logger.debug(kwargs.get("headers", {}))
logger.debug('Sending request...') logger.debug(kwargs.get("data", None))
logger.debug("Sending request...")
assert isinstance(kwargs.get('data', b''), bytes) assert isinstance(kwargs.get("data", b""), bytes)
r = func(method, url, **kwargs) r = func(method, url, **kwargs)
# See https://github.com/kennethreitz/requests/issues/2042 # See https://github.com/kennethreitz/requests/issues/2042
content_type = r.headers.get('Content-Type', '') content_type = r.headers.get("Content-Type", "")
if not latin1_fallback and \ if (
'charset' not in content_type and \ not latin1_fallback
content_type.startswith('text/'): and "charset" not in content_type
logger.debug('Removing latin1 fallback') and content_type.startswith("text/")
):
logger.debug("Removing latin1 fallback")
r.encoding = None r.encoding = None
logger.debug(r.status_code) logger.debug(r.status_code)
@ -166,7 +176,7 @@ def request(method, url, session=None, latin1_fallback=True,
def _fix_redirects(r, *args, **kwargs): def _fix_redirects(r, *args, **kwargs):
''' """
Requests discards of the body content when it is following a redirect that Requests discards of the body content when it is following a redirect that
is not a 307 or 308. We never want that to happen. is not a 307 or 308. We never want that to happen.
@ -177,7 +187,7 @@ def _fix_redirects(r, *args, **kwargs):
FIXME: This solution isn't very nice. A new hook in requests would be FIXME: This solution isn't very nice. A new hook in requests would be
better. better.
''' """
if r.is_redirect: if r.is_redirect:
logger.debug('Rewriting status code from %s to 307', r.status_code) logger.debug("Rewriting status code from %s to 307", r.status_code)
r.status_code = 307 r.status_code = 307

View file

@ -16,39 +16,37 @@ class MetaSyncConflict(MetaSyncError):
def metasync(storage_a, storage_b, status, keys, conflict_resolution=None): def metasync(storage_a, storage_b, status, keys, conflict_resolution=None):
def _a_to_b(): def _a_to_b():
logger.info(f'Copying {key} to {storage_b}') logger.info(f"Copying {key} to {storage_b}")
storage_b.set_meta(key, a) storage_b.set_meta(key, a)
status[key] = a status[key] = a
def _b_to_a(): def _b_to_a():
logger.info(f'Copying {key} to {storage_a}') logger.info(f"Copying {key} to {storage_a}")
storage_a.set_meta(key, b) storage_a.set_meta(key, b)
status[key] = b status[key] = b
def _resolve_conflict(): def _resolve_conflict():
if a == b: if a == b:
status[key] = a status[key] = a
elif conflict_resolution == 'a wins': elif conflict_resolution == "a wins":
_a_to_b() _a_to_b()
elif conflict_resolution == 'b wins': elif conflict_resolution == "b wins":
_b_to_a() _b_to_a()
else: else:
if callable(conflict_resolution): if callable(conflict_resolution):
logger.warning('Custom commands don\'t work on metasync.') logger.warning("Custom commands don't work on metasync.")
elif conflict_resolution is not None: elif conflict_resolution is not None:
raise exceptions.UserError( raise exceptions.UserError("Invalid conflict resolution setting.")
'Invalid conflict resolution setting.'
)
raise MetaSyncConflict(key) raise MetaSyncConflict(key)
for key in keys: for key in keys:
a = storage_a.get_meta(key) a = storage_a.get_meta(key)
b = storage_b.get_meta(key) b = storage_b.get_meta(key)
s = normalize_meta_value(status.get(key)) s = normalize_meta_value(status.get(key))
logger.debug(f'Key: {key}') logger.debug(f"Key: {key}")
logger.debug(f'A: {a}') logger.debug(f"A: {a}")
logger.debug(f'B: {b}') logger.debug(f"B: {b}")
logger.debug(f'S: {s}') logger.debug(f"S: {s}")
if a != s and b != s: if a != s and b != s:
_resolve_conflict() _resolve_conflict()

View file

@ -16,17 +16,17 @@ def repair_storage(storage, repair_unsafe_uid):
all_hrefs = list(storage.list()) all_hrefs = list(storage.list())
for i, (href, _) in enumerate(all_hrefs): for i, (href, _) in enumerate(all_hrefs):
item, etag = storage.get(href) item, etag = storage.get(href)
logger.info('[{}/{}] Processing {}' logger.info("[{}/{}] Processing {}".format(i, len(all_hrefs), href))
.format(i, len(all_hrefs), href))
try: try:
new_item = repair_item(href, item, seen_uids, repair_unsafe_uid) new_item = repair_item(href, item, seen_uids, repair_unsafe_uid)
except IrreparableItem: except IrreparableItem:
logger.error('Item {!r} is malformed beyond repair. ' logger.error(
'The PRODID property may indicate which software ' "Item {!r} is malformed beyond repair. "
'created this item.' "The PRODID property may indicate which software "
.format(href)) "created this item.".format(href)
logger.error(f'Item content: {item.raw!r}') )
logger.error(f"Item content: {item.raw!r}")
continue continue
seen_uids.add(new_item.uid) seen_uids.add(new_item.uid)
@ -45,17 +45,18 @@ def repair_item(href, item, seen_uids, repair_unsafe_uid):
new_item = item new_item = item
if not item.uid: if not item.uid:
logger.warning('No UID, assigning random UID.') logger.warning("No UID, assigning random UID.")
new_item = item.with_uid(generate_href()) new_item = item.with_uid(generate_href())
elif item.uid in seen_uids: elif item.uid in seen_uids:
logger.warning('Duplicate UID, assigning random UID.') logger.warning("Duplicate UID, assigning random UID.")
new_item = item.with_uid(generate_href()) new_item = item.with_uid(generate_href())
elif not href_safe(item.uid) or not href_safe(basename(href)): elif not href_safe(item.uid) or not href_safe(basename(href)):
if not repair_unsafe_uid: if not repair_unsafe_uid:
logger.warning('UID may cause problems, add ' logger.warning(
'--repair-unsafe-uid to repair.') "UID may cause problems, add " "--repair-unsafe-uid to repair."
)
else: else:
logger.warning('UID or href is unsafe, assigning random UID.') logger.warning("UID or href is unsafe, assigning random UID.")
new_item = item.with_uid(generate_href()) new_item = item.with_uid(generate_href())
if not new_item.uid: if not new_item.uid:

View file

@ -1,6 +1,6 @@
''' """
There are storage classes which control the access to one vdir-collection and There are storage classes which control the access to one vdir-collection and
offer basic CRUD-ish methods for modifying those collections. The exact offer basic CRUD-ish methods for modifying those collections. The exact
interface is described in `vdirsyncer.storage.base`, the `Storage` class should interface is described in `vdirsyncer.storage.base`, the `Storage` class should
be a superclass of all storage classes. be a superclass of all storage classes.
''' """

Some files were not shown because too many files have changed in this diff Show more