mirror of
https://github.com/samsonjs/vdirsyncer.git
synced 2026-03-28 09:25:50 +00:00
Compare commits
No commits in common. "main" and "0.9.2" have entirely different histories.
157 changed files with 6733 additions and 11765 deletions
|
|
@ -1,49 +0,0 @@
|
||||||
# Run tests using the packaged dependencies on ArchLinux.
|
|
||||||
|
|
||||||
image: archlinux
|
|
||||||
packages:
|
|
||||||
- docker
|
|
||||||
- docker-compose
|
|
||||||
# Build dependencies:
|
|
||||||
- python-wheel
|
|
||||||
- python-build
|
|
||||||
- python-installer
|
|
||||||
- python-setuptools-scm
|
|
||||||
# Runtime dependencies:
|
|
||||||
- python-click
|
|
||||||
- python-click-log
|
|
||||||
- python-click-threading
|
|
||||||
- python-requests
|
|
||||||
- python-aiohttp-oauthlib
|
|
||||||
- python-tenacity
|
|
||||||
# Test dependencies:
|
|
||||||
- python-hypothesis
|
|
||||||
- python-pytest-cov
|
|
||||||
- python-pytest-httpserver
|
|
||||||
- python-trustme
|
|
||||||
- python-pytest-asyncio
|
|
||||||
- python-aiohttp
|
|
||||||
- python-aiostream
|
|
||||||
- python-aioresponses
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
environment:
|
|
||||||
BUILD: test
|
|
||||||
CI: true
|
|
||||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
|
||||||
DAV_SERVER: radicale xandikos
|
|
||||||
REQUIREMENTS: release
|
|
||||||
# TODO: ETESYNC_TESTS
|
|
||||||
tasks:
|
|
||||||
- check-python:
|
|
||||||
python --version | grep 'Python 3.13'
|
|
||||||
- docker: |
|
|
||||||
sudo systemctl start docker
|
|
||||||
- setup: |
|
|
||||||
cd vdirsyncer
|
|
||||||
python -m build --wheel --skip-dependency-check --no-isolation
|
|
||||||
sudo python -m installer dist/*.whl
|
|
||||||
- test: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e ci-test
|
|
||||||
make -e ci-test-storage
|
|
||||||
|
|
@ -1,36 +0,0 @@
|
||||||
# Run tests using oldest available dependency versions.
|
|
||||||
#
|
|
||||||
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
|
|
||||||
# here, and consider that our "oldest suppported environment".
|
|
||||||
|
|
||||||
image: alpine/3.19 # python 3.11
|
|
||||||
packages:
|
|
||||||
- docker
|
|
||||||
- docker-cli
|
|
||||||
- docker-compose
|
|
||||||
- py3-pip
|
|
||||||
- python3-dev
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
environment:
|
|
||||||
BUILD: test
|
|
||||||
CI: true
|
|
||||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
|
||||||
DAV_SERVER: radicale xandikos
|
|
||||||
REQUIREMENTS: minimal
|
|
||||||
tasks:
|
|
||||||
- venv: |
|
|
||||||
python3 -m venv $HOME/venv
|
|
||||||
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
|
||||||
- docker: |
|
|
||||||
sudo addgroup $(whoami) docker
|
|
||||||
sudo service docker start
|
|
||||||
- setup: |
|
|
||||||
cd vdirsyncer
|
|
||||||
# Hack, no idea why it's needed
|
|
||||||
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
|
|
||||||
make -e install-dev
|
|
||||||
- test: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e ci-test
|
|
||||||
make -e ci-test-storage
|
|
||||||
|
|
@ -1,45 +0,0 @@
|
||||||
# Run tests using latest dependencies from PyPI
|
|
||||||
|
|
||||||
image: archlinux
|
|
||||||
packages:
|
|
||||||
- docker
|
|
||||||
- docker-compose
|
|
||||||
- python-pip
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
secrets:
|
|
||||||
- 4d9a6dfe-5c8d-48bd-b864-a2f5d772c536
|
|
||||||
environment:
|
|
||||||
BUILD: test
|
|
||||||
CI: true
|
|
||||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
|
||||||
DAV_SERVER: baikal radicale xandikos
|
|
||||||
REQUIREMENTS: release
|
|
||||||
# TODO: ETESYNC_TESTS
|
|
||||||
tasks:
|
|
||||||
- venv: |
|
|
||||||
python -m venv $HOME/venv
|
|
||||||
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
|
||||||
- docker: |
|
|
||||||
sudo systemctl start docker
|
|
||||||
- setup: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e install-dev
|
|
||||||
- test: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e ci-test
|
|
||||||
make -e ci-test-storage
|
|
||||||
- check: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make check
|
|
||||||
- check-secrets: |
|
|
||||||
# Stop here if this is a PR. PRs can't run with the below secrets.
|
|
||||||
[ -f ~/fastmail-secrets ] || complete-build
|
|
||||||
- extra-storages: |
|
|
||||||
set +x
|
|
||||||
source ~/fastmail-secrets
|
|
||||||
set -x
|
|
||||||
|
|
||||||
cd vdirsyncer
|
|
||||||
export PATH=$PATH:~/.local/bin/
|
|
||||||
DAV_SERVER=fastmail pytest tests/storage
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
comment: false
|
|
||||||
coverage:
|
|
||||||
status:
|
|
||||||
patch: false
|
|
||||||
22
.coveragerc
22
.coveragerc
|
|
@ -1,22 +0,0 @@
|
||||||
[run]
|
|
||||||
branch = True
|
|
||||||
|
|
||||||
[paths]
|
|
||||||
source = vdirsyncer/
|
|
||||||
|
|
||||||
[report]
|
|
||||||
exclude_lines =
|
|
||||||
# Have to re-enable the standard pragma
|
|
||||||
pragma: no cover
|
|
||||||
|
|
||||||
# Don't complain about missing debug-only code:
|
|
||||||
def __repr__
|
|
||||||
if self\.debug
|
|
||||||
|
|
||||||
# Don't complain if tests don't hit defensive assertion code:
|
|
||||||
raise AssertionError
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
# Don't complain if non-runnable code isn't run:
|
|
||||||
if 0:
|
|
||||||
if __name__ == .__main__.:
|
|
||||||
1
.envrc
1
.envrc
|
|
@ -1 +0,0 @@
|
||||||
layout python3
|
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -6,7 +6,6 @@ build
|
||||||
env
|
env
|
||||||
*.egg-info
|
*.egg-info
|
||||||
.cache
|
.cache
|
||||||
.pytest_cache
|
|
||||||
.eggs
|
.eggs
|
||||||
.egg
|
.egg
|
||||||
.xprocess
|
.xprocess
|
||||||
|
|
@ -14,4 +13,3 @@ dist
|
||||||
docs/_build/
|
docs/_build/
|
||||||
vdirsyncer/version.py
|
vdirsyncer/version.py
|
||||||
.hypothesis
|
.hypothesis
|
||||||
coverage.xml
|
|
||||||
|
|
|
||||||
12
.gitmodules
vendored
Normal file
12
.gitmodules
vendored
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
[submodule "tests/storage/servers/baikal"]
|
||||||
|
path = tests/storage/servers/baikal
|
||||||
|
url = https://github.com/vdirsyncer/baikal-testserver
|
||||||
|
[submodule "tests/storage/servers/owncloud"]
|
||||||
|
path = tests/storage/servers/owncloud
|
||||||
|
url = https://github.com/vdirsyncer/owncloud-testserver
|
||||||
|
[submodule "tests/storage/servers/mysteryshack"]
|
||||||
|
path = tests/storage/servers/mysteryshack
|
||||||
|
url = https://github.com/vdirsyncer/mysteryshack-testserver
|
||||||
|
[submodule "tests/storage/servers/davical"]
|
||||||
|
path = tests/storage/servers/davical
|
||||||
|
url = https://github.com/vdirsyncer/davical-testserver
|
||||||
|
|
@ -1,39 +0,0 @@
|
||||||
repos:
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
|
||||||
rev: v5.0.0
|
|
||||||
hooks:
|
|
||||||
- id: trailing-whitespace
|
|
||||||
args: [--markdown-linebreak-ext=md]
|
|
||||||
- id: end-of-file-fixer
|
|
||||||
- id: check-toml
|
|
||||||
- id: check-added-large-files
|
|
||||||
- id: debug-statements
|
|
||||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
|
||||||
rev: "v1.15.0"
|
|
||||||
hooks:
|
|
||||||
- id: mypy
|
|
||||||
files: vdirsyncer/.*
|
|
||||||
additional_dependencies:
|
|
||||||
- types-setuptools
|
|
||||||
- types-docutils
|
|
||||||
- types-requests
|
|
||||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
|
||||||
rev: 'v0.11.4'
|
|
||||||
hooks:
|
|
||||||
- id: ruff
|
|
||||||
args: [--fix, --exit-non-zero-on-fix]
|
|
||||||
- id: ruff-format
|
|
||||||
- repo: local
|
|
||||||
hooks:
|
|
||||||
- id: typos-syncroniz
|
|
||||||
name: typos-syncroniz
|
|
||||||
language: system
|
|
||||||
# Not how you spell "synchronise"
|
|
||||||
entry: sh -c "git grep -i syncroniz"
|
|
||||||
files: ".*/.*"
|
|
||||||
- id: typos-text-icalendar
|
|
||||||
name: typos-text-icalendar
|
|
||||||
language: system
|
|
||||||
# It's "text/calendar", no "i".
|
|
||||||
entry: sh -c "git grep -i 'text/icalendar'"
|
|
||||||
files: ".*/.*"
|
|
||||||
|
|
@ -1,16 +0,0 @@
|
||||||
version: 2
|
|
||||||
|
|
||||||
sphinx:
|
|
||||||
configuration: docs/conf.py
|
|
||||||
|
|
||||||
build:
|
|
||||||
os: "ubuntu-22.04"
|
|
||||||
tools:
|
|
||||||
python: "3.9"
|
|
||||||
|
|
||||||
python:
|
|
||||||
install:
|
|
||||||
- method: pip
|
|
||||||
path: .
|
|
||||||
extra_requirements:
|
|
||||||
- docs
|
|
||||||
116
.travis.yml
Normal file
116
.travis.yml
Normal file
|
|
@ -0,0 +1,116 @@
|
||||||
|
# Generated by scripts/make_travisconf.py
|
||||||
|
|
||||||
|
sudo: true
|
||||||
|
language: python
|
||||||
|
|
||||||
|
install:
|
||||||
|
- ". scripts/travis-install.sh"
|
||||||
|
- "pip install -U pip"
|
||||||
|
- "pip install wheel"
|
||||||
|
- "make -e install-dev"
|
||||||
|
- "make -e install-$BUILD"
|
||||||
|
|
||||||
|
script:
|
||||||
|
- "make -e $BUILD"
|
||||||
|
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=style
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=devel
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=release
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=minimal
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=release
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=owncloud REQUIREMENTS=devel
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=owncloud REQUIREMENTS=release
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=owncloud REQUIREMENTS=minimal
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=baikal REQUIREMENTS=devel
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=baikal REQUIREMENTS=release
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=baikal REQUIREMENTS=minimal
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=davical REQUIREMENTS=devel
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=davical REQUIREMENTS=release
|
||||||
|
- python: 2.7
|
||||||
|
env: BUILD=test DAV_SERVER=davical REQUIREMENTS=minimal
|
||||||
|
- python: 3.3
|
||||||
|
env: BUILD=style
|
||||||
|
- python: 3.3
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel
|
||||||
|
- python: 3.3
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=release
|
||||||
|
- python: 3.3
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal
|
||||||
|
- python: 3.4
|
||||||
|
env: BUILD=style
|
||||||
|
- python: 3.4
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel
|
||||||
|
- python: 3.4
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=release
|
||||||
|
- python: 3.4
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=style
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=devel
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=release
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=minimal
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=release
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=owncloud REQUIREMENTS=devel
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=owncloud REQUIREMENTS=release
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=owncloud REQUIREMENTS=minimal
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=baikal REQUIREMENTS=devel
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=baikal REQUIREMENTS=release
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=baikal REQUIREMENTS=minimal
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=davical REQUIREMENTS=devel
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=davical REQUIREMENTS=release
|
||||||
|
- python: 3.5
|
||||||
|
env: BUILD=test DAV_SERVER=davical REQUIREMENTS=minimal
|
||||||
|
- python: pypy
|
||||||
|
env: BUILD=style
|
||||||
|
- python: pypy
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel
|
||||||
|
- python: pypy
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=release
|
||||||
|
- python: pypy
|
||||||
|
env: BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal
|
||||||
|
- language: generic
|
||||||
|
os: osx
|
||||||
|
env: BUILD=test
|
||||||
|
|
||||||
|
|
||||||
|
branches:
|
||||||
|
only:
|
||||||
|
- auto
|
||||||
|
- master
|
||||||
|
|
||||||
|
|
||||||
16
AUTHORS.rst
16
AUTHORS.rst
|
|
@ -4,26 +4,10 @@ Contributors
|
||||||
In alphabetical order:
|
In alphabetical order:
|
||||||
|
|
||||||
- Ben Boeckel
|
- Ben Boeckel
|
||||||
- Bleala
|
|
||||||
- Christian Geier
|
- Christian Geier
|
||||||
- Clément Mondon
|
- Clément Mondon
|
||||||
- Corey Hinshaw
|
|
||||||
- Kai Herlemann
|
|
||||||
- Hugo Osvaldo Barrera
|
- Hugo Osvaldo Barrera
|
||||||
- Jason Cox
|
|
||||||
- Julian Mehne
|
- Julian Mehne
|
||||||
- Malte Kiefer
|
|
||||||
- Marek Marczykowski-Górecki
|
|
||||||
- Markus Unterwaditzer
|
- Markus Unterwaditzer
|
||||||
- Michael Adler
|
- Michael Adler
|
||||||
- rEnr3n
|
|
||||||
- Thomas Weißschuh
|
- Thomas Weißschuh
|
||||||
- Witcher01
|
|
||||||
- samm81
|
|
||||||
|
|
||||||
Special thanks goes to:
|
|
||||||
|
|
||||||
* `FastMail <https://github.com/pimutils/vdirsyncer/issues/571>`_ sponsors a
|
|
||||||
paid account for testing their servers.
|
|
||||||
* `Packagecloud <https://packagecloud.io/>`_ provide repositories for
|
|
||||||
vdirsyncer's Debian packages.
|
|
||||||
|
|
|
||||||
366
CHANGELOG.rst
366
CHANGELOG.rst
|
|
@ -7,367 +7,7 @@ package maintainers. For further info, see the git commit log.
|
||||||
|
|
||||||
Package maintainers and users who have to manually update their installation
|
Package maintainers and users who have to manually update their installation
|
||||||
may want to subscribe to `GitHub's tag feed
|
may want to subscribe to `GitHub's tag feed
|
||||||
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
|
<https://github.com/untitaker/vdirsyncer/tags.atom>`_.
|
||||||
|
|
||||||
Version 0.21.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Implement retrying for ``google`` storage type when a rate limit is reached.
|
|
||||||
- ``tenacity`` is now a required dependency.
|
|
||||||
- Drop support for Python 3.8.
|
|
||||||
- Retry transient network errors for nullipotent requests.
|
|
||||||
|
|
||||||
Version 0.20.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Remove dependency on abandoned ``atomicwrites`` library.
|
|
||||||
- Implement ``filter_hook`` for the HTTP storage.
|
|
||||||
- Drop support for Python 3.7.
|
|
||||||
- Add support for Python 3.12 and Python 3.13.
|
|
||||||
- Properly close the status database after using. This especially affects tests,
|
|
||||||
where we were leaking a large amount of file descriptors.
|
|
||||||
- Extend supported versions of ``aiostream`` to include 0.7.x.
|
|
||||||
|
|
||||||
Version 0.19.3
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Added a no_delete option to the storage configuration. :gh:`1090`
|
|
||||||
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
|
|
||||||
- Add an option to request vCard v4.0. :gh:`1066`
|
|
||||||
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
|
|
||||||
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
|
|
||||||
- Implement digest auth. :gh:`1137`
|
|
||||||
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
|
|
||||||
|
|
||||||
Version 0.19.2
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Improve the performance of ``SingleFileStorage``. :gh:`818`
|
|
||||||
- Properly document some caveats of the Google Contacts storage.
|
|
||||||
- Fix crash when using auth certs. :gh:`1033`
|
|
||||||
- The ``filesystem`` storage can be specified with ``type =
|
|
||||||
"filesystem/icalendar"`` or ``type = "filesystem/vcard"``. This has not
|
|
||||||
functional impact, and is merely for forward compatibility with the Rust
|
|
||||||
implementation of vdirsyncer.
|
|
||||||
- Python 3.10 and 3.11 are officially supported.
|
|
||||||
- Instructions for integrating with Google CalDav/CardDav have changed.
|
|
||||||
Applications now need to be registered as "Desktop applications". Using "Web
|
|
||||||
application" no longer works due to changes on Google's side. :gh:`1078`
|
|
||||||
|
|
||||||
Version 0.19.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Fixed crash when operating on Google Contacts. :gh:`994`
|
|
||||||
- The ``HTTP_PROXY`` and ``HTTPS_PROXY`` are now respected. :gh:`1031`
|
|
||||||
- Instructions for integrating with Google CalDav/CardDav have changed.
|
|
||||||
Applications now need to be registered as "Web Application". :gh:`975`
|
|
||||||
- Various documentation updates.
|
|
||||||
|
|
||||||
Version 0.19.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Add "shell" password fetch strategy to pass command string to a shell.
|
|
||||||
- Add "description" and "order" as metadata. These fetch the CalDAV:
|
|
||||||
calendar-description, ``CardDAV:addressbook-description`` and
|
|
||||||
``apple-ns:calendar-order`` properties respectively.
|
|
||||||
- Add a new ``showconfig`` status. This prints *some* configuration values as
|
|
||||||
JSON. This is intended to be used by external tools and helpers that interact
|
|
||||||
with ``vdirsyncer``, and considered experimental.
|
|
||||||
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
|
|
||||||
"create", it implicitly creates missing collections during sync without user
|
|
||||||
prompts. This simplifies workflows where collections should be automatically
|
|
||||||
created on both sides.
|
|
||||||
- Update TLS-related tests that were failing due to weak MDs. :gh:`903`
|
|
||||||
- ``pytest-httpserver`` and ``trustme`` are now required for tests.
|
|
||||||
- ``pytest-localserver`` is no longer required for tests.
|
|
||||||
- Multithreaded support has been dropped. The ``"--max-workers`` has been removed.
|
|
||||||
- A new ``asyncio`` backend is now used. So far, this shows substantial speed
|
|
||||||
improvements in ``discovery`` and ``metasync``, but little change in `sync`.
|
|
||||||
This will likely continue improving over time. :gh:`906`
|
|
||||||
- The ``google`` storage types no longer require ``requests-oauthlib``, but
|
|
||||||
require ``python-aiohttp-oauthlib`` instead.
|
|
||||||
- Vdirsyncer no longer includes experimental support for `EteSync
|
|
||||||
<https://www.etesync.com/>`_. The existing integration had not been supported
|
|
||||||
for a long time and no longer worked. Support for external storages may be
|
|
||||||
added if anyone is interested in maintaining an EteSync plugin. EteSync
|
|
||||||
users should consider using `etesync-dav`_.
|
|
||||||
- The ``plist`` for macOS has been dropped. It was broken and homebrew
|
|
||||||
generates their own based on package metadata. macOS users are encouraged to
|
|
||||||
use that as a reference.
|
|
||||||
|
|
||||||
.. _etesync-dav: https://github.com/etesync/etesync-dav
|
|
||||||
|
|
||||||
Changes to SSL configuration
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
Support for ``md5`` and ``sha1`` certificate fingerprints has been dropped. If
|
|
||||||
you're validating certificate fingerprints, use ``sha256`` instead.
|
|
||||||
|
|
||||||
When using a custom ``verify_fingerprint``, CA validation is always disabled.
|
|
||||||
|
|
||||||
If ``verify_fingerprint`` is unset, CA verification is always active. Disabling
|
|
||||||
both features is insecure and no longer supported.
|
|
||||||
|
|
||||||
The ``verify`` parameter no longer takes boolean values, it is now optional and
|
|
||||||
only takes a string to a custom CA for verification.
|
|
||||||
|
|
||||||
The ``verify`` and ``verify_fingerprint`` will likely be merged into a single
|
|
||||||
parameter in future.
|
|
||||||
|
|
||||||
Version 0.18.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
Note: Version 0.17 has some alpha releases but ultimately was never finalised.
|
|
||||||
0.18 actually continues where 0.16 left off.
|
|
||||||
|
|
||||||
- Support for Python 3.5 and 3.6 has been dropped. This release mostly focuses
|
|
||||||
on keeping vdirsyncer compatible with newer environments.
|
|
||||||
- click 8 and click-threading 0.5.0 are now required.
|
|
||||||
- For those using ``pipsi``, we now recommend using ``pipx``, it's successor.
|
|
||||||
- Python 3.9 is now supported.
|
|
||||||
- Our Debian/Ubuntu build scripts have been updated. New versions should be
|
|
||||||
pushed to those repositories soon.
|
|
||||||
|
|
||||||
Version 0.16.8
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released 09 June 2020*
|
|
||||||
|
|
||||||
- Support Python 3.7 and 3.8.
|
|
||||||
|
|
||||||
This release is functionally identical to 0.16.7.
|
|
||||||
It's been tested with recent Python versions, and has been marked as supporting
|
|
||||||
them. It will also be the final release supporting Python 3.5 and 3.6.
|
|
||||||
|
|
||||||
Version 0.16.7
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 19 July 2018*
|
|
||||||
|
|
||||||
- Fixes for Python 3.7
|
|
||||||
|
|
||||||
Version 0.16.6
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 13 June 2018*
|
|
||||||
|
|
||||||
- **Packagers:** Documentation building no longer needs a working installation
|
|
||||||
of vdirsyncer.
|
|
||||||
|
|
||||||
Version 0.16.5
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 13 June 2018*
|
|
||||||
|
|
||||||
- **Packagers:** click-log 0.3 is required.
|
|
||||||
- All output will now happen on stderr (because of the upgrade of ``click-log``).
|
|
||||||
|
|
||||||
Version 0.16.4
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 05 February 2018*
|
|
||||||
|
|
||||||
- Fix tests for new Hypothesis version. (Literally no other change included)
|
|
||||||
|
|
||||||
Version 0.16.3
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 03 October 2017*
|
|
||||||
|
|
||||||
- First version with custom Debian and Ubuntu packages. See :gh:`663`.
|
|
||||||
- Remove invalid ASCII control characters from server responses. See :gh:`626`.
|
|
||||||
- **packagers:** Python 3.3 is no longer supported. See :ghpr:`674`.
|
|
||||||
|
|
||||||
Version 0.16.2
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 24 August 2017*
|
|
||||||
|
|
||||||
- Fix crash when using daterange or item_type filters in
|
|
||||||
:storage:`google_calendar`, see :gh:`657`.
|
|
||||||
- **Packagers:** Fixes for new version ``0.2.0`` of ``click-log``. The version
|
|
||||||
requirements for the dependency ``click-log`` changed.
|
|
||||||
|
|
||||||
Version 0.16.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 8 August 2017*
|
|
||||||
|
|
||||||
- Removed remoteStorage support, see :gh:`647`.
|
|
||||||
- Fixed test failures caused by latest requests version, see :gh:`660`.
|
|
||||||
|
|
||||||
Version 0.16.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 2 June 2017*
|
|
||||||
|
|
||||||
- Strip ``METHOD:PUBLISH`` added by some calendar providers, see :gh:`502`.
|
|
||||||
- Fix crash of Google storages when saving token file.
|
|
||||||
- Make DAV discovery more RFC-conformant, see :ghpr:`585`.
|
|
||||||
- Vdirsyncer is now tested against Xandikos, see :ghpr:`601`.
|
|
||||||
- Subfolders with a leading dot are now ignored during discover for
|
|
||||||
``filesystem`` storage. This makes it easier to combine it with version
|
|
||||||
control.
|
|
||||||
- Statuses are now stored in a sqlite database. Old data is automatically
|
|
||||||
migrated. Users with really large datasets should encounter performance
|
|
||||||
improvements. This means that **sqlite3 is now a dependency of vdirsyncer**.
|
|
||||||
- **Vdirsyncer is now licensed under the 3-clause BSD license**, see :gh:`610`.
|
|
||||||
- Vdirsyncer now includes experimental support for `EteSync
|
|
||||||
<https://www.etesync.com/>`_, see :ghpr:`614`.
|
|
||||||
- Vdirsyncer now uses more filesystem metadata for determining whether an item
|
|
||||||
changed. You will notice a **possibly heavy CPU/IO spike on the first sync
|
|
||||||
after upgrading**.
|
|
||||||
- **Packagers:** Reference ``systemd.service`` and ``systemd.timer`` unit files
|
|
||||||
are provided. It is recommended to install these as documentation if your
|
|
||||||
distribution is systemd-based.
|
|
||||||
|
|
||||||
Version 0.15.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 28 February 2017*
|
|
||||||
|
|
||||||
- Deprecated syntax for configuration values is now completely rejected. All
|
|
||||||
values now have to be valid JSON.
|
|
||||||
- A few UX improvements for Google storages, see :gh:`549` and :gh:`552`.
|
|
||||||
- Fix collection discovery for :storage:`google_contacts`, see :gh:`564`.
|
|
||||||
- iCloud is now tested on Travis, see :gh:`567`.
|
|
||||||
|
|
||||||
Version 0.14.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 05 January 2017*
|
|
||||||
|
|
||||||
- ``vdirsyncer repair`` no longer changes "unsafe" UIDs by default, an extra
|
|
||||||
option has to be specified. See :gh:`527`.
|
|
||||||
- A lot of important documentation updates.
|
|
||||||
|
|
||||||
Version 0.14.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 26 October 2016*
|
|
||||||
|
|
||||||
- ``vdirsyncer sync`` now continues other uploads if one upload failed. The
|
|
||||||
exit code in such situations is still non-zero.
|
|
||||||
- Add ``partial_sync`` option to pair section. See :ref:`the config docs
|
|
||||||
<partial_sync_def>`.
|
|
||||||
- Vdirsyncer will now warn if there's a string without quotes in your config.
|
|
||||||
Please file issues if you find documentation that uses unquoted strings.
|
|
||||||
- Fix an issue that would break khal's config setup wizard.
|
|
||||||
|
|
||||||
Version 0.13.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 30 September 2016*
|
|
||||||
|
|
||||||
- Fix a bug that would completely break collection discovery.
|
|
||||||
|
|
||||||
Version 0.13.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 29 September 2016*
|
|
||||||
|
|
||||||
- Python 2 is no longer supported at all. See :gh:`219`.
|
|
||||||
- Config sections are now checked for duplicate names. This also means that you
|
|
||||||
cannot have a storage section ``[storage foo]`` and a pair ``[pair foo]`` in
|
|
||||||
your config, they have to have different names. This is done such that
|
|
||||||
console output is always unambiguous. See :gh:`459`.
|
|
||||||
- Custom commands can now be used for conflict resolution during sync. See
|
|
||||||
:gh:`127`.
|
|
||||||
- :storage:`http` now completely ignores UIDs. This avoids a lot of unnecessary
|
|
||||||
down- and uploads.
|
|
||||||
|
|
||||||
Version 0.12.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 20 August 2016*
|
|
||||||
|
|
||||||
- Fix a crash for Google and DAV storages. See :ghpr:`492`.
|
|
||||||
- Fix an URL-encoding problem with DavMail. See :gh:`491`.
|
|
||||||
|
|
||||||
Version 0.12
|
|
||||||
============
|
|
||||||
|
|
||||||
*released on 19 August 2016*
|
|
||||||
|
|
||||||
- :storage:`singlefile` now supports collections. See :ghpr:`488`.
|
|
||||||
|
|
||||||
Version 0.11.3
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 29 July 2016*
|
|
||||||
|
|
||||||
- Default value of ``auth`` parameter was changed from ``guess`` to ``basic``
|
|
||||||
to resolve issues with the Apple Calendar Server (:gh:`457`) and improve
|
|
||||||
performance. See :gh:`461`.
|
|
||||||
- **Packagers:** The ``click-threading`` requirement is now ``>=0.2``. It was
|
|
||||||
incorrect before. See :gh:`478`.
|
|
||||||
- Fix a bug in the DAV XML parsing code that would make vdirsyncer crash on
|
|
||||||
certain input. See :gh:`480`.
|
|
||||||
- Redirect chains should now be properly handled when resolving ``well-known``
|
|
||||||
URLs. See :ghpr:`481`.
|
|
||||||
|
|
||||||
Version 0.11.2
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 15 June 2016*
|
|
||||||
|
|
||||||
- Fix typo that would break tests.
|
|
||||||
|
|
||||||
Version 0.11.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 15 June 2016*
|
|
||||||
|
|
||||||
- Fix a bug in collection validation.
|
|
||||||
- Fix a cosmetic bug in debug output.
|
|
||||||
- Various documentation improvements.
|
|
||||||
|
|
||||||
Version 0.11.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 19 May 2016*
|
|
||||||
|
|
||||||
- Discovery is no longer automatically done when running ``vdirsyncer sync``.
|
|
||||||
``vdirsyncer discover`` now has to be explicitly called.
|
|
||||||
- Add a ``.plist`` example for Mac OS X.
|
|
||||||
- Usage under Python 2 now requires a special config parameter to be set.
|
|
||||||
- Various deprecated configuration parameters do no longer have specialized
|
|
||||||
errormessages. The generic error message for unknown parameters is shown.
|
|
||||||
|
|
||||||
- Vdirsyncer no longer warns that the ``passwordeval`` parameter has been
|
|
||||||
renamed to ``password_command``.
|
|
||||||
|
|
||||||
- The ``keyring`` fetching strategy has been dropped some versions ago, but
|
|
||||||
the specialized error message has been dropped.
|
|
||||||
|
|
||||||
- An old status format from version 0.4 is no longer supported. If you're
|
|
||||||
experiencing problems, just delete your status folder.
|
|
||||||
|
|
||||||
Version 0.10.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 23 April 2016*
|
|
||||||
|
|
||||||
- New storage types :storage:`google_calendar` and :storage:`google_contacts`
|
|
||||||
have been added.
|
|
||||||
- New global command line option `--config`, to specify an alternative config
|
|
||||||
file. See :gh:`409`.
|
|
||||||
- The ``collections`` parameter can now be used to synchronize
|
|
||||||
differently-named collections with each other.
|
|
||||||
- **Packagers:** The ``lxml`` dependency has been dropped.
|
|
||||||
- XML parsing is now a lot stricter. Malfunctioning servers that used to work
|
|
||||||
with vdirsyncer may stop working.
|
|
||||||
|
|
||||||
Version 0.9.3
|
|
||||||
=============
|
|
||||||
|
|
||||||
*released on 22 March 2016*
|
|
||||||
|
|
||||||
- :storage:`singlefile` and :storage:`http` now handle recurring events
|
|
||||||
properly.
|
|
||||||
- Fix a typo in the packaging guidelines.
|
|
||||||
- Moved to ``pimutils`` organization on GitHub. Old links *should* redirect,
|
|
||||||
but be aware of client software that doesn't properly handle redirects.
|
|
||||||
|
|
||||||
Version 0.9.2
|
Version 0.9.2
|
||||||
=============
|
=============
|
||||||
|
|
@ -439,8 +79,8 @@ Version 0.7.4
|
||||||
- Improved error messages instead of faulty server behavior, see :gh:`290` and
|
- Improved error messages instead of faulty server behavior, see :gh:`290` and
|
||||||
:gh:`300`.
|
:gh:`300`.
|
||||||
- Safer shutdown of threadpool, avoid exceptions, see :gh:`291`.
|
- Safer shutdown of threadpool, avoid exceptions, see :gh:`291`.
|
||||||
- Fix a sync bug for read-only storages see commit
|
- Fix a sync bug for read-only storages see commmit
|
||||||
``ed22764921b2e5bf6a934cf14aa9c5fede804d8e``.
|
`ed22764921b2e5bf6a934cf14aa9c5fede804d8e`.
|
||||||
- Etag changes are no longer sufficient to trigger sync operations. An actual
|
- Etag changes are no longer sufficient to trigger sync operations. An actual
|
||||||
content change is also necessary. See :gh:`257`.
|
content change is also necessary. See :gh:`257`.
|
||||||
- :storage:`remotestorage` now automatically opens authentication dialogs in
|
- :storage:`remotestorage` now automatically opens authentication dialogs in
|
||||||
|
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
See `the pimutils CoC <http://pimutils.org/coc>`_.
|
|
||||||
|
|
@ -1,3 +1,3 @@
|
||||||
Please see `the documentation
|
Please see `the documentation
|
||||||
<https://vdirsyncer.pimutils.org/en/stable/contributing.html>`_ for how to
|
<https://vdirsyncer.readthedocs.org/en/stable/contributing.html>`_ for how to
|
||||||
contribute to this project.
|
contribute to this project.
|
||||||
|
|
|
||||||
|
|
@ -1,12 +1,9 @@
|
||||||
Before you submit bug reports: https://vdirsyncer.pimutils.org/en/stable/contributing.html
|
Before you submit bug reports: https://vdirsyncer.readthedocs.org/en/stable/contributing.html
|
||||||
|
|
||||||
Things to include in your bugreport:
|
Things to include in your bugreport:
|
||||||
|
|
||||||
* Your vdirsyncer version
|
* Your vdirsyncer version
|
||||||
* If applicable, which server software (and which version) you're using
|
|
||||||
* Your Python version
|
* Your Python version
|
||||||
* Your operating system
|
* Your operating system
|
||||||
* Your config file
|
* Your config file
|
||||||
* Use `vdirsyncer -vdebug` for debug output. The output is sensitive, but
|
* Use `vdirsyncer -vdebug` for debug output.
|
||||||
please attach at least the last few lines before the error (if applicable),
|
|
||||||
censored as necessary. This is almost always the most useful information.
|
|
||||||
|
|
|
||||||
46
LICENSE
46
LICENSE
|
|
@ -1,33 +1,19 @@
|
||||||
Copyright (c) 2014-2020 by Markus Unterwaditzer & contributors. See
|
Copyright (c) 2014-2016 Markus Unterwaditzer & contributors
|
||||||
AUTHORS.rst for more details.
|
|
||||||
|
|
||||||
Some rights reserved.
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
Redistribution and use in source and binary forms of the software as well
|
The above copyright notice and this permission notice shall be included in all
|
||||||
as documentation, with or without modification, are permitted provided
|
copies or substantial portions of the Software.
|
||||||
that the following conditions are met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
notice, this list of conditions and the following disclaimer.
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
* Redistributions in binary form must reproduce the above
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
copyright notice, this list of conditions and the following
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
disclaimer in the documentation and/or other materials provided
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
with the distribution.
|
SOFTWARE.
|
||||||
|
|
||||||
* The names of the contributors may not be used to endorse or
|
|
||||||
promote products derived from this software without specific
|
|
||||||
prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
|
|
||||||
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
|
|
||||||
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
|
|
||||||
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
|
||||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
||||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
|
||||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
|
||||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
|
||||||
DAMAGE.
|
|
||||||
|
|
|
||||||
13
MANIFEST.in
13
MANIFEST.in
|
|
@ -1,6 +1,13 @@
|
||||||
# setuptools-scm includes everything tracked by git
|
include AUTHORS.rst
|
||||||
prune docker
|
include CHANGELOG.rst
|
||||||
prune scripts
|
include LICENSE
|
||||||
|
include config.example
|
||||||
|
include Makefile
|
||||||
|
include test-requirements.txt
|
||||||
|
include docs-requirements.txt
|
||||||
|
|
||||||
|
recursive-include docs *
|
||||||
|
recursive-include tests *
|
||||||
prune tests/storage/servers
|
prune tests/storage/servers
|
||||||
recursive-include tests/storage/servers/radicale *
|
recursive-include tests/storage/servers/radicale *
|
||||||
recursive-include tests/storage/servers/skip *
|
recursive-include tests/storage/servers/skip *
|
||||||
|
|
|
||||||
132
Makefile
132
Makefile
|
|
@ -1,63 +1,93 @@
|
||||||
# See the documentation on how to run the tests:
|
# See the documentation on how to run the tests.
|
||||||
# https://vdirsyncer.pimutils.org/en/stable/contributing.html
|
|
||||||
|
|
||||||
# Which DAV server to run the tests against (radicale, xandikos, skip, owncloud, nextcloud, ...)
|
|
||||||
export DAV_SERVER := skip
|
export DAV_SERVER := skip
|
||||||
|
export REMOTESTORAGE_SERVER := skip
|
||||||
# release (install release versions of dependencies)
|
export RADICALE_BACKEND := filesystem
|
||||||
# development (install development versions of some of vdirsyncer's dependencies)
|
|
||||||
# or minimal (install oldest version of each dependency that is supported by vdirsyncer)
|
|
||||||
export REQUIREMENTS := release
|
export REQUIREMENTS := release
|
||||||
|
export TESTSERVER_BASE := ./tests/storage/servers/
|
||||||
# Set this to true if you run vdirsyncer's test as part of e.g. packaging.
|
export CI := false
|
||||||
export DETERMINISTIC_TESTS := false
|
export DETERMINISTIC_TESTS := false
|
||||||
|
|
||||||
# Assume to run in CI. Don't use this outside of a virtual machine. It will
|
install-servers:
|
||||||
# heavily "pollute" your system, such as attempting to install a new Python
|
|
||||||
# systemwide.
|
|
||||||
export CI := false
|
|
||||||
|
|
||||||
# Whether to generate coverage data while running tests.
|
|
||||||
export COVERAGE := $(CI)
|
|
||||||
|
|
||||||
# Variables below this line are not very interesting for getting started.
|
|
||||||
|
|
||||||
CODECOV_PATH = /tmp/codecov.sh
|
|
||||||
|
|
||||||
all:
|
|
||||||
$(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation)
|
|
||||||
|
|
||||||
ci-test:
|
|
||||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
|
||||||
pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/
|
|
||||||
bash $(CODECOV_PATH) -c
|
|
||||||
|
|
||||||
ci-test-storage:
|
|
||||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
|
||||||
set -ex; \
|
set -ex; \
|
||||||
for server in $(DAV_SERVER); do \
|
for server in $(DAV_SERVER) $(REMOTESTORAGE_SERVER); do \
|
||||||
DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \
|
if [ ! -d "$(TESTSERVER_BASE)$$server/" ]; then \
|
||||||
|
git submodule update --init -- "$(TESTSERVER_BASE)$$server"; \
|
||||||
|
fi; \
|
||||||
|
(cd $(TESTSERVER_BASE)$$server && sh install.sh); \
|
||||||
done
|
done
|
||||||
bash $(CODECOV_PATH) -c
|
|
||||||
|
|
||||||
check:
|
install-test: install-servers
|
||||||
ruff check
|
(python --version | grep -vq 'Python 3.3') || pip install enum34
|
||||||
ruff format --diff
|
pip install -r test-requirements.txt
|
||||||
#mypy vdirsyncer
|
set -xe && if [ "$$REQUIREMENTS" = "devel" ]; then \
|
||||||
|
pip install -U --force-reinstall \
|
||||||
|
git+https://github.com/DRMacIver/hypothesis \
|
||||||
|
git+https://github.com/pytest-dev/pytest; \
|
||||||
|
fi
|
||||||
|
[ $(CI) != "true" ] || pip install coverage codecov
|
||||||
|
|
||||||
release-deb:
|
test:
|
||||||
sh scripts/release-deb.sh debian jessie
|
set -e; \
|
||||||
sh scripts/release-deb.sh debian stretch
|
if [ "$(CI)" = "true" ]; then \
|
||||||
sh scripts/release-deb.sh ubuntu trusty
|
coverage run --source=vdirsyncer/ --module pytest; \
|
||||||
sh scripts/release-deb.sh ubuntu xenial
|
codecov; \
|
||||||
sh scripts/release-deb.sh ubuntu zesty
|
else \
|
||||||
|
py.test; \
|
||||||
install-dev:
|
|
||||||
pip install -U pip setuptools wheel
|
|
||||||
pip install -e '.[test,check,docs]'
|
|
||||||
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
|
||||||
pip install pyproject-dependencies && \
|
|
||||||
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
install-style:
|
||||||
|
pip install flake8 flake8-import-order sphinx
|
||||||
|
|
||||||
|
style:
|
||||||
|
flake8
|
||||||
|
! grep -ri syncroniz */*
|
||||||
|
sphinx-build -W -b html ./docs/ ./docs/_build/html/
|
||||||
|
python3 scripts/make_travisconf.py | \
|
||||||
|
diff -q .travis.yml - > /dev/null || \
|
||||||
|
(echo 'travis.yml is outdated. Run `make travis-conf`.' && false)
|
||||||
|
|
||||||
|
travis-conf:
|
||||||
|
python3 scripts/make_travisconf.py > .travis.yml
|
||||||
|
|
||||||
|
install-docs:
|
||||||
|
pip install -r docs-requirements.txt
|
||||||
|
|
||||||
|
docs:
|
||||||
|
cd docs && make html
|
||||||
|
|
||||||
|
sh: # open subshell with default test config
|
||||||
|
$$SHELL;
|
||||||
|
|
||||||
|
linkcheck:
|
||||||
|
sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/
|
||||||
|
|
||||||
|
all:
|
||||||
|
$(error Take a look at https://vdirsyncer.readthedocs.org/en/stable/tutorial.html#installation)
|
||||||
|
|
||||||
|
release:
|
||||||
|
python setup.py sdist bdist_wheel upload
|
||||||
|
|
||||||
|
install-dev:
|
||||||
|
set -xe && if [ "$$REMOTESTORAGE_SERVER" != "skip" ]; then \
|
||||||
|
pip install -e .[remotestorage]; \
|
||||||
|
else \
|
||||||
|
pip install -e .; \
|
||||||
|
fi
|
||||||
|
set -xe && if [ "$$REQUIREMENTS" = "devel" ]; then \
|
||||||
|
pip install -U --force-reinstall \
|
||||||
|
git+https://github.com/mitsuhiko/click \
|
||||||
|
git+https://github.com/kennethreitz/requests; \
|
||||||
|
elif [ "$$REQUIREMENTS" = "minimal" ]; then \
|
||||||
|
pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
|
||||||
|
fi
|
||||||
|
|
||||||
|
ssh-submodule-urls:
|
||||||
|
git submodule foreach "\
|
||||||
|
echo -n 'Old: '; \
|
||||||
|
git remote get-url origin; \
|
||||||
|
git remote set-url origin \$$(git remote get-url origin | sed -e 's/https:\/\/github\.com\//git@github.com:/g'); \
|
||||||
|
echo -n 'New URL: '; \
|
||||||
|
git remote get-url origin"
|
||||||
|
|
||||||
.PHONY: docs
|
.PHONY: docs
|
||||||
|
|
|
||||||
91
README.rst
91
README.rst
|
|
@ -2,73 +2,62 @@
|
||||||
vdirsyncer
|
vdirsyncer
|
||||||
==========
|
==========
|
||||||
|
|
||||||
.. image:: https://builds.sr.ht/~whynothugo/vdirsyncer.svg
|
- `Documentation <https://vdirsyncer.readthedocs.org/en/stable/>`_
|
||||||
:target: https://builds.sr.ht/~whynothugo/vdirsyncer
|
- `Source code <https://github.com/untitaker/vdirsyncer>`_
|
||||||
:alt: CI status
|
|
||||||
|
|
||||||
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=main
|
Vdirsyncer synchronizes your calendars and addressbooks between two storages_.
|
||||||
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=main
|
The most popular purpose is to synchronize a CalDAV/CardDAV server with a local
|
||||||
:alt: Codecov coverage report
|
folder or file. The local data can then be accessed via a variety of programs_,
|
||||||
|
none of which have to know or worry about syncing to a server.
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/vdirsyncer/badge/
|
.. _storages: https://vdirsyncer.readthedocs.org/en/latest/config.html#storages
|
||||||
:target: https://vdirsyncer.rtfd.org/
|
.. _programs: https://vdirsyncer.readthedocs.org/en/stable/supported.html
|
||||||
:alt: documentation
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/pypi/v/vdirsyncer.svg
|
It aims to be for CalDAV and CardDAV what `OfflineIMAP
|
||||||
:target: https://pypi.python.org/pypi/vdirsyncer
|
<http://offlineimap.org/>`_ is for IMAP.
|
||||||
:alt: version on pypi
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/badge/deb-packagecloud.io-844fec.svg
|
.. image:: https://travis-ci.org/untitaker/vdirsyncer.png?branch=master
|
||||||
:target: https://packagecloud.io/pimutils/vdirsyncer
|
:target: https://travis-ci.org/untitaker/vdirsyncer
|
||||||
:alt: Debian packages
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/pypi/l/vdirsyncer.svg
|
.. image:: https://codecov.io/github/untitaker/vdirsyncer/coverage.svg?branch=master
|
||||||
:target: https://github.com/pimutils/vdirsyncer/blob/main/LICENCE
|
:target: https://codecov.io/github/untitaker/vdirsyncer?branch=master
|
||||||
:alt: licence: BSD
|
|
||||||
|
|
||||||
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
|
||||||
- `Source code <https://github.com/pimutils/vdirsyncer>`_
|
|
||||||
|
|
||||||
Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks
|
|
||||||
between a variety of servers and the local filesystem. The most popular usecase
|
|
||||||
is to synchronize a server with a local folder and use a set of other programs_
|
|
||||||
to change the local events and contacts. Vdirsyncer can then synchronize those
|
|
||||||
changes back to the server.
|
|
||||||
|
|
||||||
However, vdirsyncer is not limited to synchronizing between clients and
|
|
||||||
servers. It can also be used to synchronize calendars and/or addressbooks
|
|
||||||
between two servers directly.
|
|
||||||
|
|
||||||
It aims to be for calendars and contacts what `OfflineIMAP
|
|
||||||
<https://www.offlineimap.org/>`_ is for emails.
|
|
||||||
|
|
||||||
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
|
|
||||||
|
|
||||||
Links of interest
|
Links of interest
|
||||||
=================
|
=================
|
||||||
|
|
||||||
* Check out `the tutorial
|
* Check out `the tutorial
|
||||||
<https://vdirsyncer.pimutils.org/en/stable/tutorial.html>`_ for basic
|
<https://vdirsyncer.readthedocs.org/en/stable/tutorial.html>`_ for basic
|
||||||
usage.
|
usage.
|
||||||
|
|
||||||
* `Contact information
|
* `Contact information
|
||||||
<https://vdirsyncer.pimutils.org/en/stable/contact.html>`_
|
<https://vdirsyncer.readthedocs.org/en/stable/contact.html>`_
|
||||||
|
|
||||||
* `How to contribute to this project
|
* `How to contribute to this project
|
||||||
<https://vdirsyncer.pimutils.org/en/stable/contributing.html>`_
|
<https://vdirsyncer.readthedocs.org/en/stable/contributing.html>`_
|
||||||
|
|
||||||
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
|
|
||||||
|
|
||||||
Dockerized
|
|
||||||
=================
|
|
||||||
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
|
|
||||||
Docker environment, you can check out the following GitHub Repository:
|
|
||||||
|
|
||||||
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
|
|
||||||
|
|
||||||
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
|
|
||||||
|
|
||||||
License
|
License
|
||||||
=======
|
=======
|
||||||
|
|
||||||
Licensed under the 3-clause BSD license, see ``LICENSE``.
|
Licensed under the Expat/MIT license, see ``LICENSE``.
|
||||||
|
|
||||||
|
Donations
|
||||||
|
=========
|
||||||
|
|
||||||
|
If you found my work useful, please consider donating. Thank you!
|
||||||
|
|
||||||
|
- Bitcoin: ``16sSHxZm263WHR9P9PJjCxp64jp9ooXKVt``
|
||||||
|
|
||||||
|
- Bountysource is useful for funding work on a specific GitHub issue:
|
||||||
|
|
||||||
|
.. image:: https://img.shields.io/bountysource/team/vdirsyncer/activity.svg
|
||||||
|
:target: https://www.bountysource.com/teams/vdirsyncer
|
||||||
|
|
||||||
|
- There's also Bountysource `Salt
|
||||||
|
<https://salt.bountysource.com/teams/vdirsyncer>`_, for one-time and
|
||||||
|
recurring donations.
|
||||||
|
|
||||||
|
|
||||||
|
- Flattr can be used for recurring donations:
|
||||||
|
|
||||||
|
.. image:: https://api.flattr.com/button/flattr-badge-large.png
|
||||||
|
:target: https://flattr.com/submit/auto?user_id=untitaker&url=https%3A%2F%2Fgithub.com%2Funtitaker%2Fvdirsyncer
|
||||||
|
|
|
||||||
|
|
@ -5,19 +5,19 @@
|
||||||
#
|
#
|
||||||
# Optional parameters are commented out.
|
# Optional parameters are commented out.
|
||||||
# This file doesn't document all available parameters, see
|
# This file doesn't document all available parameters, see
|
||||||
# http://vdirsyncer.pimutils.org/ for the rest of them.
|
# http://vdirsyncer.readthedocs.org/ for the rest of them.
|
||||||
|
|
||||||
[general]
|
[general]
|
||||||
# A folder where vdirsyncer can store some metadata about each pair.
|
# A folder where vdirsyncer can store some metadata about each pair.
|
||||||
status_path = "~/.vdirsyncer/status/"
|
status_path = ~/.vdirsyncer/status/
|
||||||
|
|
||||||
# CARDDAV
|
# CARDDAV
|
||||||
[pair bob_contacts]
|
[pair bob_contacts]
|
||||||
# A `[pair <name>]` block defines two storages `a` and `b` that should be
|
# A `[pair <name>]` block defines two storages `a` and `b` that should be
|
||||||
# synchronized. The definition of these storages follows in `[storage <name>]`
|
# synchronized. The definition of these storages follows in `[storage <name>]`
|
||||||
# blocks. This is similar to accounts in OfflineIMAP.
|
# blocks. This is similar to accounts in OfflineIMAP.
|
||||||
a = "bob_contacts_local"
|
a = bob_contacts_local
|
||||||
b = "bob_contacts_remote"
|
b = bob_contacts_remote
|
||||||
|
|
||||||
# Synchronize all collections that can be found.
|
# Synchronize all collections that can be found.
|
||||||
# You need to run `vdirsyncer discover` if new calendars/addressbooks are added
|
# You need to run `vdirsyncer discover` if new calendars/addressbooks are added
|
||||||
|
|
@ -37,34 +37,34 @@ metadata = ["displayname"]
|
||||||
[storage bob_contacts_local]
|
[storage bob_contacts_local]
|
||||||
# A storage references actual data on a remote server or on the local disk.
|
# A storage references actual data on a remote server or on the local disk.
|
||||||
# Similar to repositories in OfflineIMAP.
|
# Similar to repositories in OfflineIMAP.
|
||||||
type = "filesystem"
|
type = filesystem
|
||||||
path = "~/.contacts/"
|
path = ~/.contacts/
|
||||||
fileext = ".vcf"
|
fileext = .vcf
|
||||||
|
|
||||||
[storage bob_contacts_remote]
|
[storage bob_contacts_remote]
|
||||||
type = "carddav"
|
type = carddav
|
||||||
url = "https://owncloud.example.com/remote.php/carddav/"
|
url = https://owncloud.example.com/remote.php/carddav/
|
||||||
#username =
|
#username =
|
||||||
# The password can also be fetched from the system password storage, netrc or a
|
# The password can also be fetched from the system password storage, netrc or a
|
||||||
# custom command. See http://vdirsyncer.pimutils.org/en/stable/keyring.html
|
# custom command. See http://vdirsyncer.readthedocs.org/en/stable/keyring.html
|
||||||
#password =
|
#password =
|
||||||
|
|
||||||
# CALDAV
|
# CALDAV
|
||||||
[pair bob_calendar]
|
[pair bob_calendar]
|
||||||
a = "bob_calendar_local"
|
a = bob_calendar_local
|
||||||
b = "bob_calendar_remote"
|
b = bob_calendar_remote
|
||||||
collections = ["from a", "from b"]
|
collections = ["from a", "from b"]
|
||||||
|
|
||||||
# Calendars also have a color property
|
# Calendars also have a color property
|
||||||
metadata = ["displayname", "color"]
|
metadata = ["displayname", "color"]
|
||||||
|
|
||||||
[storage bob_calendar_local]
|
[storage bob_calendar_local]
|
||||||
type = "filesystem"
|
type = filesystem
|
||||||
path = "~/.calendars/"
|
path = ~/.calendars/
|
||||||
fileext = ".ics"
|
fileext = .ics
|
||||||
|
|
||||||
[storage bob_calendar_remote]
|
[storage bob_calendar_remote]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
url = "https://owncloud.example.com/remote.php/caldav/"
|
url = https://owncloud.example.com/remote.php/caldav/
|
||||||
#username =
|
#username =
|
||||||
#password =
|
#password =
|
||||||
|
|
|
||||||
|
|
@ -1,75 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
"""Ask user to resolve a vdirsyncer sync conflict interactively.
|
|
||||||
|
|
||||||
Needs a way to ask the user.
|
|
||||||
The use of https://apps.kde.org/kdialog/ for GNU/Linix is hardcoded.
|
|
||||||
|
|
||||||
Depends on python>3.5 and KDialog.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
Ensure the file executable and use it in the vdirsyncer.conf file, e.g.
|
|
||||||
|
|
||||||
conflict_resolution = ["command", "/home/bern/vdirsyncer/resolve_interactively.py"]
|
|
||||||
|
|
||||||
This file is Free Software under the following license:
|
|
||||||
SPDX-License-Identifier: BSD-3-Clause
|
|
||||||
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
|
|
||||||
Author: <bernhard.reiter@intevation.de>
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
KDIALOG = "/usr/bin/kdialog"
|
|
||||||
|
|
||||||
SUMMARY_PATTERN = re.compile("^(SUMMARY:.*)$", re.MULTILINE)
|
|
||||||
|
|
||||||
|
|
||||||
def get_summary(icalendar_text: str):
|
|
||||||
"""Get the first SUMMARY: line from an iCalendar text.
|
|
||||||
|
|
||||||
Do not care about the line being continued.
|
|
||||||
"""
|
|
||||||
match = re.search(SUMMARY_PATTERN, icalendar_text)
|
|
||||||
return match[1]
|
|
||||||
|
|
||||||
|
|
||||||
def main(ical1_filename, ical2_filename):
|
|
||||||
ical1 = ical1_filename.read_text()
|
|
||||||
ical2 = ical2_filename.read_text()
|
|
||||||
|
|
||||||
additional_args = ["--yes-label", "take first"] # return code == 0
|
|
||||||
additional_args += ["--no-label", "take second"] # return code == 1
|
|
||||||
additional_args += ["--cancel-label", "do not resolve"] # return code == 2
|
|
||||||
|
|
||||||
r = subprocess.run(
|
|
||||||
args=[
|
|
||||||
KDIALOG,
|
|
||||||
"--warningyesnocancel",
|
|
||||||
"There was a sync conflict, do you prefer the first entry: \n"
|
|
||||||
f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
|
|
||||||
"or the second entry:\n"
|
|
||||||
f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
|
|
||||||
*additional_args,
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
if r.returncode == 2:
|
|
||||||
# cancel was pressed
|
|
||||||
return # shall lead to items not changed, because not copied
|
|
||||||
|
|
||||||
if r.returncode == 0:
|
|
||||||
# we want to take the first item, so overwrite the second
|
|
||||||
ical2_filename.write_text(ical1)
|
|
||||||
else: # r.returncode == 1, we want the second item, so overwrite the first
|
|
||||||
ical1_filename.write_text(ical2)
|
|
||||||
|
|
||||||
|
|
||||||
if len(sys.argv) != 3:
|
|
||||||
sys.stdout.write(__doc__)
|
|
||||||
else:
|
|
||||||
main(Path(sys.argv[1]), Path(sys.argv[2]))
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
||||||
[Unit]
|
|
||||||
Description=Synchronize calendars and contacts
|
|
||||||
Documentation=https://vdirsyncer.readthedocs.org/
|
|
||||||
StartLimitBurst=2
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart=/usr/bin/vdirsyncer sync
|
|
||||||
RuntimeMaxSec=3m
|
|
||||||
Restart=on-failure
|
|
||||||
|
|
@ -1,10 +0,0 @@
|
||||||
[Unit]
|
|
||||||
Description=Synchronize vdirs
|
|
||||||
|
|
||||||
[Timer]
|
|
||||||
OnBootSec=5m
|
|
||||||
OnUnitActiveSec=15m
|
|
||||||
AccuracySec=5m
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=timers.target
|
|
||||||
2
docs-requirements.txt
Normal file
2
docs-requirements.txt
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
sphinx
|
||||||
|
sphinx_rtd_theme
|
||||||
143
docs/conf.py
143
docs/conf.py
|
|
@ -1,106 +1,127 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from pkg_resources import get_distribution
|
from sphinx.ext import autodoc
|
||||||
|
|
||||||
extensions = ["sphinx.ext.autodoc"]
|
import vdirsyncer
|
||||||
|
|
||||||
templates_path = ["_templates"]
|
extensions = ['sphinx.ext.autodoc']
|
||||||
|
|
||||||
source_suffix = ".rst"
|
templates_path = ['_templates']
|
||||||
master_doc = "index"
|
|
||||||
|
|
||||||
project = "vdirsyncer"
|
source_suffix = '.rst'
|
||||||
copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
|
master_doc = 'index'
|
||||||
datetime.date.today().strftime("%Y")
|
|
||||||
)
|
|
||||||
|
|
||||||
release = get_distribution("vdirsyncer").version
|
project = u'vdirsyncer'
|
||||||
version = ".".join(release.split(".")[:2]) # The short X.Y version.
|
copyright = (u'2014-{}, Markus Unterwaditzer & contributors'
|
||||||
|
.format(datetime.date.today().strftime('%Y')))
|
||||||
|
|
||||||
rst_epilog = f".. |vdirsyncer_version| replace:: {release}"
|
release = vdirsyncer.__version__
|
||||||
|
version = '.'.join(release.split('.')[:2]) # The short X.Y version.
|
||||||
|
|
||||||
exclude_patterns = ["_build"]
|
exclude_patterns = ['_build']
|
||||||
|
|
||||||
pygments_style = "sphinx"
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
|
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import sphinx_rtd_theme
|
import sphinx_rtd_theme
|
||||||
|
html_theme = 'sphinx_rtd_theme'
|
||||||
html_theme = "sphinx_rtd_theme"
|
|
||||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||||
except ImportError:
|
except ImportError:
|
||||||
html_theme = "default"
|
html_theme = 'default'
|
||||||
if not on_rtd:
|
if not on_rtd:
|
||||||
print("-" * 74)
|
print('-' * 74)
|
||||||
print("Warning: sphinx-rtd-theme not installed, building with default theme.")
|
print('Warning: sphinx-rtd-theme not installed, building with default '
|
||||||
print("-" * 74)
|
'theme.')
|
||||||
|
print('-' * 74)
|
||||||
|
|
||||||
html_static_path = ["_static"]
|
html_static_path = ['_static']
|
||||||
htmlhelp_basename = "vdirsyncerdoc"
|
htmlhelp_basename = 'vdirsyncerdoc'
|
||||||
|
|
||||||
latex_elements = {}
|
latex_elements = {}
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
(
|
('index', 'vdirsyncer.tex', u'vdirsyncer Documentation',
|
||||||
"index",
|
u'Markus Unterwaditzer', 'manual'),
|
||||||
"vdirsyncer.tex",
|
|
||||||
"vdirsyncer Documentation",
|
|
||||||
"Markus Unterwaditzer",
|
|
||||||
"manual",
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
man_pages = [
|
man_pages = [
|
||||||
("index", "vdirsyncer", "vdirsyncer Documentation", ["Markus Unterwaditzer"], 1)
|
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
||||||
|
[u'Markus Unterwaditzer'], 1)
|
||||||
]
|
]
|
||||||
|
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
(
|
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
||||||
"index",
|
u'Markus Unterwaditzer', 'vdirsyncer',
|
||||||
"vdirsyncer",
|
'Synchronize calendars and contacts.', 'Miscellaneous'),
|
||||||
"vdirsyncer Documentation",
|
|
||||||
"Markus Unterwaditzer",
|
|
||||||
"vdirsyncer",
|
|
||||||
"Synchronize calendars and contacts.",
|
|
||||||
"Miscellaneous",
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def github_issue_role(name, rawtext, text, lineno, inliner, options=None, content=()):
|
def github_issue_role(name, rawtext, text, lineno, inliner, options={},
|
||||||
options = options or {}
|
content=()):
|
||||||
try:
|
try:
|
||||||
issue_num = int(text)
|
issue_num = int(text)
|
||||||
if issue_num <= 0:
|
if issue_num <= 0:
|
||||||
raise ValueError
|
raise ValueError()
|
||||||
except ValueError:
|
except ValueError:
|
||||||
msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
|
msg = inliner.reporter.error('Invalid GitHub issue: {}'.format(text),
|
||||||
|
line=lineno)
|
||||||
prb = inliner.problematic(rawtext, rawtext, msg)
|
prb = inliner.problematic(rawtext, rawtext, msg)
|
||||||
return [prb], [msg]
|
return [prb], [msg]
|
||||||
|
|
||||||
|
import vdirsyncer
|
||||||
from docutils import nodes
|
from docutils import nodes
|
||||||
|
link = '{}/{}/{}'.format(vdirsyncer.PROJECT_HOME,
|
||||||
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
|
'issues' if name == 'gh' else 'pull',
|
||||||
link = "{}/{}/{}".format(
|
issue_num)
|
||||||
PROJECT_HOME, "issues" if name == "gh" else "pull", issue_num
|
linktext = ('issue #{}' if name == 'gh'
|
||||||
)
|
else 'pull request #{}').format(issue_num)
|
||||||
linktext = ("issue #{}" if name == "gh" else "pull request #{}").format(issue_num)
|
node = nodes.reference(rawtext, linktext, refuri=link,
|
||||||
node = nodes.reference(rawtext, linktext, refuri=link, **options)
|
**options)
|
||||||
return [node], []
|
return [node], []
|
||||||
|
|
||||||
|
|
||||||
|
class StorageDocumenter(autodoc.ClassDocumenter):
|
||||||
|
'''Custom formatter for auto-documenting storage classes. It assumes that
|
||||||
|
the first line of the class' docstring is its own paragraph.
|
||||||
|
|
||||||
|
After that first paragraph, an example configuration will be inserted and
|
||||||
|
Sphinx' __init__ signature removed.'''
|
||||||
|
|
||||||
|
objtype = 'storage'
|
||||||
|
domain = None
|
||||||
|
directivetype = 'storage'
|
||||||
|
option_spec = {}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def can_document_member(cls, member, membername, isattr, parent):
|
||||||
|
from vdirsyncer.storage.base import Storage
|
||||||
|
return isinstance(member, Storage)
|
||||||
|
|
||||||
|
def format_signature(self):
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def add_directive_header(self, sig):
|
||||||
|
directive = getattr(self, 'directivetype', self.objtype)
|
||||||
|
name = self.object.storage_name
|
||||||
|
self.add_line(u'.. %s:: %s%s' % (directive, name, sig),
|
||||||
|
'<autodoc>')
|
||||||
|
|
||||||
|
def get_doc(self, encoding=None, ignore=1):
|
||||||
|
from vdirsyncer.cli.utils import format_storage_config
|
||||||
|
rv = autodoc.ClassDocumenter.get_doc(self, encoding, ignore)
|
||||||
|
config = [u' ' + x for x in format_storage_config(self.object)]
|
||||||
|
rv[0] = rv[0][:1] + [u'::', u''] + config + [u''] + rv[0][1:]
|
||||||
|
return rv
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
def setup(app):
|
||||||
from sphinx.domains.python import PyObject
|
from sphinx.domains.python import PyObject
|
||||||
|
app.add_object_type('storage', 'storage', 'pair: %s; storage',
|
||||||
app.add_object_type(
|
doc_field_types=PyObject.doc_field_types)
|
||||||
"storage",
|
app.add_role('gh', github_issue_role)
|
||||||
"storage",
|
app.add_role('ghpr', github_issue_role)
|
||||||
"pair: %s; storage",
|
app.add_autodocumenter(StorageDocumenter)
|
||||||
doc_field_types=PyObject.doc_field_types,
|
|
||||||
)
|
|
||||||
app.add_role("gh", github_issue_role)
|
|
||||||
app.add_role("ghpr", github_issue_role)
|
|
||||||
|
|
|
||||||
430
docs/config.rst
430
docs/config.rst
|
|
@ -27,6 +27,7 @@ General Section
|
||||||
|
|
||||||
[general]
|
[general]
|
||||||
status_path = ...
|
status_path = ...
|
||||||
|
#password_command =
|
||||||
|
|
||||||
|
|
||||||
- ``status_path``: A directory where vdirsyncer will store some additional data
|
- ``status_path``: A directory where vdirsyncer will store some additional data
|
||||||
|
|
@ -57,86 +58,37 @@ Pair Section
|
||||||
|
|
||||||
- ``a`` and ``b`` reference the storages to sync by their names.
|
- ``a`` and ``b`` reference the storages to sync by their names.
|
||||||
|
|
||||||
- ``collections``: A list of collections to synchronize when ``vdirsyncer
|
- ``collections``: A list of collections to synchronize when
|
||||||
sync`` is executed. See also :ref:`collections_tutorial`.
|
``vdirsyncer sync`` is executed.
|
||||||
|
|
||||||
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
|
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
|
||||||
autodiscovery on a specific storage. It means all the collections on side A /
|
autodiscovery on a specific storage.
|
||||||
side B.
|
|
||||||
|
|
||||||
If the collection you want to sync doesn't have the same name on each side,
|
|
||||||
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
|
|
||||||
This will synchronize the collection ``name_a`` on side A with the collection
|
|
||||||
``name_b`` on side B. The ``config_name`` will be used for representation in
|
|
||||||
CLI arguments and logging.
|
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
|
|
||||||
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
|
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the
|
||||||
the collections from side B, and also the collections named "foo" and "bar".
|
collections from side B, and also the collections named "foo" and "bar".
|
||||||
|
|
||||||
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
|
- ``collections = ["from b", from a"]`` makes vdirsyncer synchronize all
|
||||||
existing collections on either side.
|
existing collections on either side.
|
||||||
|
|
||||||
- ``collections = [["bar", "bar_a", "bar_b"], "foo"]`` makes vdirsyncer
|
|
||||||
synchronize ``bar_a`` from side A with ``bar_b`` from side B, and also
|
|
||||||
synchronize ``foo`` on both sides with each other.
|
|
||||||
|
|
||||||
- ``conflict_resolution``: Optional, define how conflicts should be handled. A
|
- ``conflict_resolution``: Optional, define how conflicts should be handled. A
|
||||||
conflict occurs when one item (event, task) changed on both sides since the
|
conflict occurs when one item (event, task) changed on both sides since the
|
||||||
last sync. See also :ref:`conflict_resolution_tutorial`.
|
last sync.
|
||||||
|
|
||||||
Valid values are:
|
Valid values are:
|
||||||
|
|
||||||
- ``null``, where an error is shown and no changes are done.
|
|
||||||
- ``"a wins"`` and ``"b wins"``, where the whole item is taken from one side.
|
- ``"a wins"`` and ``"b wins"``, where the whole item is taken from one side.
|
||||||
- ``["command", "vimdiff"]``: ``vimdiff <a> <b>`` will be called where
|
Vdirsyncer will not attempt to merge the two items.
|
||||||
``<a>`` and ``<b>`` are temporary files that contain the item of each side
|
- ``null``, the default, where an error is shown and no changes are done.
|
||||||
respectively. The files need to be exactly the same when the command
|
|
||||||
returns.
|
|
||||||
|
|
||||||
- ``vimdiff`` can be replaced with any other command. For example, in POSIX
|
|
||||||
``["command", "cp"]`` is equivalent to ``"a wins"``.
|
|
||||||
- Additional list items will be forwarded as arguments. For example,
|
|
||||||
``["command", "vimdiff", "--noplugin"]`` runs ``vimdiff --noplugin``.
|
|
||||||
|
|
||||||
Vdirsyncer never attempts to "automatically merge" the two items.
|
|
||||||
|
|
||||||
.. _partial_sync_def:
|
|
||||||
|
|
||||||
- ``partial_sync``: Assume A is read-only, B not. If you change items on B,
|
|
||||||
vdirsyncer can't sync the changes to A. What should happen instead?
|
|
||||||
|
|
||||||
- ``error``: An error is shown.
|
|
||||||
- ``ignore``: The change is ignored. However: Events deleted in B still
|
|
||||||
reappear if they're updated in A.
|
|
||||||
- ``revert`` (default): The change is reverted on next sync.
|
|
||||||
|
|
||||||
See also :ref:`partial_sync_tutorial`.
|
|
||||||
|
|
||||||
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
||||||
metasync`` is executed. Example::
|
metasync`` is executed. Example::
|
||||||
|
|
||||||
metadata = ["color", "displayname", "description", "order"]
|
metadata = ["color", "displayname"]
|
||||||
|
|
||||||
This synchronizes the following properties:
|
This synchronizes the ``color`` and the ``displayname`` properties. The
|
||||||
|
``conflict_resolution`` parameter applies here as well.
|
||||||
- color: ``http://apple.com/ns/ical/:calendar-color``
|
|
||||||
- displayname: ``DAV:displayname``
|
|
||||||
- description: ``CalDAV:calendar-description`` and ``CardDAV:addressbook-description``
|
|
||||||
- order: ``http://apple.com/ns/ical/:calendar-order``
|
|
||||||
|
|
||||||
The ``conflict_resolution`` parameter applies for these properties too.
|
|
||||||
|
|
||||||
.. _implicit_def:
|
|
||||||
|
|
||||||
- ``implicit``: Opt into implicitly creating collections. Example::
|
|
||||||
|
|
||||||
implicit = "create"
|
|
||||||
|
|
||||||
When set to "create", missing collections are automatically created on both
|
|
||||||
sides during sync without prompting the user. This simplifies workflows where
|
|
||||||
all collections should be synchronized bidirectionally.
|
|
||||||
|
|
||||||
.. _storage_config:
|
.. _storage_config:
|
||||||
|
|
||||||
|
|
@ -164,363 +116,51 @@ Storage Section
|
||||||
Supported Storages
|
Supported Storages
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
|
Read-write storages
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
These storages generally support reading and changing of their items. Their
|
||||||
|
default value for ``read_only`` is ``false``, but can be set to ``true`` if
|
||||||
|
wished.
|
||||||
|
|
||||||
CalDAV and CardDAV
|
CalDAV and CardDAV
|
||||||
++++++++++++++++++
|
++++++++++++++++++
|
||||||
|
|
||||||
.. note::
|
.. autostorage:: vdirsyncer.storage.dav.CaldavStorage
|
||||||
|
|
||||||
Please also see :ref:`supported-servers`, as some servers may not work
|
.. autostorage:: vdirsyncer.storage.dav.CarddavStorage
|
||||||
well.
|
|
||||||
|
|
||||||
.. storage:: caldav
|
remoteStorage
|
||||||
|
+++++++++++++
|
||||||
|
|
||||||
CalDAV.
|
`remoteStorage <https://remotestorage.io/>`_ is an open per-user data storage
|
||||||
|
protocol. Vdirsyncer contains **highly experimental support** for it.
|
||||||
::
|
|
||||||
|
|
||||||
[storage example_for_caldav]
|
|
||||||
type = "caldav"
|
|
||||||
#start_date = null
|
|
||||||
#end_date = null
|
|
||||||
#item_types = []
|
|
||||||
url = "..."
|
|
||||||
#username = ""
|
|
||||||
#password = ""
|
|
||||||
#verify = /path/to/custom_ca.pem
|
|
||||||
#auth = null
|
|
||||||
#useragent = "vdirsyncer/0.16.4"
|
|
||||||
#verify_fingerprint = null
|
|
||||||
#auth_cert = null
|
|
||||||
|
|
||||||
You can set a timerange to synchronize with the parameters ``start_date``
|
|
||||||
and ``end_date``. Inside those parameters, you can use any Python
|
|
||||||
expression to return a valid :py:class:`datetime.datetime` object. For
|
|
||||||
example, the following would synchronize the timerange from one year in the
|
|
||||||
past to one year in the future::
|
|
||||||
|
|
||||||
start_date = "datetime.now() - timedelta(days=365)"
|
|
||||||
end_date = "datetime.now() + timedelta(days=365)"
|
|
||||||
|
|
||||||
Either both or none have to be specified. The default is to synchronize
|
|
||||||
everything.
|
|
||||||
|
|
||||||
You can set ``item_types`` to restrict the *kind of items* you want to
|
|
||||||
synchronize. For example, if you want to only synchronize events (but don't
|
|
||||||
download any tasks from the server), set ``item_types = ["VEVENT"]``. If
|
|
||||||
you want to synchronize events and tasks, but have some ``VJOURNAL`` items
|
|
||||||
on the server you don't want to synchronize, use ``item_types = ["VEVENT",
|
|
||||||
"VTODO"]``.
|
|
||||||
|
|
||||||
:param start_date: Start date of timerange to show, default -inf.
|
|
||||||
:param end_date: End date of timerange to show, default +inf.
|
|
||||||
:param item_types: Kind of items to show. The default, the empty list, is
|
|
||||||
to show all. This depends on particular features on the server, the
|
|
||||||
results are not validated.
|
|
||||||
:param url: Base URL or an URL to a calendar.
|
|
||||||
:param username: Username for authentication.
|
|
||||||
:param password: Password for authentication.
|
|
||||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
|
||||||
See :ref:`ssl-tutorial` for more information.
|
|
||||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
|
||||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
|
||||||
default is preemptive Basic auth, sending credentials even if server
|
|
||||||
didn't request them. This saves from an additional roundtrip per
|
|
||||||
request. Consider setting ``guess`` if this causes issues with your
|
|
||||||
server.
|
|
||||||
:param auth_cert: Optional. Either a path to a certificate with a client
|
|
||||||
certificate and the key or a list of paths to the files with them.
|
|
||||||
:param useragent: Default ``vdirsyncer``.
|
|
||||||
|
|
||||||
|
|
||||||
.. storage:: carddav
|
|
||||||
|
|
||||||
CardDAV.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[storage example_for_carddav]
|
|
||||||
type = "carddav"
|
|
||||||
url = "..."
|
|
||||||
#username = ""
|
|
||||||
#password = ""
|
|
||||||
#verify = /path/to/custom_ca.pem
|
|
||||||
#auth = null
|
|
||||||
#useragent = "vdirsyncer/0.16.4"
|
|
||||||
#verify_fingerprint = null
|
|
||||||
#auth_cert = null
|
|
||||||
#use_vcard_4 = false
|
|
||||||
|
|
||||||
:param url: Base URL or an URL to an addressbook.
|
|
||||||
:param username: Username for authentication.
|
|
||||||
:param password: Password for authentication.
|
|
||||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
|
||||||
See :ref:`ssl-tutorial` for more information.
|
|
||||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
|
||||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
|
||||||
default is preemptive Basic auth, sending credentials even if
|
|
||||||
server didn't request them. This saves from an additional
|
|
||||||
roundtrip per request. Consider setting ``guess`` if this
|
|
||||||
causes issues with your server.
|
|
||||||
:param auth_cert: Optional. Either a path to a certificate with a client
|
|
||||||
certificate and the key or a list of paths to the files
|
|
||||||
with them.
|
|
||||||
:param useragent: Default ``vdirsyncer``.
|
|
||||||
:param use_vcard_4: Whether the server use vCard 4.0.
|
|
||||||
|
|
||||||
Google
|
|
||||||
++++++
|
|
||||||
|
|
||||||
Vdirsyncer supports synchronization with Google calendars with the restriction
|
|
||||||
that ``VTODO`` files are rejected by the server.
|
|
||||||
|
|
||||||
Synchronization with Google contacts is less reliable due to negligence of
|
|
||||||
Google's CardDAV API. **Google's CardDAV implementation is allegedly a disaster
|
|
||||||
in terms of data safety**. See `this blog post
|
|
||||||
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
|
|
||||||
up your data.
|
|
||||||
|
|
||||||
Another caveat is that Google group labels are not synced with vCard's
|
|
||||||
`CATEGORIES <https://www.rfc-editor.org/rfc/rfc6350#section-6.7.1>`_ property
|
|
||||||
(also see :gh:`814` and
|
|
||||||
`upstream issue #36761530 <https://issuetracker.google.com/issues/36761530>`_
|
|
||||||
for reference) and the
|
|
||||||
`BDAY <https://www.rfc-editor.org/rfc/rfc6350#section-6.2.5>`_ property is not
|
|
||||||
synced when only partial date information is present (e.g. the year is missing).
|
|
||||||
|
|
||||||
At first run you will be asked to authorize application for Google account
|
|
||||||
access.
|
|
||||||
|
|
||||||
To use this storage type, you need to install some additional dependencies::
|
|
||||||
|
|
||||||
pip install vdirsyncer[google]
|
|
||||||
|
|
||||||
Furthermore you need to register vdirsyncer as an application yourself to
|
|
||||||
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
|
|
||||||
Service to hardcode those into opensource software [googleterms]_:
|
|
||||||
|
|
||||||
1. Go to the `Google API Manager <https://console.developers.google.com>`_
|
|
||||||
|
|
||||||
2. Create a new project under any name.
|
|
||||||
|
|
||||||
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
|
|
||||||
Calendar and Contacts APIs, those are different and won't work). There should
|
|
||||||
be a search box where you can just enter those terms.
|
|
||||||
|
|
||||||
3. In the sidebar, select "Credentials", then "Create Credentials" and create a
|
|
||||||
new "OAuth Client ID".
|
|
||||||
|
|
||||||
You'll be prompted to create a OAuth consent screen first. Fill out that
|
|
||||||
form however you like.
|
|
||||||
|
|
||||||
After setting up the consent screen, finish creating the new "OAuth Client
|
|
||||||
ID'. The correct application type is "Desktop application".
|
|
||||||
|
|
||||||
4. Finally you should have a Client ID and a Client secret. Provide these in
|
|
||||||
your storage config.
|
|
||||||
|
|
||||||
The ``token_file`` parameter should be a path to a file where vdirsyncer can
|
|
||||||
later store authentication-related data. You do not need to create the file
|
|
||||||
itself or write anything to it.
|
|
||||||
|
|
||||||
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
|
|
||||||
section "Confidential Matters".
|
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
You need to configure which calendars Google should offer vdirsyncer using
|
Do not use this storage if you're not prepared for data-loss and breakage.
|
||||||
a secret `settings page
|
|
||||||
<https://calendar.google.com/calendar/syncselect>`_.
|
|
||||||
|
|
||||||
.. storage:: google_calendar
|
To use them, you need to install some optional dependencies with::
|
||||||
|
|
||||||
Google calendar.
|
pip install vdirsyncer[remotestorage]
|
||||||
|
|
||||||
::
|
.. autostorage:: vdirsyncer.storage.remotestorage.RemoteStorageContacts
|
||||||
|
|
||||||
[storage example_for_google_calendar]
|
.. autostorage:: vdirsyncer.storage.remotestorage.RemoteStorageCalendars
|
||||||
type = "google_calendar"
|
|
||||||
token_file = "..."
|
|
||||||
client_id = "..."
|
|
||||||
client_secret = "..."
|
|
||||||
#start_date = null
|
|
||||||
#end_date = null
|
|
||||||
#item_types = []
|
|
||||||
|
|
||||||
Please refer to :storage:`caldav` regarding the ``item_types`` and timerange parameters.
|
|
||||||
|
|
||||||
:param token_file: A filepath where access tokens are stored.
|
|
||||||
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
|
||||||
API Manager.
|
|
||||||
|
|
||||||
.. storage:: google_contacts
|
|
||||||
|
|
||||||
Google contacts.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[storage example_for_google_contacts]
|
|
||||||
type = "google_contacts"
|
|
||||||
token_file = "..."
|
|
||||||
client_id = "..."
|
|
||||||
client_secret = "..."
|
|
||||||
|
|
||||||
:param token_file: A filepath where access tokens are stored.
|
|
||||||
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
|
||||||
API Manager.
|
|
||||||
|
|
||||||
The current flow is not ideal, but Google has deprecated the previous APIs used
|
|
||||||
for this without providing a suitable replacement. See :gh:`975` for discussion
|
|
||||||
on the topic.
|
|
||||||
|
|
||||||
Local
|
Local
|
||||||
+++++
|
+++++
|
||||||
|
|
||||||
.. storage:: filesystem
|
.. autostorage:: vdirsyncer.storage.filesystem.FilesystemStorage
|
||||||
|
|
||||||
Saves each item in its own file, given a directory.
|
.. autostorage:: vdirsyncer.storage.singlefile.SingleFileStorage
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[storage example_for_filesystem]
|
|
||||||
type = "filesystem"
|
|
||||||
path = "..."
|
|
||||||
fileext = "..."
|
|
||||||
#encoding = "utf-8"
|
|
||||||
#post_hook = null
|
|
||||||
#pre_deletion_hook = null
|
|
||||||
#fileignoreext = ".tmp"
|
|
||||||
|
|
||||||
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
|
|
||||||
a more formal description of the format.
|
|
||||||
|
|
||||||
Directories with a leading dot are ignored to make usage of e.g. version
|
|
||||||
control easier.
|
|
||||||
|
|
||||||
:param path: Absolute path to a vdir/collection. If this is used in
|
|
||||||
combination with the ``collections`` parameter in a pair-section, this
|
|
||||||
should point to a directory of vdirs instead.
|
|
||||||
:param fileext: The file extension to use (e.g. ``.txt``). Contained in the
|
|
||||||
href, so if you change the file extension after a sync, this will
|
|
||||||
trigger a re-download of everything (but *should* not cause data-loss
|
|
||||||
of any kind). To be compatible with the ``vset`` format you have
|
|
||||||
to either use ``.vcf`` or ``.ics``. Note that metasync won't work
|
|
||||||
if you use an empty string here.
|
|
||||||
:param encoding: File encoding for items, both content and filename.
|
|
||||||
:param post_hook: A command to call for each item creation and
|
|
||||||
modification. The command will be called with the path of the
|
|
||||||
new/updated file.
|
|
||||||
:param pre_deletion_hook: A command to call for each item deletion.
|
|
||||||
The command will be called with the path of the deleted file.
|
|
||||||
:param fileeignoreext: The file extention to ignore. It is only useful
|
|
||||||
if fileext is set to the empty string. The default is ``.tmp``.
|
|
||||||
|
|
||||||
.. storage:: singlefile
|
|
||||||
|
|
||||||
Save data in single local ``.vcf`` or ``.ics`` file.
|
|
||||||
|
|
||||||
The storage basically guesses how items should be joined in the file.
|
|
||||||
|
|
||||||
.. versionadded:: 0.1.6
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
This storage is very slow, and that is unlikely to change. You should
|
|
||||||
consider using :storage:`filesystem` if it fits your usecase.
|
|
||||||
|
|
||||||
:param path: The filepath to the file to be written to. If collections are
|
|
||||||
used, this should contain ``%s`` as a placeholder for the collection
|
|
||||||
name.
|
|
||||||
:param encoding: Which encoding the file should use. Defaults to UTF-8.
|
|
||||||
|
|
||||||
Example for syncing with :storage:`caldav`::
|
|
||||||
|
|
||||||
[pair my_calendar]
|
|
||||||
a = my_calendar_local
|
|
||||||
b = my_calendar_remote
|
|
||||||
collections = ["from a", "from b"]
|
|
||||||
|
|
||||||
[storage my_calendar_local]
|
|
||||||
type = "singlefile"
|
|
||||||
path = ~/.calendars/%s.ics
|
|
||||||
|
|
||||||
[storage my_calendar_remote]
|
|
||||||
type = "caldav"
|
|
||||||
url = https://caldav.example.org/
|
|
||||||
#username =
|
|
||||||
#password =
|
|
||||||
|
|
||||||
Example for syncing with :storage:`caldav` using a ``null`` collection::
|
|
||||||
|
|
||||||
[pair my_calendar]
|
|
||||||
a = my_calendar_local
|
|
||||||
b = my_calendar_remote
|
|
||||||
|
|
||||||
[storage my_calendar_local]
|
|
||||||
type = "singlefile"
|
|
||||||
path = ~/my_calendar.ics
|
|
||||||
|
|
||||||
[storage my_calendar_remote]
|
|
||||||
type = "caldav"
|
|
||||||
url = https://caldav.example.org/username/my_calendar/
|
|
||||||
#username =
|
|
||||||
#password =
|
|
||||||
|
|
||||||
Read-only storages
|
Read-only storages
|
||||||
++++++++++++++++++
|
~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
These storages don't support writing of their items, consequently ``read_only``
|
These storages don't support writing of their items, consequently ``read_only``
|
||||||
is set to ``true`` by default. Changing ``read_only`` to ``false`` on them
|
is set to ``true`` by default. Changing ``read_only`` to ``false`` on them
|
||||||
leads to an error.
|
leads to an error.
|
||||||
|
|
||||||
.. storage:: http
|
.. autostorage:: vdirsyncer.storage.http.HttpStorage
|
||||||
|
|
||||||
Use a simple ``.ics`` file (or similar) from the web.
|
|
||||||
``webcal://``-calendars are supposed to be used with this, but you have to
|
|
||||||
replace ``webcal://`` with ``http://``, or better, ``https://``.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[pair holidays]
|
|
||||||
a = holidays_local
|
|
||||||
b = holidays_remote
|
|
||||||
collections = null
|
|
||||||
|
|
||||||
[storage holidays_local]
|
|
||||||
type = "filesystem"
|
|
||||||
path = ~/.config/vdir/calendars/holidays/
|
|
||||||
fileext = .ics
|
|
||||||
|
|
||||||
[storage holidays_remote]
|
|
||||||
type = "http"
|
|
||||||
url = https://example.com/holidays_from_hicksville.ics
|
|
||||||
#filter_hook = null
|
|
||||||
|
|
||||||
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
|
|
||||||
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
|
|
||||||
This leads many synchronization programs to believe that all events have
|
|
||||||
been deleted and new ones created, and accordingly causes a lot of
|
|
||||||
unnecessary uploads and deletions on the other side. Vdirsyncer completely
|
|
||||||
ignores UIDs coming from :storage:`http` and will replace them with a hash
|
|
||||||
of the normalized item content.
|
|
||||||
|
|
||||||
:param url: URL to the ``.ics`` file.
|
|
||||||
:param username: Username for authentication.
|
|
||||||
:param password: Password for authentication.
|
|
||||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
|
||||||
See :ref:`ssl-tutorial` for more information.
|
|
||||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
|
||||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
|
||||||
default is preemptive Basic auth, sending credentials even if server
|
|
||||||
didn't request them. This saves from an additional roundtrip per
|
|
||||||
request. Consider setting ``guess`` if this causes issues with your
|
|
||||||
server.
|
|
||||||
:param auth_cert: Optional. Either a path to a certificate with a client
|
|
||||||
certificate and the key or a list of paths to the files with them.
|
|
||||||
:param useragent: Default ``vdirsyncer``.
|
|
||||||
:param filter_hook: Optional. A filter command to call for each fetched
|
|
||||||
item, passed in raw form to stdin and returned via stdout.
|
|
||||||
If nothing is returned by the filter command, the item is skipped.
|
|
||||||
This can be used to alter fields as needed when dealing with providers
|
|
||||||
generating malformed events.
|
|
||||||
|
|
|
||||||
|
|
@ -2,11 +2,12 @@
|
||||||
Support and Contact
|
Support and Contact
|
||||||
===================
|
===================
|
||||||
|
|
||||||
* The ``#pimutils`` `IRC channel on Libera.Chat <https://pimutils.org/contact>`_
|
* The ``#vdirsyncer`` IRC channel on Freenode might be active, depending on
|
||||||
might be active, depending on your timezone. Use it for support and general
|
your timezone. Use it for support and general (including off-topic)
|
||||||
(including off-topic) discussion.
|
discussion.
|
||||||
|
|
||||||
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
|
* Open `a GitHub issue <https://github.com/untitaker/vdirsyncer/issues/>`_ for
|
||||||
concrete bug reports and feature requests.
|
concrete bug reports and feature requests.
|
||||||
|
|
||||||
* For security issues, contact ``contact@pimutils.org``.
|
* Lastly, you can also `contact the author directly
|
||||||
|
<https://unterwaditzer.net/contact.html>`_.
|
||||||
|
|
|
||||||
|
|
@ -2,63 +2,27 @@
|
||||||
Contributing to this project
|
Contributing to this project
|
||||||
============================
|
============================
|
||||||
|
|
||||||
.. note::
|
**Important:** Please read :doc:`contact` for questions and support requests.
|
||||||
|
|
||||||
- Please read :doc:`contact` for questions and support requests.
|
|
||||||
|
|
||||||
- All participants must follow the `pimutils Code of Conduct
|
|
||||||
<http://pimutils.org/coc>`_.
|
|
||||||
|
|
||||||
The issue tracker
|
|
||||||
=================
|
|
||||||
|
|
||||||
We use `GitHub issues <https://github.com/pimutils/vdirsyncer/issues>`_ for
|
|
||||||
organizing bug reports and feature requests.
|
|
||||||
|
|
||||||
The following `labels <https://github.com/pimutils/vdirsyncer/labels>`_ are of
|
|
||||||
interest:
|
|
||||||
|
|
||||||
* "Planning" is for issues that are still undecided, but where at least some
|
|
||||||
discussion exists.
|
|
||||||
|
|
||||||
* "Blocked" is for issues that can't be worked on at the moment because some
|
|
||||||
other unsolved problem exists. This problem may be a bug in some software
|
|
||||||
dependency, for instance.
|
|
||||||
|
|
||||||
* "Ready" contains issues that are ready to work on.
|
|
||||||
|
|
||||||
If you just want to get started with contributing, the "ready" issues are an
|
|
||||||
option. Issues that are still in "Planning" are also an option, but require
|
|
||||||
more upfront thinking and may turn out to be impossible to solve, or at least
|
|
||||||
harder than anticipated. On the flip side those tend to be the more interesting
|
|
||||||
issues as well, depending on how one looks at it.
|
|
||||||
|
|
||||||
All of those labels are also available as a kanban board on `waffle.io
|
|
||||||
<https://waffle.io/pimutils/vdirsyncer>`_. It is really just an alternative
|
|
||||||
overview over all issues, but might be easier to comprehend.
|
|
||||||
|
|
||||||
Feel free to :doc:`contact <contact>` me or comment on the relevant issues for
|
|
||||||
further information.
|
|
||||||
|
|
||||||
Reporting bugs
|
Reporting bugs
|
||||||
--------------
|
==============
|
||||||
|
|
||||||
* Make sure your problem isn't already listed in :doc:`problems`.
|
* Make sure your problem isn't already listed in `Known Problems
|
||||||
|
<https://vdirsyncer.readthedocs.org/en/stable/problems.html>`_.
|
||||||
|
|
||||||
* Make sure you have the absolutely latest version of vdirsyncer. For users of
|
* Make sure you have the latest version by executing ``pip install --user
|
||||||
some Linux distributions such as Debian or Fedora this may not be the version
|
--upgrade vdirsyncer``.
|
||||||
that your distro offers. In those cases please file a bug against the distro
|
|
||||||
package, not against upstream vdirsyncer.
|
|
||||||
|
|
||||||
* Use ``--verbosity=DEBUG`` when including output from vdirsyncer.
|
* Use ``--verbosity=DEBUG`` when including output from vdirsyncer.
|
||||||
|
|
||||||
Suggesting features
|
Suggesting features
|
||||||
-------------------
|
===================
|
||||||
|
|
||||||
If you're suggesting a feature, keep in mind that vdirsyncer tries not to be a
|
If you're suggesting a feature, keep in mind that vdirsyncer tries not to be a
|
||||||
full calendar or contacts client, but rather just the piece of software that
|
full calendar or contacts client, but rather just the piece of software that
|
||||||
synchronizes all the data. :doc:`Take a look at the documentation for software
|
synchronizes all the data. `Take a look at the documentation for software
|
||||||
working with vdirsyncer <tutorials/index>`.
|
working with vdirsyncer
|
||||||
|
<http://vdirsyncer.readthedocs.org/en/latest/supported.html>`_.
|
||||||
|
|
||||||
Submitting patches, pull requests
|
Submitting patches, pull requests
|
||||||
=================================
|
=================================
|
||||||
|
|
@ -73,55 +37,20 @@ Submitting patches, pull requests
|
||||||
* Add yourself to ``AUTHORS.rst``, and add a note to ``CHANGELOG.rst`` too.
|
* Add yourself to ``AUTHORS.rst``, and add a note to ``CHANGELOG.rst`` too.
|
||||||
|
|
||||||
Running tests, how to set up your development environment
|
Running tests, how to set up your development environment
|
||||||
---------------------------------------------------------
|
=========================================================
|
||||||
|
|
||||||
For many patches, it might suffice to just let CI run the tests. However,
|
For many patches, it might suffice to just let Travis run the tests. However,
|
||||||
CI is slow, so you might want to run them locally too. For this, set up a
|
Travis is slow, so you might want to run them locally too. For this, set up a
|
||||||
virtualenv_ and run this inside of it::
|
virtualenv_ and run this inside of it::
|
||||||
|
|
||||||
# Install development dependencies, including:
|
make install-test
|
||||||
# - vdirsyncer from the repo into the virtualenv
|
make install-style
|
||||||
# - style checks and formatting (ruff)
|
|
||||||
make install-dev
|
|
||||||
|
|
||||||
# Install git commit hook for some extra linting and checking
|
|
||||||
pre-commit install
|
|
||||||
|
|
||||||
Then you can run::
|
Then you can run::
|
||||||
|
|
||||||
pytest # The normal testsuite
|
make test
|
||||||
pre-commit run --all # Run all linters (which also run via pre-commit)
|
make style # Stylechecker
|
||||||
make -C docs html # Build the HTML docs, output is at docs/_build/html/
|
|
||||||
make -C docs linkcheck # Check docs for any broken links
|
|
||||||
|
|
||||||
The ``Makefile`` has a lot of options that allow you to control which tests are
|
|
||||||
run, and which servers are tested. Take a look at its code where they are all
|
|
||||||
initialized and documented.
|
|
||||||
|
|
||||||
To tests against a specific DAV server, use ``DAV_SERVER``::
|
|
||||||
|
|
||||||
make DAV_SERVER=xandikos test
|
|
||||||
|
|
||||||
The server will be initialised in a docker container and terminated at the end
|
|
||||||
of the test suite.
|
|
||||||
|
|
||||||
If you have any questions, feel free to open issues about it.
|
If you have any questions, feel free to open issues about it.
|
||||||
|
|
||||||
Structure of the testsuite
|
.. _virtualenv: http://virtualenv.readthedocs.org/
|
||||||
--------------------------
|
|
||||||
|
|
||||||
Within ``tests/``, there are three main folders:
|
|
||||||
|
|
||||||
- ``system`` contains system- and also integration tests. A rough rule is: If
|
|
||||||
the test is using temporary files, put it here.
|
|
||||||
|
|
||||||
- ``unit``, where each testcase tests a single class or function.
|
|
||||||
|
|
||||||
- ``storage`` runs a generic storage testsuite against all storages.
|
|
||||||
|
|
||||||
The reason for this separation is: We are planning to generate separate
|
|
||||||
coverage reports for each of those testsuites. Ideally ``unit`` would generate
|
|
||||||
palatable coverage of the entire codebase *on its own*, and the *combination*
|
|
||||||
of ``system`` and ``storage`` as well.
|
|
||||||
|
|
||||||
.. _virtualenv: http://virtualenv.readthedocs.io/
|
|
||||||
|
|
|
||||||
|
|
@ -1,15 +0,0 @@
|
||||||
=========
|
|
||||||
Donations
|
|
||||||
=========
|
|
||||||
|
|
||||||
vdirsyncer is and will always be free and open source software. We appreciate
|
|
||||||
sponsors willing to fund our continued work on it.
|
|
||||||
|
|
||||||
If you found my work useful, please consider donating. Thank you!
|
|
||||||
|
|
||||||
- Bitcoin: ``13p42uWDL62bNRH3KWA6cSpSgvnHy1fs2E``.
|
|
||||||
- Sponsor via one-time tips or recurring donations `via Ko-fi`_.
|
|
||||||
- Sponsor via recurring donations `via liberapay`_.
|
|
||||||
|
|
||||||
.. _via Ko-fi: https://ko-fi.com/whynothugo
|
|
||||||
.. _via liberapay: https://liberapay.com/WhyNotHugo/
|
|
||||||
|
|
@ -2,49 +2,34 @@
|
||||||
vdirsyncer
|
vdirsyncer
|
||||||
==========
|
==========
|
||||||
|
|
||||||
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
- `Documentation <https://vdirsyncer.readthedocs.org/en/stable/>`_
|
||||||
- `Source code <https://github.com/pimutils/vdirsyncer>`_
|
- `Source code <https://github.com/untitaker/vdirsyncer>`_
|
||||||
|
|
||||||
Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks
|
Vdirsyncer synchronizes your calendars and addressbooks between two
|
||||||
between a variety of servers and the local filesystem. The most popular usecase
|
:ref:`storages <storages>`. The most popular purpose is to synchronize a
|
||||||
is to synchronize a server with a local folder and use a set of other
|
CalDAV/CardDAV server with a local folder or file. The local data can then be
|
||||||
:doc:`programs <tutorials/index>` to change the local events and contacts.
|
accessed via a variety of :doc:`programs <supported>`, none of which have to
|
||||||
Vdirsyncer can then synchronize those changes back to the server.
|
know or worry about syncing to a server.
|
||||||
|
|
||||||
However, vdirsyncer is not limited to synchronizing between clients and
|
It aims to be for CalDAV and CardDAV what `OfflineIMAP
|
||||||
servers. It can also be used to synchronize calendars and/or addressbooks
|
<http://offlineimap.org/>`_ is for IMAP.
|
||||||
between two servers directly.
|
|
||||||
|
|
||||||
It aims to be for calendars and contacts what `OfflineIMAP
|
Table of Contents
|
||||||
<http://offlineimap.org/>`_ is for emails.
|
=================
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:caption: Users
|
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
when
|
when
|
||||||
installation
|
|
||||||
tutorial
|
tutorial
|
||||||
ssl-tutorial
|
ssl-tutorial
|
||||||
keyring
|
keyring
|
||||||
partial-sync
|
|
||||||
config
|
config
|
||||||
tutorials/index
|
supported
|
||||||
problems
|
problems
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:caption: Developers
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
contributing
|
|
||||||
vdir
|
vdir
|
||||||
|
contributing
|
||||||
.. toctree::
|
|
||||||
:caption: General
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
packaging
|
packaging
|
||||||
contact
|
contact
|
||||||
changelog
|
changelog
|
||||||
license
|
license
|
||||||
donations
|
|
||||||
|
|
|
||||||
|
|
@ -1,122 +0,0 @@
|
||||||
.. _installation:
|
|
||||||
|
|
||||||
============
|
|
||||||
Installation
|
|
||||||
============
|
|
||||||
|
|
||||||
OS/distro packages
|
|
||||||
------------------
|
|
||||||
|
|
||||||
The following packages are community-contributed and were up-to-date at the
|
|
||||||
time of writing:
|
|
||||||
|
|
||||||
- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
|
|
||||||
- `Ubuntu and Debian, x86_64-only
|
|
||||||
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
|
|
||||||
in the official repositories but may be out of date)
|
|
||||||
- `GNU Guix <https://packages.guix.gnu.org/packages/vdirsyncer/>`_
|
|
||||||
- `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_
|
|
||||||
- `NetBSD <https://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/time/py-vdirsyncer/index.html>`_
|
|
||||||
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
|
|
||||||
- `Slackware (SlackBuild at Slackbuilds.org) <https://slackbuilds.org/repository/15.0/network/vdirsyncer/>`_
|
|
||||||
|
|
||||||
We only support the latest version of vdirsyncer, which is at the time of this
|
|
||||||
writing |vdirsyncer_version|. Please **do not file bugs if you use an older
|
|
||||||
version**.
|
|
||||||
|
|
||||||
Some distributions have multiple release channels. Debian and Fedora for
|
|
||||||
example have a "stable" release channel that ships an older version of
|
|
||||||
vdirsyncer. Those versions aren't supported either.
|
|
||||||
|
|
||||||
If there is no suitable package for your distribution, you'll need to
|
|
||||||
:ref:`install vdirsyncer manually <manual-installation>`. There is an easy
|
|
||||||
command to copy-and-paste for this as well, but you should be aware of its
|
|
||||||
consequences.
|
|
||||||
|
|
||||||
.. _manual-installation:
|
|
||||||
|
|
||||||
Manual installation
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
If your distribution doesn't provide a package for vdirsyncer, you still can
|
|
||||||
use Python's package manager "pip". First, you'll have to check that the
|
|
||||||
following things are installed:
|
|
||||||
|
|
||||||
- Python 3.9 to 3.13 and pip.
|
|
||||||
- ``libxml`` and ``libxslt``
|
|
||||||
- ``zlib``
|
|
||||||
- Linux or macOS. **Windows is not supported**, see :gh:`535`.
|
|
||||||
|
|
||||||
On Linux systems, using the distro's package manager is the best
|
|
||||||
way to do this, for example, using Ubuntu::
|
|
||||||
|
|
||||||
sudo apt-get install libxml2 libxslt1.1 zlib1g python3
|
|
||||||
|
|
||||||
Then you have several options. The following text applies for most Python
|
|
||||||
software by the way.
|
|
||||||
|
|
||||||
pipx: The clean, easy way
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
pipx_ is a new package manager for Python-based software that automatically
|
|
||||||
sets up a virtual environment for each program it installs. Please note that
|
|
||||||
installing via pipx will not include manual pages nor systemd services.
|
|
||||||
|
|
||||||
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
|
|
||||||
|
|
||||||
Assuming that pipx is installed, vdirsyncer can be installed with::
|
|
||||||
|
|
||||||
pipx install vdirsyncer
|
|
||||||
|
|
||||||
It can later be updated to the latest version with::
|
|
||||||
|
|
||||||
pipx upgrade vdirsyncer
|
|
||||||
|
|
||||||
And can be uninstalled with::
|
|
||||||
|
|
||||||
pipx uninstall vdirsyncer
|
|
||||||
|
|
||||||
This last command will remove vdirsyncer and any dependencies installed into
|
|
||||||
the above location.
|
|
||||||
|
|
||||||
.. _pipx: https://github.com/pipxproject/pipx
|
|
||||||
|
|
||||||
The dirty, easy way
|
|
||||||
~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
If pipx is not available on your distribution, the easiest way to install
|
|
||||||
vdirsyncer at this point would be to run::
|
|
||||||
|
|
||||||
pip install --ignore-installed vdirsyncer
|
|
||||||
|
|
||||||
- ``--ignore-installed`` is to work around Debian's potentially broken packages
|
|
||||||
(see :ref:`debian-urllib3`).
|
|
||||||
|
|
||||||
This method has a major flaw though: Pip doesn't keep track of the files it
|
|
||||||
installs. Vdirsyncer's files would be located somewhere in
|
|
||||||
``~/.local/lib/python*``, but you can't possibly know which packages were
|
|
||||||
installed as dependencies of vdirsyncer and which ones were not, should you
|
|
||||||
decide to uninstall it. In other words, using pip that way would pollute your
|
|
||||||
home directory.
|
|
||||||
|
|
||||||
The clean, hard way
|
|
||||||
~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
There is a way to install Python software without scattering stuff across
|
|
||||||
your filesystem: virtualenv_. There are a lot of resources on how to use it,
|
|
||||||
the simplest possible way would look something like::
|
|
||||||
|
|
||||||
virtualenv ~/vdirsyncer_env
|
|
||||||
~/vdirsyncer_env/bin/pip install vdirsyncer
|
|
||||||
alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer"
|
|
||||||
|
|
||||||
You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``.
|
|
||||||
|
|
||||||
This method has two advantages:
|
|
||||||
|
|
||||||
- It separately installs all Python packages into ``~/vdirsyncer_env/``,
|
|
||||||
without relying on the system packages. This works around OS- or
|
|
||||||
distro-specific issues.
|
|
||||||
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
|
||||||
|
|
||||||
.. _virtualenv: https://virtualenv.readthedocs.io/
|
|
||||||
|
|
@ -14,43 +14,30 @@ Command
|
||||||
Say you have the following configuration::
|
Say you have the following configuration::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
url = ...
|
url = ...
|
||||||
username = "foo"
|
username = foo
|
||||||
password = "bar"
|
password = bar
|
||||||
|
|
||||||
But it bugs you that the password is stored in cleartext in the config file.
|
But it bugs you that the password is stored in cleartext in the config file.
|
||||||
You can do this::
|
You can do this::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
url = ...
|
url = ...
|
||||||
username = "foo"
|
username = foo
|
||||||
password.fetch = ["command", "~/get-password.sh", "more", "args"]
|
password.fetch = ["command", "~/get-password.sh", "more", "args"]
|
||||||
|
|
||||||
You can fetch the username as well::
|
You can fetch the username as well::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
url = ...
|
url = ...
|
||||||
username.fetch = ["command", "~/get-username.sh"]
|
username.fetch = ["command", "~/get-username.sh"]
|
||||||
password.fetch = ["command", "~/get-password.sh"]
|
password.fetch = ["command", "~/get-password.sh"]
|
||||||
|
|
||||||
Or really any kind of parameter in a storage section.
|
Or really any kind of parameter in a storage section.
|
||||||
|
|
||||||
You can also pass the command as a string to be executed in a shell::
|
|
||||||
|
|
||||||
[storage foo]
|
|
||||||
...
|
|
||||||
password.fetch = ["shell", "~/.local/bin/get-my-password | head -n1"]
|
|
||||||
|
|
||||||
With pass_ for example, you might find yourself writing something like this in
|
|
||||||
your configuration file::
|
|
||||||
|
|
||||||
password.fetch = ["command", "pass", "caldav"]
|
|
||||||
|
|
||||||
.. _pass: https://www.passwordstore.org/
|
|
||||||
|
|
||||||
Accessing the system keyring
|
Accessing the system keyring
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
|
|
@ -75,22 +62,6 @@ Password Prompt
|
||||||
You can also simply prompt for the password::
|
You can also simply prompt for the password::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
username = "myusername"
|
username = myusername
|
||||||
password.fetch = ["prompt", "Password for CalDAV"]
|
password.fetch = ["prompt", "Password for CalDAV"]
|
||||||
|
|
||||||
Environment variable
|
|
||||||
===============
|
|
||||||
|
|
||||||
To read the password from an environment variable::
|
|
||||||
|
|
||||||
[storage foo]
|
|
||||||
type = "caldav"
|
|
||||||
username = "myusername"
|
|
||||||
password.fetch = ["command", "printenv", "DAV_PW"]
|
|
||||||
|
|
||||||
This is especially handy if you use the same password multiple times
|
|
||||||
(say, for a CardDAV and a CalDAV storage).
|
|
||||||
On bash, you can read and export the password without printing::
|
|
||||||
|
|
||||||
read -s DAV_PW "DAV Password: " && export DAV_PW
|
|
||||||
|
|
|
||||||
|
|
@ -5,31 +5,30 @@ Packaging guidelines
|
||||||
Thank you very much for packaging vdirsyncer! The following guidelines should
|
Thank you very much for packaging vdirsyncer! The following guidelines should
|
||||||
help you to avoid some common pitfalls.
|
help you to avoid some common pitfalls.
|
||||||
|
|
||||||
If you find yourself needing to patch anything, or going in a different direction,
|
While they are called guidelines and therefore theoretically not mandatory, if
|
||||||
please open an issue so we can also address in a way that works for everyone. Otherwise
|
you consider going a different direction, please first open an issue or contact
|
||||||
we get bug reports for code or scenarios that don't exist in upstream vdirsycner.
|
me otherwise instead of just going ahead. These guidelines exist for my own
|
||||||
|
convenience too.
|
||||||
|
|
||||||
Obtaining the source code
|
Obtaining the source code
|
||||||
=========================
|
=========================
|
||||||
|
|
||||||
The main distribution channel is `PyPI
|
The main distribution channel is `PyPI
|
||||||
<https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be
|
<https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be
|
||||||
obtained there. We mirror the same package tarball and wheel as GitHub
|
obtained there. Do not use the ones from GitHub: Their tarballs contain useless
|
||||||
releases. Please do not confuse these with the auto-generated GitHub "Source
|
junk and are more of a distraction than anything else.
|
||||||
Code" tarball. Those are missing some important metadata and your build will fail.
|
|
||||||
|
|
||||||
We give each release a tag in the git repo. If you want to get notified of new
|
I give each release a tag in the git repo. If you want to get notified of new
|
||||||
releases, `GitHub's feed
|
releases, `GitHub's feed
|
||||||
<https://github.com/pimutils/vdirsyncer/releases.atom>`_ is a good way.
|
<https://github.com/untitaker/vdirsyncer/releases.atom>`_ is a good way.
|
||||||
|
|
||||||
Tags will be signed by the maintainer who is doing the release (starting with
|
|
||||||
0.16.8), and generation of the tarball and wheel is done by CI. Hence, only the
|
|
||||||
tag itself is signed.
|
|
||||||
|
|
||||||
Dependency versions
|
Dependency versions
|
||||||
===================
|
===================
|
||||||
|
|
||||||
As with most Python packages, ``setup.py`` denotes the dependencies of
|
It is strongly discouraged to package vdirsyncer as a Python 2 application.
|
||||||
|
Future releases will only work on Python 3.3 and newer versions.
|
||||||
|
|
||||||
|
As with most Python packages, ``setup.py`` denotes the runtime dependencies of
|
||||||
vdirsyncer. It also contains lower-bound versions of each dependency. Older
|
vdirsyncer. It also contains lower-bound versions of each dependency. Older
|
||||||
versions will be rejected by the testsuite.
|
versions will be rejected by the testsuite.
|
||||||
|
|
||||||
|
|
@ -37,56 +36,30 @@ Testing
|
||||||
=======
|
=======
|
||||||
|
|
||||||
Everything testing-related goes through the ``Makefile`` in the root of the
|
Everything testing-related goes through the ``Makefile`` in the root of the
|
||||||
repository or PyPI package. Trying to e.g. run ``pytest`` directly will
|
repository or PyPI package. Trying to e.g. run ``py.test`` directly will
|
||||||
require a lot of environment variables to be set (for configuration) and you
|
require a lot of environment variables to be set (for configuration) and you
|
||||||
probably don't want to deal with that.
|
probably don't want to deal with that.
|
||||||
|
|
||||||
You can install the all development dependencies with::
|
You can install the testing dependencies with ``make test-install``. You
|
||||||
|
probably don't want this since it will use pip to download the dependencies.
|
||||||
make install-dev
|
Alternatively you can find the testing dependencies in
|
||||||
|
``test-requirements.txt``, again with lower-bound version requirements.
|
||||||
You probably don't want this since it will use pip to download the
|
|
||||||
dependencies. Alternatively test dependencies are listed as ``test`` optional
|
|
||||||
dependencies in ``pyproject.toml``, again with lower-bound version
|
|
||||||
requirements.
|
|
||||||
|
|
||||||
You also have to have vdirsyncer fully installed at this point. Merely
|
You also have to have vdirsyncer fully installed at this point. Merely
|
||||||
``cd``-ing into the tarball will not be sufficient.
|
``cd``-ing into the tarball will not be sufficient.
|
||||||
|
|
||||||
Running the tests happens with::
|
Running the tests happens with ``make test``.
|
||||||
|
|
||||||
pytest
|
|
||||||
|
|
||||||
Hypothesis will randomly generate test input. If you care about deterministic
|
Hypothesis will randomly generate test input. If you care about deterministic
|
||||||
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
||||||
|
|
||||||
make DETERMINISTIC_TESTS=true test
|
make DETERMINISTIC_TESTS=true test
|
||||||
|
|
||||||
There are a lot of additional variables that allow you to test vdirsyncer
|
|
||||||
against a particular server. Those variables are not "stable" and may change
|
|
||||||
drastically between minor versions. Just don't use them, you are unlikely to
|
|
||||||
find bugs that vdirsyncer's CI hasn't found.
|
|
||||||
|
|
||||||
Documentation
|
Documentation
|
||||||
=============
|
=============
|
||||||
|
|
||||||
Using Sphinx_ you can generate the documentation you're reading right now in a
|
You can find a list of dependencies in ``docs-requirements.txt``.
|
||||||
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
|
|
||||||
take care of the HTML docs' formatting.
|
|
||||||
|
|
||||||
You can find a list of dependencies in ``pyproject.toml``, in the
|
Change into the ``docs/`` directory and build whatever format you want. That
|
||||||
``project.optional-dependencies`` section as ``docs``. Again, you can install
|
said, I only take care of the HTML docs' formatting -- other targets (such as
|
||||||
those using pip with::
|
the generated manpage) may look like garbage.
|
||||||
|
|
||||||
pip install '.[docs]'
|
|
||||||
|
|
||||||
Then change into the ``docs/`` directory and build whatever format you want
|
|
||||||
using the ``Makefile`` in there (run ``make`` for the formats you can build).
|
|
||||||
|
|
||||||
.. _Sphinx: www.sphinx-doc.org/
|
|
||||||
|
|
||||||
Contrib files
|
|
||||||
=============
|
|
||||||
|
|
||||||
Reference ``systemd.service`` and ``systemd.timer`` unit files are provided. It
|
|
||||||
is recommended to install this if your distribution is systemd-based.
|
|
||||||
|
|
|
||||||
|
|
@ -1,72 +0,0 @@
|
||||||
.. _partial_sync_tutorial:
|
|
||||||
|
|
||||||
===============================
|
|
||||||
Syncing with read-only storages
|
|
||||||
===============================
|
|
||||||
|
|
||||||
If you want to subscribe to a public, read-only `WebCAL
|
|
||||||
<https://en.wikipedia.org/wiki/Webcal>`_-calendar but neither your server nor
|
|
||||||
your calendar apps support that (or support it insufficiently), vdirsyncer can
|
|
||||||
be used to synchronize such a public calendar ``A`` with a new calendar ``B``
|
|
||||||
of your own and keep ``B`` updated.
|
|
||||||
|
|
||||||
Step 1: Create the target calendar
|
|
||||||
==================================
|
|
||||||
|
|
||||||
First you need to create the calendar you want to sync the WebCAL-calendar
|
|
||||||
with. Most servers offer a web interface for this. You then need to note the
|
|
||||||
CalDAV URL of your calendar. Note that this URL should directly point to the
|
|
||||||
calendar you just created, which means you would have one such URL for each
|
|
||||||
calendar you have.
|
|
||||||
|
|
||||||
Step 2: Creating the config
|
|
||||||
===========================
|
|
||||||
|
|
||||||
Paste this into your vdirsyncer config::
|
|
||||||
|
|
||||||
[pair holidays]
|
|
||||||
a = "holidays_public"
|
|
||||||
b = "holidays_private"
|
|
||||||
collections = null
|
|
||||||
|
|
||||||
[storage holidays_public]
|
|
||||||
type = "http"
|
|
||||||
# The URL to your iCalendar file.
|
|
||||||
url = "..."
|
|
||||||
|
|
||||||
[storage holidays_private]
|
|
||||||
type = "caldav"
|
|
||||||
# The direct URL to your calendar.
|
|
||||||
url = "..."
|
|
||||||
# The credentials to your CalDAV server
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
Then run ``vdirsyncer discover holidays`` and ``vdirsyncer sync holidays``, and
|
|
||||||
your previously created calendar should be filled with events.
|
|
||||||
|
|
||||||
Step 3: The partial_sync parameter
|
|
||||||
==================================
|
|
||||||
|
|
||||||
.. versionadded:: 0.14
|
|
||||||
|
|
||||||
You may get into a situation where you want to hide or modify some events from
|
|
||||||
your ``holidays`` calendar. If you try to do that at this point, you'll notice
|
|
||||||
that vdirsyncer will revert any changes you've made after a few times of
|
|
||||||
running ``sync``. This is because vdirsyncer wants to keep everything in sync,
|
|
||||||
and it can't synchronize changes to the public holidays-calendar because it
|
|
||||||
doesn't have the rights to do so.
|
|
||||||
|
|
||||||
For such purposes you can set the ``partial_sync`` parameter to ``ignore``::
|
|
||||||
|
|
||||||
[pair holidays]
|
|
||||||
a = "holidays_public"
|
|
||||||
b = "holidays_private"
|
|
||||||
collections = null
|
|
||||||
partial_sync = ignore
|
|
||||||
|
|
||||||
See :ref:`the config docs <partial_sync_def>` for more information.
|
|
||||||
|
|
||||||
.. _nextCloud: https://nextcloud.com/
|
|
||||||
.. _Baikal: http://sabre.io/baikal/
|
|
||||||
.. _DAViCal: http://www.davical.org/
|
|
||||||
|
|
@ -6,17 +6,96 @@ For any unanswered questions or problems, see :doc:`contact`.
|
||||||
|
|
||||||
.. _debian-urllib3:
|
.. _debian-urllib3:
|
||||||
|
|
||||||
Requests-related ImportErrors
|
Requests-related ImportErrors on Debian-based distributions
|
||||||
-----------------------------
|
-----------------------------------------------------------
|
||||||
|
|
||||||
ImportError: No module named packages.urllib3.poolmanager
|
ImportError: No module named packages.urllib3.poolmanager
|
||||||
|
|
||||||
ImportError: cannot import name iter_field_objects
|
ImportError: cannot import name iter_field_objects
|
||||||
|
|
||||||
Debian and nowadays even other distros make modifications to the ``requests``
|
Debian has had its problems in the past with the Python requests package, see
|
||||||
package that don't play well with packages assuming a normal ``requests``. This
|
:gh:`82` and :gh:`140`. You have several options for solving this problem:
|
||||||
is due to stubbornness on both sides.
|
|
||||||
|
|
||||||
See :gh:`82` and :gh:`140` for past discussions. You have one option to work
|
- Set the ``auth`` parameter of :storage:`caldav`, :storage:`carddav`, and/or
|
||||||
around this, that is, to install vdirsyncer in a virtual environment, see
|
:storage:`http` to ``basic`` or ``digest`` (not ``guess``).
|
||||||
:ref:`manual-installation`.
|
|
||||||
|
- Upgrade your installation of the Debian requests package to at least version
|
||||||
|
``2.4.3-1``.
|
||||||
|
|
||||||
|
- If this doesn't help, install vdirsyncer in a virtualenv, see
|
||||||
|
:ref:`manual-installation`.
|
||||||
|
|
||||||
|
|
||||||
|
.. _manual-installation:
|
||||||
|
|
||||||
|
Manual installation
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
If your distribution doesn't provide a package for vdirsyncer, you still can
|
||||||
|
use Python's package manager "pip". First, you'll have to check that the
|
||||||
|
following things are installed:
|
||||||
|
|
||||||
|
- A compatible version of Python (2.7+ or 3.3+) and the corresponding pip package
|
||||||
|
- ``libxml`` and ``libxslt``
|
||||||
|
- ``zlib``
|
||||||
|
|
||||||
|
On Linux systems, using the distro's package manager is the best
|
||||||
|
way to do this, for example, using Ubuntu::
|
||||||
|
|
||||||
|
sudo apt-get install libxml2 libxslt1.1 zlib1g python
|
||||||
|
|
||||||
|
Then you have several options:
|
||||||
|
|
||||||
|
The dirty, easy way
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The easiest way to install vdirsyncer at this point would be to run::
|
||||||
|
|
||||||
|
pip install --user vdirsyncer
|
||||||
|
|
||||||
|
This method has a major flaw though: Pip doesn't keep track of the files it
|
||||||
|
installs. Vdirsyncer's files would be located somewhere in
|
||||||
|
``~/.local/lib/python*``, but you can't possibly know which packages were
|
||||||
|
installed as dependencies of vdirsyncer and which ones were not, should you
|
||||||
|
decide to uninstall it. In other words, using pip that way would pollute your
|
||||||
|
home directory.
|
||||||
|
|
||||||
|
The clean but hard way
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
There is a way to install Python software without scattering stuff across
|
||||||
|
your filesystem: virtualenv_. There are a lot of resources on how to use it,
|
||||||
|
the simplest possible way would look something like::
|
||||||
|
|
||||||
|
virtualenv ~/vdirsyncer_env
|
||||||
|
~/vdirsyncer_env/bin/pip install vdirsyncer
|
||||||
|
alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer
|
||||||
|
|
||||||
|
You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``.
|
||||||
|
|
||||||
|
This method has two advantages:
|
||||||
|
|
||||||
|
- It separately installs all Python packages into ``~/vdirsyncer_env/``,
|
||||||
|
without relying on the system packages. This works around OS- or
|
||||||
|
distro-specific issues.
|
||||||
|
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
||||||
|
|
||||||
|
The new, perfect way
|
||||||
|
~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
pipsi_ is a new package manager for Python-based software that automatically
|
||||||
|
sets up a virtualenv for each program you install. Assuming you have it
|
||||||
|
installed on your operating system, you can do::
|
||||||
|
|
||||||
|
pipsi install vdirsyncer
|
||||||
|
|
||||||
|
and ``.local/bin/vdirsyncer`` will be your new vdirsyncer installation.
|
||||||
|
|
||||||
|
If you're done with vdirsyncer, you can do::
|
||||||
|
|
||||||
|
pipsi uninstall vdirsyncer
|
||||||
|
|
||||||
|
and vdirsyncer will be uninstalled, including its dependencies.
|
||||||
|
|
||||||
|
.. _virtualenv: https://virtualenv.readthedocs.org/
|
||||||
|
.. _pipsi: https://github.com/mitsuhiko/pipsi
|
||||||
|
|
|
||||||
|
|
@ -12,21 +12,17 @@ Pinning by fingerprint
|
||||||
To pin the certificate by fingerprint::
|
To pin the certificate by fingerprint::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
...
|
...
|
||||||
verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
|
verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA"
|
||||||
|
#verify = false # Optional: Disable CA validation, useful for self-signed certs
|
||||||
|
|
||||||
SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported.
|
SHA1-, SHA256- or MD5-Fingerprints can be used. They're detected by their
|
||||||
CA validation is disabled when pinning a fingerprint.
|
length.
|
||||||
|
|
||||||
You can use the following command for obtaining a SHA256 fingerprint::
|
You can use the following command for obtaining a SHA-1 fingerprint::
|
||||||
|
|
||||||
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
|
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint
|
||||||
|
|
||||||
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
|
|
||||||
that you can forget about all of that. It is easier to deploy a free
|
|
||||||
certificate from them than configuring all of your clients to accept the
|
|
||||||
self-signed certificate.
|
|
||||||
|
|
||||||
.. _ssl-cas:
|
.. _ssl-cas:
|
||||||
|
|
||||||
|
|
@ -36,20 +32,26 @@ Custom root CAs
|
||||||
To point vdirsyncer to a custom set of root CAs::
|
To point vdirsyncer to a custom set of root CAs::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
...
|
...
|
||||||
verify = "/path/to/cert.pem"
|
verify = "/path/to/cert.pem"
|
||||||
|
|
||||||
Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext
|
Vdirsyncer uses the requests_ library, which, by default, `uses its own set of
|
||||||
https://docs.python.org/3/library/ssl.html#ssl.SSLContext`_ by default.
|
trusted CAs
|
||||||
|
<http://www.python-requests.org/en/latest/user/advanced/#ca-certificates>`_.
|
||||||
|
|
||||||
There are cases where certificate validation fails even though you can access
|
However, the actual behavior depends on how you have installed it. Some Linux
|
||||||
the server fine through e.g. your browser. This usually indicates that your
|
distributions, such as Debian, patch their ``python-requests`` package to use
|
||||||
installation of ``python`` or the ``aiohttp`` or library is somehow broken. In
|
the system certificate CAs. Normally these two stores are similar enough for
|
||||||
such cases, it makes sense to explicitly set ``verify`` or
|
you to not care.
|
||||||
``verify_fingerprint`` as shown above.
|
|
||||||
|
|
||||||
.. _aiohttp: https://docs.aiohttp.org/en/stable/index.html
|
But there are cases where certificate validation fails even though you can
|
||||||
|
access the server fine through e.g. your browser. This usually indicates that
|
||||||
|
your installation of the ``requests`` library is somehow broken. In such cases,
|
||||||
|
it makes sense to explicitly set ``verify`` or ``verify_fingerprint`` as shown
|
||||||
|
above.
|
||||||
|
|
||||||
|
.. _requests: http://www.python-requests.org/
|
||||||
|
|
||||||
.. _ssl-client-certs:
|
.. _ssl-client-certs:
|
||||||
|
|
||||||
|
|
@ -60,13 +62,13 @@ Client certificates may be specified with the ``auth_cert`` parameter. If the
|
||||||
key and certificate are stored in the same file, it may be a string::
|
key and certificate are stored in the same file, it may be a string::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
...
|
...
|
||||||
auth_cert = "/path/to/certificate.pem"
|
auth_cert = "/path/to/certificate.pem"
|
||||||
|
|
||||||
If the key and certificate are separate, a list may be used::
|
If the key and certificate are separate, a list may be used::
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = caldav
|
||||||
...
|
...
|
||||||
auth_cert = ["/path/to/certificate.crt", "/path/to/key.key"]
|
auth_cert = ["/path/to/certificate.crt", "/path/to/key.key"]
|
||||||
|
|
|
||||||
211
docs/supported.rst
Normal file
211
docs/supported.rst
Normal file
|
|
@ -0,0 +1,211 @@
|
||||||
|
==================
|
||||||
|
Supported Software
|
||||||
|
==================
|
||||||
|
|
||||||
|
Client applications
|
||||||
|
===================
|
||||||
|
|
||||||
|
The following software has been reported to work well with vdirsyncer, however,
|
||||||
|
none of it is regularly tested.
|
||||||
|
|
||||||
|
Calendars
|
||||||
|
---------
|
||||||
|
|
||||||
|
- khal_, a CLI calendar application supporting :doc:`vdir <vdir>`. You can use
|
||||||
|
:storage:`filesystem` with it.
|
||||||
|
|
||||||
|
- Many graphical calendar apps such as dayplanner_, Orage_ or rainlendar_ save
|
||||||
|
a calendar in a single ``.ics`` file. You can use :storage:`singlefile` with
|
||||||
|
those.
|
||||||
|
|
||||||
|
.. _khal: http://lostpackets.de/khal/
|
||||||
|
.. _dayplanner: http://www.day-planner.org/
|
||||||
|
.. _Orage: http://www.kolumbus.fi/~w408237/orage/
|
||||||
|
.. _rainlendar: http://www.rainlendar.net/
|
||||||
|
|
||||||
|
Task/Todo managers
|
||||||
|
------------------
|
||||||
|
|
||||||
|
The iCalendar format also supports saving tasks in form of ``VTODO``-entries,
|
||||||
|
with the same file extension as normal events: ``.ics``. All CalDAV servers
|
||||||
|
support synchronizing tasks, vdirsyncer does too.
|
||||||
|
|
||||||
|
- todoman_, a CLI task manager supporting :doc:`vdir <vdir>`. You can use
|
||||||
|
:storage:`filesystem` with it.
|
||||||
|
|
||||||
|
Its interface is similar to the ones of Taskwarrior or the todo.txt CLI app
|
||||||
|
and should be intuitively usable.
|
||||||
|
|
||||||
|
.. _todoman: https://hugo.barrera.io/journal/2015/03/30/introducing-todoman/
|
||||||
|
|
||||||
|
|
||||||
|
Contacts
|
||||||
|
--------
|
||||||
|
|
||||||
|
- khard_, a commandline addressbook supporting :doc:`vdir <vdir>`. You can use
|
||||||
|
:storage:`filesystem` with it.
|
||||||
|
|
||||||
|
- contactquery.c_, a small program explicitly written for querying vdirs from
|
||||||
|
mutt.
|
||||||
|
|
||||||
|
- mates_, a commandline addressbook supporting :doc:`vdir <vdir>`.
|
||||||
|
|
||||||
|
.. _khard: https://github.com/scheibler/khard/
|
||||||
|
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
||||||
|
.. _mates: https://github.com/untitaker/mates.rs
|
||||||
|
|
||||||
|
.. _supported-servers:
|
||||||
|
|
||||||
|
Supported servers
|
||||||
|
=================
|
||||||
|
|
||||||
|
CalDAV and CardDAV servers not listed here may work anyway.
|
||||||
|
|
||||||
|
Radicale
|
||||||
|
--------
|
||||||
|
|
||||||
|
Radicale_ is a very lightweight server, however, it intentionally doesn't
|
||||||
|
implement the CalDAV and CardDAV standards completely, which might lead to
|
||||||
|
issues even with very well-written clients. Apart from its non-conformity with
|
||||||
|
standards, there are multiple other problems with its code quality and the way
|
||||||
|
it is maintained.
|
||||||
|
|
||||||
|
That said, vdirsyncer is continuously tested against the git version and the
|
||||||
|
latest PyPI release of Radicale.
|
||||||
|
|
||||||
|
- Vdirsyncer can't create collections on Radicale.
|
||||||
|
- Radicale doesn't `support time ranges in the calendar-query of CalDAV
|
||||||
|
<https://github.com/Kozea/Radicale/issues/146>`_, so setting ``start_date``
|
||||||
|
and ``end_date`` for :storage:`caldav` will have no or unpredicted
|
||||||
|
consequences.
|
||||||
|
|
||||||
|
- `Versions of Radicale older than 0.9b1 choke on RFC-conform queries for all
|
||||||
|
items of a collection <https://github.com/Kozea/Radicale/issues/143>`_.
|
||||||
|
|
||||||
|
You have to set ``item_types = ["VTODO", "VEVENT"]`` in
|
||||||
|
:storage:`caldav` for vdirsyncer to work with those versions.
|
||||||
|
|
||||||
|
.. _Radicale: http://radicale.org/
|
||||||
|
|
||||||
|
|
||||||
|
.. _owncloud_setup:
|
||||||
|
|
||||||
|
ownCloud
|
||||||
|
--------
|
||||||
|
|
||||||
|
Vdirsyncer is continuously tested against the latest version of ownCloud_::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = caldav
|
||||||
|
url = https://example.com/owncloud/remote.php/caldav/
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = carddav
|
||||||
|
url = https://example.com/owncloud/remote.php/carddav/
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
- *Versions older than 7.0.0:* ownCloud uses SabreDAV, which had problems
|
||||||
|
detecting collisions and race-conditions. The problems were reported and are
|
||||||
|
fixed in SabreDAV's repo, and the corresponding fix is also in ownCloud since
|
||||||
|
7.0.0. See :gh:`16` for more information.
|
||||||
|
|
||||||
|
.. _ownCloud: https://owncloud.org/
|
||||||
|
|
||||||
|
FastMail
|
||||||
|
--------
|
||||||
|
|
||||||
|
Vdirsyncer is irregularly tested against FastMail_. There are no known issues
|
||||||
|
with it. `FastMail's support pages
|
||||||
|
<https://www.fastmail.com/help/technical/servernamesandports.html>`_ provide
|
||||||
|
the settings to use::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = caldav
|
||||||
|
url = https://caldav.messagingengine.com/
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = carddav
|
||||||
|
url = https://carddav.messagingengine.com/
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
.. _FastMail: https://www.fastmail.com/
|
||||||
|
|
||||||
|
.. _icloud_setup:
|
||||||
|
|
||||||
|
iCloud
|
||||||
|
------
|
||||||
|
|
||||||
|
Vdirsyncer is irregularly tested against iCloud_.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = caldav
|
||||||
|
url = https://caldav.icloud.com/
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = carddav
|
||||||
|
url = https://contacts.icloud.com/
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
Problems:
|
||||||
|
|
||||||
|
- Vdirsyncer can't do two-factor auth with iCloud (there doesn't seem to be a
|
||||||
|
way to do two-factor auth over the DAV APIs) You'll need to use `app-specific
|
||||||
|
passwords <https://support.apple.com/en-us/HT204397>`_ instead.
|
||||||
|
- Vdirsyncer can't create collections on iCloud.
|
||||||
|
|
||||||
|
.. _iCloud: https://www.icloud.com/
|
||||||
|
|
||||||
|
.. _davmail_setup:
|
||||||
|
|
||||||
|
DavMail (Exchange, Outlook)
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
Using vdirsyncer with DavMail_ is possible, but you might get confronted with
|
||||||
|
weird errors coming from old Exchange servers, malformed calendar entries with
|
||||||
|
special characters and/or using an old DavMail version.
|
||||||
|
|
||||||
|
**Make absolutely sure you use the latest DavMail**::
|
||||||
|
|
||||||
|
[storage outlook]
|
||||||
|
type = caldav
|
||||||
|
url = http://localhost:1080/
|
||||||
|
username = user@example.com
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
- Older versions of DavMail handle URLs case-insensitively. See :gh:`144`.
|
||||||
|
- DavMail is handling old broken events very poorly. In such cases the
|
||||||
|
`Calendar Checking Tool for Outlook
|
||||||
|
<https://www.microsoft.com/en-us/download/details.aspx?id=28786>`_ might
|
||||||
|
help.
|
||||||
|
|
||||||
|
If you encounter any problems, do report them to me, but it's probably not
|
||||||
|
vdirsyncer's fault.
|
||||||
|
|
||||||
|
.. _DavMail: http://davmail.sourceforge.net/
|
||||||
|
|
||||||
|
Baikal
|
||||||
|
------
|
||||||
|
|
||||||
|
Vdirsyncer is continuously tested against the latest version of Baikal_.
|
||||||
|
|
||||||
|
- Baikal up to ``0.2.7`` also uses an old version of SabreDAV, with the same
|
||||||
|
issue as ownCloud, see :gh:`160`. This issue is fixed in later versions.
|
||||||
|
|
||||||
|
.. _Baikal: http://baikal-server.com/
|
||||||
|
|
||||||
|
Google
|
||||||
|
------
|
||||||
|
|
||||||
|
Vdirsyncer doesn't currently support Google accounts fully. For possible
|
||||||
|
solutions see :gh:`202` and :gh:`8`.
|
||||||
|
|
@ -8,7 +8,21 @@ are better alternatives available for particular usecases.
|
||||||
Installation
|
Installation
|
||||||
============
|
============
|
||||||
|
|
||||||
See :ref:`installation`.
|
Unless you want to contribute to vdirsyncer, you should use the packages from
|
||||||
|
your distribution:
|
||||||
|
|
||||||
|
- `ArchLinux (AUR) <https://aur.archlinux.org/packages/vdirsyncer>`_
|
||||||
|
- `pkgsrc <http://pkgsrc.se/time/py-vdirsyncer>`_
|
||||||
|
- `Fedora <https://apps.fedoraproject.org/packages/vdirsyncer>`_
|
||||||
|
- `nixpkg <https://github.com/NixOS/nixpkgs/tree/master/pkgs/tools/misc/vdirsyncer>`_
|
||||||
|
- `GNU Guix <https://www.gnu.org/software/guix/package-list.html#vdirsyncer>`_
|
||||||
|
- `homebrew <http://braumeister.org/formula/vdirsyncer>`_
|
||||||
|
- Debian and Ubuntu don't have packages, but make a manual installation
|
||||||
|
especially hard. See :ref:`debian-urllib3`.
|
||||||
|
|
||||||
|
If there is no package for your distribution, you'll need to :ref:`install
|
||||||
|
vdirsyncer manually <manual-installation>`. There is an easy command to
|
||||||
|
copy-and-paste for this as well, but you should be aware of its consequences.
|
||||||
|
|
||||||
Configuration
|
Configuration
|
||||||
=============
|
=============
|
||||||
|
|
@ -16,13 +30,12 @@ Configuration
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
- The `config.example from the repository
|
- The `config.example from the repository
|
||||||
<https://github.com/pimutils/vdirsyncer/blob/main/config.example>`_
|
<https://github.com/untitaker/vdirsyncer/blob/master/config.example>`_
|
||||||
contains a very terse version of this.
|
contains a very terse version of this.
|
||||||
|
|
||||||
- In this example we set up contacts synchronization, but calendar sync
|
- In this example we set up contacts synchronization, but calendar sync
|
||||||
works almost the same. Just swap ``type = "carddav"``
|
works almost the same. Just swap ``type = carddav`` for ``type = caldav``
|
||||||
for ``type = "caldav"`` and ``fileext = ".vcf"``
|
and ``fileext = .vcf`` for ``fileext = .ics``.
|
||||||
for ``fileext = ".ics"``.
|
|
||||||
|
|
||||||
- Take a look at the :doc:`problems` page if anything doesn't work like
|
- Take a look at the :doc:`problems` page if anything doesn't work like
|
||||||
planned.
|
planned.
|
||||||
|
|
@ -42,7 +55,7 @@ where the only required parameter is ``status_path``. The following is a
|
||||||
minimal example::
|
minimal example::
|
||||||
|
|
||||||
[general]
|
[general]
|
||||||
status_path = "~/.vdirsyncer/status/"
|
status_path = ~/.vdirsyncer/status/
|
||||||
|
|
||||||
After the general section, an arbitrary amount of *pair and storage sections*
|
After the general section, an arbitrary amount of *pair and storage sections*
|
||||||
might come.
|
might come.
|
||||||
|
|
@ -53,233 +66,94 @@ pairs of storages should actually be synchronized is defined in :ref:`pair
|
||||||
section <pair_config>`. This format is copied from OfflineIMAP, where storages
|
section <pair_config>`. This format is copied from OfflineIMAP, where storages
|
||||||
are called repositories and pairs are called accounts.
|
are called repositories and pairs are called accounts.
|
||||||
|
|
||||||
The following example synchronizes ownCloud's addressbooks to ``~/.contacts/``::
|
The following example synchronizes ownCloud's
|
||||||
|
default addressbook to ``~/.contacts/``::
|
||||||
|
|
||||||
[pair my_contacts]
|
[pair my_contacts]
|
||||||
a = "my_contacts_local"
|
a = my_contacts_local
|
||||||
b = "my_contacts_remote"
|
b = my_contacts_remote
|
||||||
collections = ["from a", "from b"]
|
collections = null
|
||||||
|
|
||||||
[storage my_contacts_local]
|
[storage my_contacts_local]
|
||||||
type = "filesystem"
|
type = filesystem
|
||||||
path = "~/.contacts/"
|
path = ~/.contacts/
|
||||||
fileext = ".vcf"
|
fileext = .vcf
|
||||||
|
|
||||||
[storage my_contacts_remote]
|
[storage my_contacts_remote]
|
||||||
type = "carddav"
|
type = carddav
|
||||||
|
url = https://owncloud.example.com/remote.php/carddav/addressbooks/bob/default/
|
||||||
# We can simplify this URL here as well. In theory it shouldn't matter.
|
username = bob
|
||||||
url = "https://owncloud.example.com/remote.php/carddav/"
|
password = asdf
|
||||||
username = "bob"
|
|
||||||
password = "asdf"
|
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
Configuration for other servers can be found at :ref:`supported-servers`.
|
Configuration for other servers can be found at :ref:`supported-servers`.
|
||||||
|
|
||||||
After running ``vdirsyncer discover`` and ``vdirsyncer sync``, ``~/.contacts/``
|
After running ``vdirsyncer sync``, ``~/.contacts/`` will contain a bunch of
|
||||||
will contain subfolders for each addressbook, which in turn will contain a
|
``.vcf`` files which all contain a contact in ``VCARD`` format each. You can
|
||||||
bunch of ``.vcf`` files which all contain a contact in ``VCARD`` format each.
|
modify their content, add new ones and delete some [1]_, and your changes will be
|
||||||
You can modify their contents, add new ones and delete some [1]_, and your
|
synchronized to the CalDAV server after you run ``vdirsyncer sync`` again. For
|
||||||
changes will be synchronized to the CalDAV server after you run ``vdirsyncer
|
further reference, it uses the storages :storage:`filesystem` and
|
||||||
sync`` again. For further reference, it uses the storages :storage:`filesystem`
|
:storage:`carddav`.
|
||||||
and :storage:`carddav`.
|
|
||||||
|
|
||||||
However, if new collections are created on the server, it will not
|
.. [1] You'll want to :doc:`use a helper program for this <supported>`.
|
||||||
automatically start synchronizing those [2]_. You need to run ``vdirsyncer
|
|
||||||
discover`` again to re-fetch this list instead.
|
|
||||||
|
|
||||||
.. [1] You'll want to :doc:`use a helper program for this <tutorials/index>`.
|
|
||||||
|
|
||||||
.. [2] Because collections are added rarely, and checking for this case before
|
|
||||||
every synchronization isn't worth the overhead.
|
|
||||||
|
|
||||||
More Configuration
|
More Configuration
|
||||||
==================
|
==================
|
||||||
|
|
||||||
.. _conflict_resolution_tutorial:
|
.. _conflict_resolution:
|
||||||
|
|
||||||
Conflict resolution
|
Conflict resolution
|
||||||
-------------------
|
-------------------
|
||||||
|
|
||||||
What if the same item is changed on both sides? What should vdirsyncer
|
What if the same item is changed on both sides? What should vdirsyncer do? By
|
||||||
do? Three options are currently provided:
|
default, it will show an ugly error message, which is surely a way to avoid the
|
||||||
|
problem. Another way to solve that ambiguity is to add another line to the
|
||||||
1. vdirsyncer displays an error message (the default);
|
pair section::
|
||||||
2. vdirsyncer chooses one alternative version over the other;
|
|
||||||
3. vdirsyncer starts a command of your choice that is supposed to merge the two alternative versions.
|
|
||||||
|
|
||||||
Options 2 and 3 require adding a ``"conflict_resolution"``
|
|
||||||
parameter to the pair section. Option 2 requires giving either ``"a
|
|
||||||
wins"`` or ``"b wins"`` as value to the parameter::
|
|
||||||
|
|
||||||
[pair my_contacts]
|
[pair my_contacts]
|
||||||
...
|
...
|
||||||
conflict_resolution = "b wins"
|
conflict_resolution = b wins
|
||||||
|
|
||||||
Earlier we wrote that ``b = "my_contacts_remote"``, so when vdirsyncer encounters
|
Earlier we wrote that ``b = my_contacts_remote``, so when vdirsyncer encounters
|
||||||
the situation where an item changed on both sides, it will simply overwrite the
|
the situation where an item changed on both sides, it will simply overwrite the
|
||||||
local item with the one from the server.
|
local item with the one from the server. Of course ``a wins`` is also a valid
|
||||||
|
value.
|
||||||
|
|
||||||
Option 3 requires specifying as value of ``"conflict_resolution"`` an
|
Collection discovery
|
||||||
array starting with ``"command"`` and containing paths and arguments
|
--------------------
|
||||||
to a command. For example::
|
|
||||||
|
The above configuration only syncs a single addressbook. This is denoted by
|
||||||
|
``collections = null`` (collection = addressbook/calendar). We can change this
|
||||||
|
line to let vdirsyncer automatically sync all addressbooks it can find::
|
||||||
|
|
||||||
[pair my_contacts]
|
[pair my_contacts]
|
||||||
...
|
a = my_contacts_local
|
||||||
conflict_resolution = ["command", "vimdiff"]
|
b = my_contacts_remote
|
||||||
|
collections = ["from a", "from b"] # changed from `null`
|
||||||
|
|
||||||
In this example, ``vimdiff <a> <b>`` will be called with ``<a>`` and
|
[storage my_contacts_local]
|
||||||
``<b>`` being two temporary files containing the conflicting
|
type = filesystem
|
||||||
files. The files need to be exactly the same when the command
|
path = ~/.contacts/
|
||||||
returns. More arguments can be passed to the command by adding more
|
fileext = .vcf
|
||||||
elements to the array.
|
|
||||||
|
|
||||||
See :ref:`pair_config` for the reference documentation.
|
[storage my_contacts_remote]
|
||||||
|
type = carddav
|
||||||
|
|
||||||
.. _metasync_tutorial:
|
# We can simplify this URL here as well. In theory it shouldn't matter.
|
||||||
|
url = https://owncloud.example.com/remote.php/carddav/
|
||||||
|
username = bob
|
||||||
|
password = asdf
|
||||||
|
|
||||||
Metadata synchronization
|
With the above configuration, vdirsyncer will fetch all available collections
|
||||||
------------------------
|
from the server, and create subdirectories for each of them in
|
||||||
|
``~/.contacts/``. For example, ownCloud's default addressbook ``"default"``
|
||||||
|
would be synchronized to the location ``~/.contacts/default/``.
|
||||||
|
|
||||||
Besides items, vdirsyncer can also synchronize metadata like the addressbook's
|
Vdirsyncer fetches this list on first sync, and will re-fetch it if you change
|
||||||
or calendar's "human-friendly" name (internally called "displayname") or the
|
your configuration file. However, if new collections are created on the server,
|
||||||
color associated with a calendar. For the purpose of explaining this feature,
|
it will not automatically start synchronizing those [2]_. You should run
|
||||||
let's switch to a different base example. This time we'll synchronize calendars::
|
``vdirsyncer discover`` to re-fetch this list instead.
|
||||||
|
|
||||||
[pair my_calendars]
|
.. [2] Because collections are added rarely, and checking for this case before
|
||||||
a = "my_calendars_local"
|
every synchronization isn't worth the overhead.
|
||||||
b = "my_calendars_remote"
|
|
||||||
collections = ["from a", "from b"]
|
|
||||||
metadata = ["color"]
|
|
||||||
|
|
||||||
[storage my_calendars_local]
|
|
||||||
type = "filesystem"
|
|
||||||
path = "~/.calendars/"
|
|
||||||
fileext = ".ics"
|
|
||||||
|
|
||||||
[storage my_calendars_remote]
|
|
||||||
type = "caldav"
|
|
||||||
|
|
||||||
url = "https://owncloud.example.com/remote.php/caldav/"
|
|
||||||
username = "bob"
|
|
||||||
password = "asdf"
|
|
||||||
|
|
||||||
Run ``vdirsyncer discover`` for discovery. Then you can use ``vdirsyncer
|
|
||||||
metasync`` to synchronize the ``color`` property between your local calendars
|
|
||||||
in ``~/.calendars/`` and your ownCloud. Locally the color is just represented
|
|
||||||
as a file called ``color`` within the calendar folder.
|
|
||||||
|
|
||||||
.. _collections_tutorial:
|
|
||||||
|
|
||||||
More information about collections
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
"Collection" is a collective term for addressbooks and calendars. A Cardav or
|
|
||||||
Caldav server can contains several "collections" which correspond to several
|
|
||||||
addressbooks or calendar.
|
|
||||||
|
|
||||||
Each collection from a storage has a "collection name", a unique identifier for each
|
|
||||||
collection. In the case of :storage:`filesystem`-storage, this is the name of the
|
|
||||||
directory that represents the collection, in the case of the DAV-storages this
|
|
||||||
is the last segment of the URL. We use this identifier in the ``collections``
|
|
||||||
parameter in the ``pair``-section.
|
|
||||||
|
|
||||||
This identifier doesn't change even if you rename your calendar in whatever UI
|
|
||||||
you have, because that only changes the so-called "displayname" property [3]_.
|
|
||||||
On some servers (iCloud, Google) this identifier is randomly generated and has
|
|
||||||
no correlation with the displayname you chose.
|
|
||||||
|
|
||||||
.. [3] Which you can also synchronize with ``metasync`` using ``metadata =
|
|
||||||
["displayname"]``.
|
|
||||||
|
|
||||||
There are three collection names that have a special meaning:
|
|
||||||
|
|
||||||
- ``"from a"``, ``"from b"``: A placeholder for all collections that can be
|
|
||||||
found on side A/B when running ``vdirsyncer discover``.
|
|
||||||
- ``null``: The parameters give to the storage are exact and require no discovery.
|
|
||||||
|
|
||||||
The last one requires a bit more explanation. Assume this config which
|
|
||||||
synchronizes two directories of addressbooks::
|
|
||||||
|
|
||||||
[pair foobar]
|
|
||||||
a = "foo"
|
|
||||||
b = "bar"
|
|
||||||
collections = ["from a", "from b"]
|
|
||||||
|
|
||||||
[storage foo]
|
|
||||||
type = "filesystem"
|
|
||||||
fileext = ".vcf"
|
|
||||||
path = "./contacts_foo/"
|
|
||||||
|
|
||||||
[storage bar]
|
|
||||||
type = "filesystem"
|
|
||||||
fileext = ".vcf"
|
|
||||||
path = "./contacts_bar/"
|
|
||||||
|
|
||||||
As we saw previously this will synchronize all collections in
|
|
||||||
``./contacts_foo/`` with each same-named collection in ``./contacts_bar/``. If
|
|
||||||
there's a collection that exists on one side but not the other, vdirsyncer will
|
|
||||||
ask whether to create that folder on the other side.
|
|
||||||
|
|
||||||
If we set ``collections = null``, ``./contacts_foo/`` and ``./contacts_bar/``
|
|
||||||
are no longer treated as folders with collections, but as collections
|
|
||||||
themselves. This means that ``./contacts_foo/`` and ``./contacts_bar/`` will
|
|
||||||
contain ``.vcf``-files, not subfolders that contain ``.vcf``-files.
|
|
||||||
|
|
||||||
This is useful in situations where listing all collections fails because your
|
|
||||||
DAV-server doesn't support it, for example. In this case, you can set ``url``
|
|
||||||
of your :storage:`carddav`- or :storage:`caldav`-storage to a URL that points
|
|
||||||
to your CalDAV/CardDAV collection directly.
|
|
||||||
|
|
||||||
Note that not all storages support the ``null``-collection, for example
|
|
||||||
:storage:`google_contacts` and :storage:`google_calendar` don't.
|
|
||||||
|
|
||||||
Advanced collection configuration (server-to-server sync)
|
|
||||||
---------------------------------------------------------
|
|
||||||
|
|
||||||
The examples above are good enough if you want to synchronize a remote server
|
|
||||||
to a previously empty disk. However, even more trickery is required when you
|
|
||||||
have two servers with *already existing* collections which you want to
|
|
||||||
synchronize.
|
|
||||||
|
|
||||||
The core problem in this situation is that vdirsyncer pairs collections by
|
|
||||||
collection name by default (see definition in previous section, basically a
|
|
||||||
foldername or a remote UUID). When you have two servers, those collection names
|
|
||||||
may not line up as nicely. Suppose you created two calendars "Test", one on a
|
|
||||||
NextCloud server and one on iCloud, using their respective web interfaces. The
|
|
||||||
URLs look something like this::
|
|
||||||
|
|
||||||
NextCloud: https://example.com/remote.php/dav/calendars/user/test/
|
|
||||||
iCloud: https://p-XX.caldav.icloud.com/YYY/calendars/3b4c9995-5c67-4021-9fa0-be4633623e1c
|
|
||||||
|
|
||||||
Those are two DAV calendar collections. Their collection names will be ``test``
|
|
||||||
and ``3b4c9995-5c67-4021-9fa0-be4633623e1c`` respectively, so you don't have a
|
|
||||||
single name you can address them both with. You will need to manually "pair"
|
|
||||||
(no pun intended) those collections up like this::
|
|
||||||
|
|
||||||
[pair doublecloud]
|
|
||||||
a = "my_nextcloud"
|
|
||||||
b = "my_icloud"
|
|
||||||
collections = [["mytest", "test", "3b4c9995-5c67-4021-9fa0-be4633623e1c"]]
|
|
||||||
|
|
||||||
``mytest`` gives that combination of calendars a nice name you can use when
|
|
||||||
talking about it, so you would use ``vdirsyncer sync doublecloud/mytest`` to
|
|
||||||
say: "Only synchronize these two storages, nothing else that may be
|
|
||||||
configured".
|
|
||||||
|
|
||||||
.. note:: Why not use displaynames?
|
|
||||||
|
|
||||||
You may wonder why vdirsyncer just couldn't figure this out by itself. After
|
|
||||||
all, you did name both collections "Test" (which is called "the
|
|
||||||
displayname"), so why not pair collections by that value?
|
|
||||||
|
|
||||||
There are a few problems with this idea:
|
|
||||||
|
|
||||||
- Two calendars may have the same exact displayname.
|
|
||||||
- A calendar may not have a (non-empty) displayname.
|
|
||||||
- The displayname might change. Either you rename the calendar, or the
|
|
||||||
calendar renames itself because you change a language setting.
|
|
||||||
|
|
||||||
In the end, that property was never designed to be parsed by machines.
|
|
||||||
|
|
|
||||||
|
|
@ -1,10 +0,0 @@
|
||||||
======
|
|
||||||
Baikal
|
|
||||||
======
|
|
||||||
|
|
||||||
Vdirsyncer is continuously tested against the latest version of Baikal_.
|
|
||||||
|
|
||||||
- Baikal up to ``0.2.7`` also uses an old version of SabreDAV, with the same
|
|
||||||
issue as ownCloud, see :gh:`160`. This issue is fixed in later versions.
|
|
||||||
|
|
||||||
.. _Baikal: http://sabre.io/baikal/
|
|
||||||
|
|
@ -1,95 +0,0 @@
|
||||||
.. _claws-mail-tutorial:
|
|
||||||
|
|
||||||
Vdirsyncer with Claws Mail
|
|
||||||
==========================
|
|
||||||
|
|
||||||
First of all, Claws-Mail only supports **read-only** functions for vCards. It
|
|
||||||
can only read contacts, but there's no editor.
|
|
||||||
|
|
||||||
Preparation
|
|
||||||
-----------
|
|
||||||
|
|
||||||
We need to install vdirsyncer, for that look :doc:`here </installation>`. Then
|
|
||||||
we need to create some folders::
|
|
||||||
|
|
||||||
mkdir ~/.vdirsyncer
|
|
||||||
mkdir ~/.contacts
|
|
||||||
|
|
||||||
Configuration
|
|
||||||
-------------
|
|
||||||
|
|
||||||
Now we create the configuration for vdirsyncer. Open
|
|
||||||
``~/.vdirsyncer/config`` with a text editor. The config should look like
|
|
||||||
this:
|
|
||||||
|
|
||||||
.. code:: ini
|
|
||||||
|
|
||||||
[general]
|
|
||||||
status_path = "~/.vdirsyncer/status/"
|
|
||||||
|
|
||||||
[storage local]
|
|
||||||
type = "singlefile"
|
|
||||||
path = "~/.contacts/%s.vcf"
|
|
||||||
|
|
||||||
[storage online]
|
|
||||||
type = "carddav"
|
|
||||||
url = "CARDDAV_LINK"
|
|
||||||
username = "USERNAME"
|
|
||||||
password = "PASSWORD"
|
|
||||||
read_only = true
|
|
||||||
|
|
||||||
[pair contacts]
|
|
||||||
a = "local"
|
|
||||||
b = "online"
|
|
||||||
collections = ["from a", "from b"]
|
|
||||||
conflict_resolution = "b wins"
|
|
||||||
|
|
||||||
- In the general section, we define the status folder path, for discovered
|
|
||||||
collections and generally stuff that needs to persist between syncs.
|
|
||||||
- In the local section we define that all contacts should be sync in a single
|
|
||||||
file and the path for the contacts.
|
|
||||||
- In the online section you must change the url, username and password to your
|
|
||||||
setup. We also set the storage to read-only such that no changes get
|
|
||||||
synchronized back. Claws-Mail should not be able to do any changes anyway,
|
|
||||||
but this is one extra safety step in case files get corrupted or vdirsyncer
|
|
||||||
behaves erratically. You can leave that part out if you want to be able to
|
|
||||||
edit those files locally.
|
|
||||||
- In the last section we configure that online contacts win in a conflict
|
|
||||||
situation. Configure this part however you like. A correct value depends on
|
|
||||||
which side is most likely to be up-to-date.
|
|
||||||
|
|
||||||
Sync
|
|
||||||
----
|
|
||||||
|
|
||||||
Now we discover and sync our contacts::
|
|
||||||
|
|
||||||
vdirsyncer discover contacts
|
|
||||||
vdirsyncer sync contacts
|
|
||||||
|
|
||||||
Claws Mail
|
|
||||||
----------
|
|
||||||
|
|
||||||
Open Claws-Mail. Go to **Tools** => **Addressbook**.
|
|
||||||
|
|
||||||
Click on **Addressbook** => **New vCard**. Choose a name for the book.
|
|
||||||
|
|
||||||
Then search for the for the vCard in the folder **~/.contacts/**. Click
|
|
||||||
ok, and you we will see your contacts.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Claws-Mail shows only contacts that have a mail address.
|
|
||||||
|
|
||||||
Crontab
|
|
||||||
-------
|
|
||||||
|
|
||||||
On the end we create a crontab, so that vdirsyncer syncs automatically
|
|
||||||
every 30 minutes our contacts::
|
|
||||||
|
|
||||||
crontab -e
|
|
||||||
|
|
||||||
On the end of that file enter this line::
|
|
||||||
|
|
||||||
*/30 * * * * /usr/local/bin/vdirsyncer sync > /dev/null
|
|
||||||
|
|
||||||
And you're done!
|
|
||||||
|
|
@ -1,41 +0,0 @@
|
||||||
.. _davmail_setup:
|
|
||||||
|
|
||||||
===========================
|
|
||||||
DavMail (Exchange, Outlook)
|
|
||||||
===========================
|
|
||||||
|
|
||||||
DavMail_ is a proxy program that allows you to use Card- and CalDAV clients
|
|
||||||
with Outlook. That allows you to use vdirsyncer with Outlook.
|
|
||||||
|
|
||||||
In practice your success with DavMail may wildly vary. Depending on your
|
|
||||||
Exchange server you might get confronted with weird errors of all sorts
|
|
||||||
(including data-loss).
|
|
||||||
|
|
||||||
**Make absolutely sure you use the latest DavMail**::
|
|
||||||
|
|
||||||
[storage outlook]
|
|
||||||
type = "caldav"
|
|
||||||
url = "http://localhost:1080/users/user@example.com/calendar/"
|
|
||||||
username = "user@example.com"
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
- Older versions of DavMail handle URLs case-insensitively. See :gh:`144`.
|
|
||||||
- DavMail is handling malformed data on the Exchange server very poorly. In
|
|
||||||
such cases the `Calendar Checking Tool for Outlook
|
|
||||||
<https://www.microsoft.com/en-us/download/details.aspx?id=28786>`_ might
|
|
||||||
help.
|
|
||||||
- In some cases, you may see errors about duplicate events. It may look
|
|
||||||
something like this::
|
|
||||||
|
|
||||||
error: my_calendar/calendar: Storage "my_calendar_remote/calendar" contains multiple items with the same UID or even content. Vdirsyncer will now abort the synchronization of this collection, because the fix for this is not clear; It could be the result of a badly behaving server. You can try running:
|
|
||||||
error:
|
|
||||||
error: vdirsyncer repair my_calendar_remote/calendar
|
|
||||||
error:
|
|
||||||
error: But make sure to have a backup of your data in some form. The offending hrefs are:
|
|
||||||
[...]
|
|
||||||
|
|
||||||
In order to fix this, you can try the Remove-DuplicateAppointments.ps1_
|
|
||||||
PowerShell script that Microsoft has come up with in order to remove duplicates.
|
|
||||||
|
|
||||||
.. _DavMail: http://davmail.sourceforge.net/
|
|
||||||
.. _Remove-DuplicateAppointments.ps1: https://blogs.msdn.microsoft.com/emeamsgdev/2015/02/12/powershell-remove-duplicate-calendar-appointments/
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
||||||
========
|
|
||||||
FastMail
|
|
||||||
========
|
|
||||||
|
|
||||||
Vdirsyncer is continuously tested against FastMail_, thanks to them for
|
|
||||||
providing a free account for this purpose. There are no known issues with it.
|
|
||||||
`FastMail's support pages
|
|
||||||
<https://www.fastmail.com/help/technical/servernamesandports.html>`_ provide
|
|
||||||
the settings to use::
|
|
||||||
|
|
||||||
[storage cal]
|
|
||||||
type = "caldav"
|
|
||||||
url = "https://caldav.fastmail.com/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
[storage card]
|
|
||||||
type = "carddav"
|
|
||||||
url = "https://carddav.fastmail.com/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
.. _FastMail: https://www.fastmail.com/
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
||||||
======
|
|
||||||
Google
|
|
||||||
======
|
|
||||||
|
|
||||||
Using vdirsyncer with Google Calendar is possible as of 0.10, but it is not
|
|
||||||
tested frequently. You can use :storage:`google_contacts` and
|
|
||||||
:storage:`google_calendar`.
|
|
||||||
|
|
||||||
For more information see :gh:`202` and :gh:`8`.
|
|
||||||
|
|
@ -1,35 +0,0 @@
|
||||||
.. _icloud_setup:
|
|
||||||
|
|
||||||
======
|
|
||||||
iCloud
|
|
||||||
======
|
|
||||||
|
|
||||||
Vdirsyncer is regularly tested against iCloud_.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[storage cal]
|
|
||||||
type = "caldav"
|
|
||||||
url = "https://caldav.icloud.com/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
[storage card]
|
|
||||||
type = "carddav"
|
|
||||||
url = "https://contacts.icloud.com/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
Problems:
|
|
||||||
|
|
||||||
- Vdirsyncer can't do two-factor auth with iCloud (there doesn't seem to be a
|
|
||||||
way to do two-factor auth over the DAV APIs) You'll need to use `app-specific
|
|
||||||
passwords <https://support.apple.com/en-us/HT204397>`_ instead.
|
|
||||||
- iCloud has a few special requirements when creating collections. In principle
|
|
||||||
vdirsyncer can do it, but it is recommended to create them from an Apple
|
|
||||||
client (or the iCloud web interface).
|
|
||||||
|
|
||||||
- iCloud requires a minimum length of collection names.
|
|
||||||
- Calendars created by vdirsyncer cannot be used as tasklists.
|
|
||||||
|
|
||||||
.. _iCloud: https://www.icloud.com/
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
===============
|
|
||||||
Other tutorials
|
|
||||||
===============
|
|
||||||
|
|
||||||
The following section contains tutorials not explicitly about any particular
|
|
||||||
core function of vdirsyncer. They usually show how to integrate vdirsyncer with
|
|
||||||
third-party software. Because of that, it may be that the information regarding
|
|
||||||
that other software only applies to specific versions of them.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
Please :doc:`contribute </contributing>` your own tutorials too! Pages are
|
|
||||||
often only stubs and are lacking full examples.
|
|
||||||
|
|
||||||
Client applications
|
|
||||||
===================
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
claws-mail
|
|
||||||
systemd-timer
|
|
||||||
todoman
|
|
||||||
|
|
||||||
Further applications, with missing pages:
|
|
||||||
|
|
||||||
- khal_, a CLI calendar application supporting :doc:`vdir </vdir>`. You can use
|
|
||||||
:storage:`filesystem` with it.
|
|
||||||
- Many graphical calendar apps such as dayplanner_, Orage_ or rainlendar_ save
|
|
||||||
a calendar in a single ``.ics`` file. You can use :storage:`singlefile` with
|
|
||||||
those.
|
|
||||||
- khard_, a commandline addressbook supporting :doc:`vdir </vdir>`. You can use
|
|
||||||
:storage:`filesystem` with it.
|
|
||||||
- contactquery.c_, a small program explicitly written for querying vdirs from
|
|
||||||
mutt.
|
|
||||||
- mates_, a commandline addressbook supporting :doc:`vdir </vdir>`.
|
|
||||||
- vdirel_, access :doc:`vdir </vdir>` contacts from Emacs.
|
|
||||||
|
|
||||||
.. _khal: http://lostpackets.de/khal/
|
|
||||||
.. _dayplanner: http://www.day-planner.org/
|
|
||||||
.. _Orage: https://gitlab.xfce.org/apps/orage
|
|
||||||
.. _rainlendar: http://www.rainlendar.net/
|
|
||||||
.. _khard: https://github.com/scheibler/khard/
|
|
||||||
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
|
||||||
.. _mates: https://github.com/pimutils/mates.rs
|
|
||||||
.. _vdirel: https://github.com/DamienCassou/vdirel
|
|
||||||
|
|
||||||
.. _supported-servers:
|
|
||||||
|
|
||||||
Servers
|
|
||||||
=======
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
baikal
|
|
||||||
davmail
|
|
||||||
fastmail
|
|
||||||
google
|
|
||||||
icloud
|
|
||||||
nextcloud
|
|
||||||
owncloud
|
|
||||||
radicale
|
|
||||||
xandikos
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
=========
|
|
||||||
nextCloud
|
|
||||||
=========
|
|
||||||
|
|
||||||
Vdirsyncer is continuously tested against the latest version of nextCloud_::
|
|
||||||
|
|
||||||
[storage cal]
|
|
||||||
type = "caldav"
|
|
||||||
url = "https://nextcloud.example.com/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
[storage card]
|
|
||||||
type = "carddav"
|
|
||||||
url = "https://nextcloud.example.com/"
|
|
||||||
|
|
||||||
- WebCAL-subscriptions can't be discovered by vdirsyncer. See `this relevant
|
|
||||||
issue <https://github.com/nextcloud/calendar/issues/63>`_.
|
|
||||||
|
|
||||||
.. _nextCloud: https://nextcloud.com/
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
||||||
.. _owncloud_setup:
|
|
||||||
|
|
||||||
========
|
|
||||||
ownCloud
|
|
||||||
========
|
|
||||||
|
|
||||||
Vdirsyncer is continuously tested against the latest version of ownCloud_::
|
|
||||||
|
|
||||||
[storage cal]
|
|
||||||
type = "caldav"
|
|
||||||
url = "https://example.com/remote.php/dav/"
|
|
||||||
username = ...
|
|
||||||
password = ...
|
|
||||||
|
|
||||||
[storage card]
|
|
||||||
type = "carddav"
|
|
||||||
url = "https://example.com/remote.php/dav/"
|
|
||||||
username = ...
|
|
||||||
password = ...
|
|
||||||
|
|
||||||
- *Versions older than 7.0.0:* ownCloud uses SabreDAV, which had problems
|
|
||||||
detecting collisions and race-conditions. The problems were reported and are
|
|
||||||
fixed in SabreDAV's repo, and the corresponding fix is also in ownCloud since
|
|
||||||
7.0.0. See :gh:`16` for more information.
|
|
||||||
|
|
||||||
.. _ownCloud: https://owncloud.org/
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
||||||
========
|
|
||||||
Radicale
|
|
||||||
========
|
|
||||||
|
|
||||||
Radicale_ is a very lightweight server, however, it intentionally doesn't
|
|
||||||
implement the CalDAV and CardDAV standards completely, which might lead to
|
|
||||||
issues even with very well-written clients. Apart from its non-conformity with
|
|
||||||
standards, there are multiple other problems with its code quality and the way
|
|
||||||
it is maintained. Consider using e.g. :doc:`xandikos` instead.
|
|
||||||
|
|
||||||
That said, vdirsyncer is continuously tested against the git version and the
|
|
||||||
latest PyPI release of Radicale.
|
|
||||||
|
|
||||||
- Vdirsyncer can't create collections on Radicale.
|
|
||||||
- Radicale doesn't `support time ranges in the calendar-query of CalDAV
|
|
||||||
<https://github.com/Kozea/Radicale/issues/146>`_, so setting ``start_date``
|
|
||||||
and ``end_date`` for :storage:`caldav` will have no or unpredicted
|
|
||||||
consequences.
|
|
||||||
|
|
||||||
- `Versions of Radicale older than 0.9b1 choke on RFC-conform queries for all
|
|
||||||
items of a collection <https://github.com/Kozea/Radicale/issues/143>`_.
|
|
||||||
|
|
||||||
You have to set ``item_types = ["VTODO", "VEVENT"]`` in
|
|
||||||
:storage:`caldav` for vdirsyncer to work with those versions.
|
|
||||||
|
|
||||||
.. _Radicale: http://radicale.org/
|
|
||||||
|
|
@ -1,37 +0,0 @@
|
||||||
.. _systemd_timer-tutorial:
|
|
||||||
|
|
||||||
Running as a systemd.timer
|
|
||||||
==========================
|
|
||||||
|
|
||||||
vdirsyncer includes unit files to run at an interval (by default every 15±5
|
|
||||||
minutes).
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
These are not installed when installing via pip, only via distribution
|
|
||||||
packages. If you installed via pip, or your distribution doesn't ship systemd
|
|
||||||
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
|
|
||||||
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
|
|
||||||
|
|
||||||
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.service
|
|
||||||
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.timer
|
|
||||||
|
|
||||||
Activation
|
|
||||||
----------
|
|
||||||
|
|
||||||
To activate the timer, just run ``systemctl --user enable vdirsyncer.timer``.
|
|
||||||
To see logs of previous runs, use ``journalctl --user -u vdirsyncer``.
|
|
||||||
|
|
||||||
Configuration
|
|
||||||
-------------
|
|
||||||
|
|
||||||
It's quite possible that the default "every fifteen minutes" interval isn't to
|
|
||||||
your liking. No default will suit everybody, but this is configurable by simply
|
|
||||||
running::
|
|
||||||
|
|
||||||
systemctl --user edit vdirsyncer.timer
|
|
||||||
|
|
||||||
This will open a blank editor, where you can override the timer by including::
|
|
||||||
|
|
||||||
OnBootSec=5m # This is how long after boot the first run takes place.
|
|
||||||
OnUnitActiveSec=15m # This is how often subsequent runs take place.
|
|
||||||
|
|
@ -1,69 +0,0 @@
|
||||||
=======
|
|
||||||
Todoman
|
|
||||||
=======
|
|
||||||
|
|
||||||
The iCalendar format also supports saving tasks in form of ``VTODO``-entries,
|
|
||||||
with the same file extension as normal events: ``.ics``. Many CalDAV servers
|
|
||||||
support synchronizing tasks, vdirsyncer does too.
|
|
||||||
|
|
||||||
todoman_ is a CLI task manager supporting :doc:`vdir </vdir>`. Its interface is
|
|
||||||
similar to the ones of Taskwarrior or the todo.txt CLI app. You can use
|
|
||||||
:storage:`filesystem` with it.
|
|
||||||
|
|
||||||
.. _todoman: http://todoman.readthedocs.io/
|
|
||||||
|
|
||||||
Setting up vdirsyncer
|
|
||||||
=====================
|
|
||||||
|
|
||||||
For this tutorial we will use NextCloud.
|
|
||||||
|
|
||||||
Assuming a config like this::
|
|
||||||
|
|
||||||
[general]
|
|
||||||
status_path = "~/.vdirsyncer/status/"
|
|
||||||
|
|
||||||
[pair calendars]
|
|
||||||
conflict_resolution = "b wins"
|
|
||||||
a = "calendars_local"
|
|
||||||
b = "calendars_dav"
|
|
||||||
collections = ["from b"]
|
|
||||||
metadata = ["color", "displayname"]
|
|
||||||
|
|
||||||
[storage calendars_local]
|
|
||||||
type = "filesystem"
|
|
||||||
path = "~/.calendars/"
|
|
||||||
fileext = ".ics"
|
|
||||||
|
|
||||||
[storage calendars_dav]
|
|
||||||
type = "caldav"
|
|
||||||
url = "https://nextcloud.example.net/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
``vdirsyncer sync`` will then synchronize the calendars of your NextCloud_
|
|
||||||
instance to subfolders of ``~/.calendar/``.
|
|
||||||
|
|
||||||
.. _NextCloud: https://nextcloud.com/
|
|
||||||
|
|
||||||
Setting up todoman
|
|
||||||
==================
|
|
||||||
|
|
||||||
Write this to ``~/.config/todoman/config.py``::
|
|
||||||
|
|
||||||
path = "~/.calendars/*"
|
|
||||||
|
|
||||||
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
|
|
||||||
which is exactly the tasklists we want. Now you can use ``todoman`` as
|
|
||||||
described in its documentation_ and run ``vdirsyncer sync`` to synchronize the changes to NextCloud.
|
|
||||||
|
|
||||||
.. _glob: https://en.wikipedia.org/wiki/Glob_(programming)
|
|
||||||
.. _documentation: http://todoman.readthedocs.io/
|
|
||||||
|
|
||||||
Other clients
|
|
||||||
=============
|
|
||||||
|
|
||||||
The following client applications also synchronize over CalDAV:
|
|
||||||
|
|
||||||
- The Tasks-app found on iOS
|
|
||||||
- `OpenTasks for Android <https://github.com/dmfs/opentasks>`_
|
|
||||||
- The `Tasks <https://apps.nextcloud.com/apps/tasks>`_-app for NextCloud's web UI
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
||||||
========
|
|
||||||
Xandikos
|
|
||||||
========
|
|
||||||
|
|
||||||
Xandikos_ is a lightweight, yet complete CalDAV and CardDAV server, backed by
|
|
||||||
git. Vdirsyncer is continuously tested against its latest version.
|
|
||||||
|
|
||||||
After running ``./bin/xandikos --defaults -d $HOME/dav``, you should be able to
|
|
||||||
point vdirsyncer against the root of Xandikos like this::
|
|
||||||
|
|
||||||
[storage cal]
|
|
||||||
type = "caldav"
|
|
||||||
url = "https://xandikos.example.com/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
[storage card]
|
|
||||||
type = "carddav"
|
|
||||||
url = "https://xandikos.example.com/"
|
|
||||||
username = "..."
|
|
||||||
password = "..."
|
|
||||||
|
|
||||||
.. _Xandikos: https://github.com/jelmer/xandikos
|
|
||||||
|
|
@ -30,11 +30,12 @@ also implies only one ``VEVENT``/``VTODO``/``VCARD`` component per file, but
|
||||||
e.g. recurrence exceptions would require multiple ``VEVENT`` components per
|
e.g. recurrence exceptions would require multiple ``VEVENT`` components per
|
||||||
event.
|
event.
|
||||||
|
|
||||||
The filename should have similar properties as the ``UID`` of the file content.
|
The filename *should* consist of the ``ident``, followed by the file extension.
|
||||||
However, there is no requirement for these two to be the same. Programs may
|
The ``ident`` is either the ``UID``, if the item has one, else a string with
|
||||||
choose to store additional metadata in that filename, however, at the same time
|
similar properties as the ``UID``. However, several restrictions of the
|
||||||
they *must not* assume that the metadata they included will be preserved by
|
underlying filesystem might make an implementation of this naming scheme for
|
||||||
other programs.
|
items' filenames impossible. The approach to deal with such cases is left to
|
||||||
|
the client, which are free to choose a different scheme for filenames instead.
|
||||||
|
|
||||||
.. _vCard: https://tools.ietf.org/html/rfc6350
|
.. _vCard: https://tools.ietf.org/html/rfc6350
|
||||||
.. _iCalendar: https://tools.ietf.org/html/rfc5545
|
.. _iCalendar: https://tools.ietf.org/html/rfc5545
|
||||||
|
|
@ -56,11 +57,8 @@ have any file extensions.
|
||||||
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
||||||
present.
|
present.
|
||||||
|
|
||||||
- Files called ``displayname`` and ``description`` contain a UTF-8 encoded label/
|
- A file called ``displayname`` contains a UTF-8 encoded label that may be used
|
||||||
description, that may be used to represent the vdir in UIs.
|
to represent the vdir in UIs.
|
||||||
|
|
||||||
- A file called ``order`` inside the vdir includes the relative order
|
|
||||||
of the calendar, a property that is only relevant in UI design.
|
|
||||||
|
|
||||||
Writing to vdirs
|
Writing to vdirs
|
||||||
================
|
================
|
||||||
|
|
@ -99,7 +97,7 @@ collections for faster search and lookup.
|
||||||
|
|
||||||
The reason items' filenames don't contain any extra information is simple: The
|
The reason items' filenames don't contain any extra information is simple: The
|
||||||
solutions presented induced duplication of data, where one duplicate might
|
solutions presented induced duplication of data, where one duplicate might
|
||||||
become out of date because of bad implementations. As it stands right now, an
|
become out of date because of bad implementations. As it stands right now, a
|
||||||
index format could be formalized separately though.
|
index format could be formalized separately though.
|
||||||
|
|
||||||
vdirsyncer doesn't really have to bother about efficient item lookup, because
|
vdirsyncer doesn't really have to bother about efficient item lookup, because
|
||||||
|
|
|
||||||
|
|
@ -42,7 +42,7 @@ program chosen:
|
||||||
|
|
||||||
This is a good idea if the user is directly interfacing with the file system
|
This is a good idea if the user is directly interfacing with the file system
|
||||||
and is able to resolve conflicts themselves. Here it might lead to
|
and is able to resolve conflicts themselves. Here it might lead to
|
||||||
erroneous behavior with e.g. ``khal``, since there are now two events with
|
errorneous behavior with e.g. ``khal``, since there are now two events with
|
||||||
the same UID.
|
the same UID.
|
||||||
|
|
||||||
This point doesn't apply to git: It has very good merging capabilities,
|
This point doesn't apply to git: It has very good merging capabilities,
|
||||||
|
|
@ -50,6 +50,7 @@ program chosen:
|
||||||
|
|
||||||
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
||||||
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
||||||
e.g. DAVx⁵_ or other apps bundled with smartphones.
|
e.g. DAVDroid_ or the apps by dmfs_.
|
||||||
|
|
||||||
.. _DAVx⁵: https://www.davx5.com/
|
.. _DAVDroid: http://davdroid.bitfire.at/
|
||||||
|
.. _dmfs: https://dmfs.org/
|
||||||
|
|
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
# Push new version to PyPI.
|
|
||||||
#
|
|
||||||
# Usage: hut builds submit publish-release.yaml --follow
|
|
||||||
|
|
||||||
image: alpine/edge
|
|
||||||
packages:
|
|
||||||
- py3-build
|
|
||||||
- py3-pip
|
|
||||||
- py3-setuptools
|
|
||||||
- py3-setuptools_scm
|
|
||||||
- py3-wheel
|
|
||||||
- twine
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
secrets:
|
|
||||||
- a36c8ba3-fba0-4338-b402-6aea0fbe771e # PyPI token.
|
|
||||||
environment:
|
|
||||||
CI: true
|
|
||||||
tasks:
|
|
||||||
- check-tag: |
|
|
||||||
cd vdirsyncer
|
|
||||||
git fetch --tags
|
|
||||||
|
|
||||||
# Stop here unless this is a tag.
|
|
||||||
git describe --exact-match --tags || complete-build
|
|
||||||
- publish: |
|
|
||||||
cd vdirsyncer
|
|
||||||
python -m build --no-isolation
|
|
||||||
twine upload --non-interactive dist/*
|
|
||||||
114
pyproject.toml
114
pyproject.toml
|
|
@ -1,114 +0,0 @@
|
||||||
# Vdirsyncer synchronizes calendars and contacts.
|
|
||||||
#
|
|
||||||
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
|
||||||
# how to package vdirsyncer.
|
|
||||||
|
|
||||||
[build-system]
|
|
||||||
requires = ["setuptools>=64", "setuptools_scm>=8"]
|
|
||||||
build-backend = "setuptools.build_meta"
|
|
||||||
|
|
||||||
[project]
|
|
||||||
name = "vdirsyncer"
|
|
||||||
authors = [
|
|
||||||
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
|
|
||||||
]
|
|
||||||
description = "Synchronize calendars and contacts"
|
|
||||||
readme = "README.rst"
|
|
||||||
requires-python = ">=3.9"
|
|
||||||
keywords = ["todo", "task", "icalendar", "cli"]
|
|
||||||
license = "BSD-3-Clause"
|
|
||||||
license-files = ["LICENSE"]
|
|
||||||
classifiers = [
|
|
||||||
"Development Status :: 4 - Beta",
|
|
||||||
"Environment :: Console",
|
|
||||||
"Operating System :: POSIX",
|
|
||||||
"Programming Language :: Python :: 3",
|
|
||||||
"Programming Language :: Python :: 3.10",
|
|
||||||
"Programming Language :: Python :: 3.11",
|
|
||||||
"Programming Language :: Python :: 3.12",
|
|
||||||
"Programming Language :: Python :: 3.13",
|
|
||||||
"Programming Language :: Python :: 3.9",
|
|
||||||
"Topic :: Internet",
|
|
||||||
"Topic :: Office/Business :: Scheduling",
|
|
||||||
"Topic :: Utilities",
|
|
||||||
]
|
|
||||||
dependencies = [
|
|
||||||
"click>=5.0,<9.0",
|
|
||||||
"click-log>=0.3.0,<0.5.0",
|
|
||||||
"requests>=2.20.0",
|
|
||||||
"aiohttp>=3.8.2,<4.0.0",
|
|
||||||
"aiostream>=0.4.3,<0.8.0",
|
|
||||||
"tenacity>=9.0.0",
|
|
||||||
]
|
|
||||||
dynamic = ["version"]
|
|
||||||
|
|
||||||
[project.optional-dependencies]
|
|
||||||
google = ["aiohttp-oauthlib"]
|
|
||||||
test = [
|
|
||||||
"hypothesis>=6.72.0,<7.0.0",
|
|
||||||
"pytest",
|
|
||||||
"pytest-cov",
|
|
||||||
"pytest-httpserver",
|
|
||||||
"trustme",
|
|
||||||
"pytest-asyncio",
|
|
||||||
"aioresponses",
|
|
||||||
]
|
|
||||||
docs = [
|
|
||||||
"sphinx!=1.4.7",
|
|
||||||
"sphinx_rtd_theme",
|
|
||||||
"setuptools_scm",
|
|
||||||
]
|
|
||||||
check = [
|
|
||||||
"mypy",
|
|
||||||
"ruff",
|
|
||||||
"types-docutils",
|
|
||||||
"types-requests",
|
|
||||||
"types-setuptools",
|
|
||||||
]
|
|
||||||
|
|
||||||
[project.scripts]
|
|
||||||
vdirsyncer = "vdirsyncer.cli:app"
|
|
||||||
|
|
||||||
[tool.ruff.lint]
|
|
||||||
extend-select = [
|
|
||||||
"B0",
|
|
||||||
"C4",
|
|
||||||
"E",
|
|
||||||
"I",
|
|
||||||
"RSE",
|
|
||||||
"SIM",
|
|
||||||
"TID",
|
|
||||||
"UP",
|
|
||||||
"W",
|
|
||||||
]
|
|
||||||
|
|
||||||
[tool.ruff.lint.isort]
|
|
||||||
force-single-line = true
|
|
||||||
required-imports = ["from __future__ import annotations"]
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
|
||||||
addopts = """
|
|
||||||
--tb=short
|
|
||||||
--cov-config .coveragerc
|
|
||||||
--cov=vdirsyncer
|
|
||||||
--cov-report=term-missing:skip-covered
|
|
||||||
--no-cov-on-fail
|
|
||||||
--color=yes
|
|
||||||
"""
|
|
||||||
# filterwarnings=error
|
|
||||||
asyncio_default_fixture_loop_scope = "function"
|
|
||||||
|
|
||||||
[tool.mypy]
|
|
||||||
ignore_missing_imports = true
|
|
||||||
|
|
||||||
[tool.coverage.report]
|
|
||||||
exclude_lines = [
|
|
||||||
"if TYPE_CHECKING:",
|
|
||||||
]
|
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
|
||||||
include = ["vdirsyncer*"]
|
|
||||||
|
|
||||||
[tool.setuptools_scm]
|
|
||||||
write_to = "vdirsyncer/version.py"
|
|
||||||
version_scheme = "no-guess-dev"
|
|
||||||
|
|
@ -1,49 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
#
|
|
||||||
# This script is meant to be run inside a dedicated container,
|
|
||||||
# and not interactively.
|
|
||||||
|
|
||||||
set -ex
|
|
||||||
|
|
||||||
export DEBIAN_FRONTEND=noninteractive
|
|
||||||
|
|
||||||
apt-get update
|
|
||||||
apt-get install -y build-essential fakeroot debhelper git
|
|
||||||
apt-get install -y python3-all python3-pip python3-venv
|
|
||||||
apt-get install -y ruby ruby-dev
|
|
||||||
|
|
||||||
pip3 install virtualenv virtualenv-tools3
|
|
||||||
virtualenv -p python3 /vdirsyncer/env/
|
|
||||||
|
|
||||||
gem install fpm
|
|
||||||
|
|
||||||
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
|
|
||||||
pip3 uninstall -y virtualenv
|
|
||||||
echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
|
|
||||||
chmod +x /usr/local/bin/virtualenv
|
|
||||||
|
|
||||||
cp -r /source/ /vdirsyncer/vdirsyncer/
|
|
||||||
cd /vdirsyncer/vdirsyncer/ || exit 2
|
|
||||||
mkdir /vdirsyncer/pkgs/
|
|
||||||
|
|
||||||
basename -- *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
|
||||||
# XXX: Do I really not want google support included?
|
|
||||||
(echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
|
||||||
fpm --verbose \
|
|
||||||
--input-type virtualenv \
|
|
||||||
--output-type deb \
|
|
||||||
--name "vdirsyncer-latest" \
|
|
||||||
--version "$(cat version)" \
|
|
||||||
--prefix /opt/venvs/vdirsyncer-latest \
|
|
||||||
--depends python3 \
|
|
||||||
requirements.txt
|
|
||||||
|
|
||||||
mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
|
||||||
|
|
||||||
cd /vdirsyncer/pkgs/
|
|
||||||
dpkg -i -- *.deb
|
|
||||||
|
|
||||||
# Check that it works:
|
|
||||||
LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
|
||||||
|
|
||||||
cp -- *.deb /source/
|
|
||||||
75
scripts/make_travisconf.py
Normal file
75
scripts/make_travisconf.py
Normal file
|
|
@ -0,0 +1,75 @@
|
||||||
|
import contextlib
|
||||||
|
import itertools
|
||||||
|
|
||||||
|
print("# Generated by scripts/make_travisconf.py")
|
||||||
|
print("")
|
||||||
|
|
||||||
|
i = 0
|
||||||
|
|
||||||
|
|
||||||
|
def p(s):
|
||||||
|
print(" " * i + s)
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def section(name):
|
||||||
|
p("{}:".format(name))
|
||||||
|
global i
|
||||||
|
i += 1
|
||||||
|
yield
|
||||||
|
i -= 1
|
||||||
|
print("")
|
||||||
|
|
||||||
|
p("sudo: true")
|
||||||
|
p("language: python")
|
||||||
|
p("")
|
||||||
|
|
||||||
|
with section("install"):
|
||||||
|
p('- ". scripts/travis-install.sh"')
|
||||||
|
|
||||||
|
p('- "pip install -U pip"')
|
||||||
|
p('- "pip install wheel"')
|
||||||
|
p('- "make -e install-dev"')
|
||||||
|
p('- "make -e install-$BUILD"')
|
||||||
|
|
||||||
|
with section("script"):
|
||||||
|
p('- "make -e $BUILD"')
|
||||||
|
|
||||||
|
with section("matrix"):
|
||||||
|
with section("include"):
|
||||||
|
for python in ("2.7", "3.3", "3.4", "3.5", "pypy"):
|
||||||
|
h = lambda: p("- python: {}".format(python))
|
||||||
|
h()
|
||||||
|
p(" env: BUILD=style")
|
||||||
|
|
||||||
|
if python in ("2.7", "3.5"):
|
||||||
|
dav_servers = ("radicale", "owncloud", "baikal", "davical")
|
||||||
|
rs_servers = ("mysteryshack",)
|
||||||
|
else:
|
||||||
|
dav_servers = ("radicale",)
|
||||||
|
rs_servers = ()
|
||||||
|
|
||||||
|
for (server_type, server), requirements in itertools.product(
|
||||||
|
itertools.chain(
|
||||||
|
(("REMOTESTORAGE", x) for x in rs_servers),
|
||||||
|
(("DAV", x) for x in dav_servers)
|
||||||
|
),
|
||||||
|
("devel", "release", "minimal")
|
||||||
|
):
|
||||||
|
h()
|
||||||
|
p(" env: "
|
||||||
|
"BUILD=test "
|
||||||
|
"{server_type}_SERVER={server} "
|
||||||
|
"REQUIREMENTS={requirements}"
|
||||||
|
.format(server_type=server_type,
|
||||||
|
server=server,
|
||||||
|
requirements=requirements))
|
||||||
|
|
||||||
|
p("- language: generic")
|
||||||
|
p(" os: osx")
|
||||||
|
p(" env: BUILD=test")
|
||||||
|
|
||||||
|
with section("branches"):
|
||||||
|
with section("only"):
|
||||||
|
p('- auto')
|
||||||
|
p('- master')
|
||||||
|
|
@ -1,56 +0,0 @@
|
||||||
#!/bin/sh
|
|
||||||
|
|
||||||
set -xeu
|
|
||||||
|
|
||||||
SCRIPT_PATH=$(realpath "$0")
|
|
||||||
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
|
|
||||||
|
|
||||||
# E.g.: debian, ubuntu
|
|
||||||
DISTRO=${DISTRO:1}
|
|
||||||
# E.g.: bullseye, bookworm
|
|
||||||
DISTROVER=${DISTROVER:2}
|
|
||||||
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
|
|
||||||
CONTEXT="$(mktemp -d)"
|
|
||||||
|
|
||||||
DEST_DIR="$SCRIPT_DIR/../$DISTRO-$DISTROVER"
|
|
||||||
|
|
||||||
cleanup() {
|
|
||||||
rm -rf "$CONTEXT"
|
|
||||||
}
|
|
||||||
trap cleanup EXIT
|
|
||||||
|
|
||||||
# Prepare files.
|
|
||||||
cp scripts/_build_deb_in_container.bash "$CONTEXT"
|
|
||||||
python setup.py sdist -d "$CONTEXT"
|
|
||||||
|
|
||||||
docker run -it \
|
|
||||||
--name "$CONTAINER_NAME" \
|
|
||||||
--volume "$CONTEXT:/source" \
|
|
||||||
"$DISTRO:$DISTROVER" \
|
|
||||||
bash /source/_build_deb_in_container.bash
|
|
||||||
|
|
||||||
# Keep around the package filename.
|
|
||||||
PACKAGE=$(ls "$CONTEXT"/*.deb)
|
|
||||||
PACKAGE=$(basename "$PACKAGE")
|
|
||||||
|
|
||||||
# Save the build deb files.
|
|
||||||
mkdir -p "$DEST_DIR"
|
|
||||||
cp "$CONTEXT"/*.deb "$DEST_DIR"
|
|
||||||
|
|
||||||
echo Build complete! 🤖
|
|
||||||
|
|
||||||
# Packagecloud uses some internal IDs for each distro.
|
|
||||||
# Extract the one for the distro we're publishing.
|
|
||||||
DISTRO_ID=$(
|
|
||||||
curl -s \
|
|
||||||
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/distributions.json | \
|
|
||||||
jq '.deb | .[] | select(.index_name=="'"$DISTRO"'") | .versions | .[] | select(.index_name=="'"$DISTROVER"'") | .id'
|
|
||||||
)
|
|
||||||
|
|
||||||
# Actually push the package.
|
|
||||||
curl \
|
|
||||||
-F "package[distro_version_id]=$DISTRO_ID" \
|
|
||||||
-F "package[package_file]=@$DEST_DIR/$PACKAGE" \
|
|
||||||
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/repos/pimutils/vdirsyncer/packages.json
|
|
||||||
|
|
||||||
echo Done! ✨
|
|
||||||
26
scripts/travis-install.sh
Normal file
26
scripts/travis-install.sh
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
# Travis uses an outdated PyPy, this installs the most recent one. This
|
||||||
|
# makes the tests run on Travis' legacy infrastructure, but so be it.
|
||||||
|
# temporary pyenv installation to get pypy-2.6 before container infra
|
||||||
|
# upgrade
|
||||||
|
# Taken from werkzeug, which took it from pyca/cryptography
|
||||||
|
if [ "$TRAVIS_PYTHON_VERSION" = "pypy" ]; then
|
||||||
|
git clone https://github.com/yyuu/pyenv.git ~/.pyenv;
|
||||||
|
PYENV_ROOT="$HOME/.pyenv";
|
||||||
|
PATH="$PYENV_ROOT/bin:$PATH";
|
||||||
|
eval "$(pyenv init -)";
|
||||||
|
pyenv install pypy-4.0.1;
|
||||||
|
pyenv global pypy-4.0.1;
|
||||||
|
python --version;
|
||||||
|
pip --version;
|
||||||
|
fi
|
||||||
|
|
||||||
|
# The OS X VM doesn't have any Python support at all
|
||||||
|
# See https://github.com/travis-ci/travis-ci/issues/2312
|
||||||
|
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
|
||||||
|
brew update
|
||||||
|
brew install python3
|
||||||
|
virtualenv -p python3 $HOME/osx-py3
|
||||||
|
. $HOME/osx-py3/bin/activate
|
||||||
|
fi
|
||||||
10
setup.cfg
Normal file
10
setup.cfg
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
[wheel]
|
||||||
|
universal = 1
|
||||||
|
|
||||||
|
[pytest]
|
||||||
|
norecursedirs = tests/storage/servers/*
|
||||||
|
|
||||||
|
[flake8]
|
||||||
|
# W503: Line break before operator
|
||||||
|
ignore = W503, E731
|
||||||
|
exclude = tests/storage/servers/*
|
||||||
88
setup.py
Normal file
88
setup.py
Normal file
|
|
@ -0,0 +1,88 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
'''
|
||||||
|
Vdirsyncer is a synchronization tool for vdir. See the README for more details.
|
||||||
|
'''
|
||||||
|
|
||||||
|
# Packagers: Vdirsyncer's version is automatically detected using
|
||||||
|
# setuptools-scm, but that one is not a runtime dependency.
|
||||||
|
#
|
||||||
|
# Do NOT use the GitHub's tarballs, those don't contain any version information
|
||||||
|
# detectable for setuptools-scm. Rather use the PyPI ones.
|
||||||
|
|
||||||
|
|
||||||
|
import platform
|
||||||
|
|
||||||
|
from setuptools import Command, find_packages, setup
|
||||||
|
|
||||||
|
|
||||||
|
requirements = [
|
||||||
|
# https://github.com/mitsuhiko/click/issues/200
|
||||||
|
'click>=5.0',
|
||||||
|
'click-log>=0.1.3',
|
||||||
|
'click-threading>=0.1.2',
|
||||||
|
# !=2.9.0: https://github.com/kennethreitz/requests/issues/2930
|
||||||
|
# >=2.4.1: https://github.com/shazow/urllib3/pull/444
|
||||||
|
#
|
||||||
|
# Without the above pull request, `verify=False` also disables fingerprint
|
||||||
|
# validation. This is *not* what we want, and it's not possible to
|
||||||
|
# replicate vdirsyncer's current behavior (verifying fingerprints without
|
||||||
|
# verifying against CAs) with older versions of urllib3.
|
||||||
|
'requests >=2.4.1, !=2.9.0',
|
||||||
|
'lxml >=3.1' + (
|
||||||
|
# See https://github.com/untitaker/vdirsyncer/issues/298
|
||||||
|
# We pin some LXML version that is known to work with PyPy
|
||||||
|
# I assume nobody actually uses PyPy with vdirsyncer, so this is
|
||||||
|
# moot
|
||||||
|
', <=3.4.4'
|
||||||
|
if platform.python_implementation() == 'PyPy'
|
||||||
|
else ''
|
||||||
|
),
|
||||||
|
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
|
||||||
|
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
|
||||||
|
'requests_toolbelt >=0.4.0',
|
||||||
|
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
|
||||||
|
'atomicwrites>=0.1.7'
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class PrintRequirements(Command):
|
||||||
|
|
||||||
|
description = 'Prints minimal requirements'
|
||||||
|
|
||||||
|
user_options = []
|
||||||
|
|
||||||
|
def initialize_options(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def finalize_options(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
for requirement in requirements:
|
||||||
|
print(requirement.replace(">", "=").replace(" ", ""))
|
||||||
|
|
||||||
|
setup(
|
||||||
|
name='vdirsyncer',
|
||||||
|
use_scm_version={
|
||||||
|
'write_to': 'vdirsyncer/version.py',
|
||||||
|
},
|
||||||
|
setup_requires=['setuptools_scm'],
|
||||||
|
author='Markus Unterwaditzer',
|
||||||
|
author_email='markus@unterwaditzer.net',
|
||||||
|
url='https://github.com/untitaker/vdirsyncer',
|
||||||
|
description='Synchronize calendars and contacts',
|
||||||
|
license='MIT',
|
||||||
|
long_description=open('README.rst').read(),
|
||||||
|
packages=find_packages(exclude=['tests.*', 'tests']),
|
||||||
|
include_package_data=True,
|
||||||
|
entry_points={
|
||||||
|
'console_scripts': ['vdirsyncer = vdirsyncer.cli:main']
|
||||||
|
},
|
||||||
|
install_requires=requirements,
|
||||||
|
extras_require={
|
||||||
|
'remotestorage': ['requests-oauthlib']
|
||||||
|
},
|
||||||
|
cmdclass={
|
||||||
|
'minimal_requirements': PrintRequirements
|
||||||
|
}
|
||||||
|
)
|
||||||
4
test-requirements.txt
Normal file
4
test-requirements.txt
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
hypothesis>=3.1
|
||||||
|
pytest
|
||||||
|
pytest-localserver
|
||||||
|
pytest-subtesthack
|
||||||
|
|
@ -1,26 +1,22 @@
|
||||||
"""
|
# -*- coding: utf-8 -*-
|
||||||
|
'''
|
||||||
Test suite for vdirsyncer.
|
Test suite for vdirsyncer.
|
||||||
"""
|
'''
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import hypothesis.strategies as st
|
import hypothesis.strategies as st
|
||||||
import urllib3.exceptions
|
|
||||||
|
|
||||||
from vdirsyncer.vobject import normalize_item
|
from vdirsyncer.utils.vobject import normalize_item
|
||||||
|
|
||||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
|
||||||
|
|
||||||
|
|
||||||
def blow_up(*a, **kw):
|
def blow_up(*a, **kw):
|
||||||
raise AssertionError("Did not expect to be called.")
|
raise AssertionError('Did not expect to be called.')
|
||||||
|
|
||||||
|
|
||||||
def assert_item_equals(a, b):
|
def assert_item_equals(a, b):
|
||||||
assert normalize_item(a) == normalize_item(b)
|
assert normalize_item(a) == normalize_item(b)
|
||||||
|
|
||||||
|
|
||||||
VCARD_TEMPLATE = """BEGIN:VCARD
|
VCARD_TEMPLATE = u'''BEGIN:VCARD
|
||||||
VERSION:3.0
|
VERSION:3.0
|
||||||
FN:Cyrus Daboo
|
FN:Cyrus Daboo
|
||||||
N:Daboo;Cyrus;;;
|
N:Daboo;Cyrus;;;
|
||||||
|
|
@ -34,9 +30,9 @@ TEL;TYPE=FAX:412 605 0705
|
||||||
URL;VALUE=URI:http://www.example.com
|
URL;VALUE=URI:http://www.example.com
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
END:VCARD"""
|
END:VCARD'''
|
||||||
|
|
||||||
TASK_TEMPLATE = """BEGIN:VCALENDAR
|
TASK_TEMPLATE = u'''BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//dmfs.org//mimedir.icalendar//EN
|
PRODID:-//dmfs.org//mimedir.icalendar//EN
|
||||||
BEGIN:VTODO
|
BEGIN:VTODO
|
||||||
|
|
@ -48,63 +44,33 @@ SUMMARY:Book: Kowlani - Tödlicher Staub
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
END:VTODO
|
END:VTODO
|
||||||
END:VCALENDAR"""
|
END:VCALENDAR'''
|
||||||
|
|
||||||
|
|
||||||
BARE_EVENT_TEMPLATE = """BEGIN:VEVENT
|
BARE_EVENT_TEMPLATE = u'''BEGIN:VEVENT
|
||||||
DTSTART:19970714T170000Z
|
DTSTART:19970714T170000Z
|
||||||
DTEND:19970715T035959Z
|
DTEND:19970715T035959Z
|
||||||
SUMMARY:Bastille Day Party
|
SUMMARY:Bastille Day Party
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
END:VEVENT"""
|
END:VEVENT'''
|
||||||
|
|
||||||
|
|
||||||
EVENT_TEMPLATE = (
|
EVENT_TEMPLATE = u'''BEGIN:VCALENDAR
|
||||||
"""BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
"""
|
''' + BARE_EVENT_TEMPLATE + u'''
|
||||||
+ BARE_EVENT_TEMPLATE
|
END:VCALENDAR'''
|
||||||
+ """
|
|
||||||
END:VCALENDAR"""
|
|
||||||
)
|
|
||||||
|
|
||||||
EVENT_WITH_TIMEZONE_TEMPLATE = (
|
|
||||||
"""BEGIN:VCALENDAR
|
|
||||||
BEGIN:VTIMEZONE
|
|
||||||
TZID:Europe/Rome
|
|
||||||
X-LIC-LOCATION:Europe/Rome
|
|
||||||
BEGIN:DAYLIGHT
|
|
||||||
TZOFFSETFROM:+0100
|
|
||||||
TZOFFSETTO:+0200
|
|
||||||
TZNAME:CEST
|
|
||||||
DTSTART:19700329T020000
|
|
||||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
|
|
||||||
END:DAYLIGHT
|
|
||||||
BEGIN:STANDARD
|
|
||||||
TZOFFSETFROM:+0200
|
|
||||||
TZOFFSETTO:+0100
|
|
||||||
TZNAME:CET
|
|
||||||
DTSTART:19701025T030000
|
|
||||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
|
||||||
END:STANDARD
|
|
||||||
END:VTIMEZONE
|
|
||||||
"""
|
|
||||||
+ BARE_EVENT_TEMPLATE
|
|
||||||
+ """
|
|
||||||
END:VCALENDAR"""
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
SIMPLE_TEMPLATE = """BEGIN:FOO
|
SIMPLE_TEMPLATE = u'''BEGIN:FOO
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
HAHA:YES
|
HAHA:YES
|
||||||
END:FOO"""
|
END:FOO'''
|
||||||
|
|
||||||
printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))
|
printable_characters_strategy = st.text(
|
||||||
|
st.characters(blacklist_categories=(
|
||||||
uid_strategy = st.text(
|
'Cc', 'Cs'
|
||||||
st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
|
))
|
||||||
).filter(lambda x: x.strip() == x)
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,34 +1,31 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
import pytest
|
|
||||||
from click.testing import CliRunner
|
from click.testing import CliRunner
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
import vdirsyncer.cli as cli
|
import vdirsyncer.cli as cli
|
||||||
|
|
||||||
|
|
||||||
class _CustomRunner:
|
class _CustomRunner(object):
|
||||||
def __init__(self, tmpdir):
|
def __init__(self, tmpdir):
|
||||||
self.tmpdir = tmpdir
|
self.tmpdir = tmpdir
|
||||||
self.cfg = tmpdir.join("config")
|
self.cfg = tmpdir.join('config')
|
||||||
self.runner = CliRunner()
|
self.runner = CliRunner()
|
||||||
|
|
||||||
def invoke(self, args, env=None, **kwargs):
|
def invoke(self, args, env=None, **kwargs):
|
||||||
env = env or {}
|
env = env or {}
|
||||||
env.setdefault("VDIRSYNCER_CONFIG", str(self.cfg))
|
env.setdefault('VDIRSYNCER_CONFIG', str(self.cfg))
|
||||||
return self.runner.invoke(cli.app, args, env=env, **kwargs)
|
return self.runner.invoke(cli.app, args, env=env, **kwargs)
|
||||||
|
|
||||||
def write_with_general(self, data):
|
def write_with_general(self, data):
|
||||||
self.cfg.write(
|
self.cfg.write(dedent('''
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
[general]
|
[general]
|
||||||
status_path = "{}/status/"
|
status_path = {}/status/
|
||||||
"""
|
''').format(str(self.tmpdir)))
|
||||||
).format(str(self.tmpdir))
|
self.cfg.write(data, mode='a')
|
||||||
)
|
|
||||||
self.cfg.write(data, mode="a")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
205
tests/cli/test_config.py
Normal file
205
tests/cli/test_config.py
Normal file
|
|
@ -0,0 +1,205 @@
|
||||||
|
import io
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
import vdirsyncer.cli.utils # noqa
|
||||||
|
from vdirsyncer import cli, exceptions
|
||||||
|
from vdirsyncer.cli.config import parse_config_value, \
|
||||||
|
read_config as _read_config
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def read_config(tmpdir):
|
||||||
|
def inner(cfg):
|
||||||
|
f = io.StringIO(dedent(cfg.format(base=str(tmpdir))))
|
||||||
|
return _read_config(f)
|
||||||
|
return inner
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_config(read_config, monkeypatch):
|
||||||
|
errors = []
|
||||||
|
monkeypatch.setattr('vdirsyncer.cli.cli_logger.error', errors.append)
|
||||||
|
general, pairs, storages = read_config(u'''
|
||||||
|
[general]
|
||||||
|
status_path = /tmp/status/
|
||||||
|
|
||||||
|
[pair bob]
|
||||||
|
a = bob_a
|
||||||
|
b = bob_b
|
||||||
|
foo = bar
|
||||||
|
bam = true
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage bob_a]
|
||||||
|
type = filesystem
|
||||||
|
path = /tmp/contacts/
|
||||||
|
fileext = .vcf
|
||||||
|
yesno = false
|
||||||
|
number = 42
|
||||||
|
|
||||||
|
[storage bob_b]
|
||||||
|
type = carddav
|
||||||
|
|
||||||
|
[bogus]
|
||||||
|
lol = true
|
||||||
|
''')
|
||||||
|
|
||||||
|
assert general == {'status_path': '/tmp/status/'}
|
||||||
|
assert pairs == {'bob': ('bob_a', 'bob_b',
|
||||||
|
{'collections': None, 'bam': True, 'foo': 'bar'})}
|
||||||
|
assert storages == {
|
||||||
|
'bob_a': {'type': 'filesystem', 'path': '/tmp/contacts/', 'fileext':
|
||||||
|
'.vcf', 'yesno': False, 'number': 42,
|
||||||
|
'instance_name': 'bob_a'},
|
||||||
|
'bob_b': {'type': 'carddav', 'instance_name': 'bob_b'}
|
||||||
|
}
|
||||||
|
|
||||||
|
assert len(errors) == 1
|
||||||
|
assert errors[0].startswith('Unknown section')
|
||||||
|
assert 'bogus' in errors[0]
|
||||||
|
|
||||||
|
|
||||||
|
def test_missing_collections_param(read_config, monkeypatch):
|
||||||
|
errorlog = []
|
||||||
|
monkeypatch.setattr('vdirsyncer.cli.cli_logger.error', errorlog.append)
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(u'''
|
||||||
|
[general]
|
||||||
|
status_path = /tmp/status/
|
||||||
|
|
||||||
|
[pair bob]
|
||||||
|
a = bob_a
|
||||||
|
b = bob_b
|
||||||
|
|
||||||
|
[storage bob_a]
|
||||||
|
type = lmao
|
||||||
|
|
||||||
|
[storage bob_b]
|
||||||
|
type = lmao
|
||||||
|
''')
|
||||||
|
|
||||||
|
assert 'collections parameter missing' in str(excinfo.value)
|
||||||
|
assert not errorlog
|
||||||
|
|
||||||
|
|
||||||
|
def test_storage_instance_from_config(monkeypatch):
|
||||||
|
def lol(**kw):
|
||||||
|
assert kw == {'foo': 'bar', 'baz': 1}
|
||||||
|
return 'OK'
|
||||||
|
|
||||||
|
monkeypatch.setitem(cli.utils.storage_names._storages,
|
||||||
|
'lol', lol)
|
||||||
|
config = {'type': 'lol', 'foo': 'bar', 'baz': 1}
|
||||||
|
assert cli.utils.storage_instance_from_config(config) == 'OK'
|
||||||
|
|
||||||
|
|
||||||
|
def test_missing_general_section(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(u'''
|
||||||
|
[pair my_pair]
|
||||||
|
a = my_a
|
||||||
|
b = my_b
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/path_a/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/path_b/
|
||||||
|
fileext = .txt
|
||||||
|
''')
|
||||||
|
|
||||||
|
assert 'Invalid general section.' in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_wrong_general_section(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(u'''
|
||||||
|
[general]
|
||||||
|
wrong = true
|
||||||
|
''')
|
||||||
|
|
||||||
|
assert 'Invalid general section.' in str(excinfo.value)
|
||||||
|
assert excinfo.value.problems == [
|
||||||
|
'general section doesn\'t take the parameters: wrong',
|
||||||
|
'general section is missing the parameters: status_path'
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_storage_name():
|
||||||
|
f = io.StringIO(dedent(u'''
|
||||||
|
[general]
|
||||||
|
status_path = {base}/status/
|
||||||
|
|
||||||
|
[storage foo.bar]
|
||||||
|
'''))
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
_read_config(f)
|
||||||
|
|
||||||
|
assert 'invalid characters' in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_config_value(capsys):
|
||||||
|
invalid = object()
|
||||||
|
|
||||||
|
def x(s):
|
||||||
|
try:
|
||||||
|
rv = parse_config_value(s)
|
||||||
|
except ValueError:
|
||||||
|
return invalid
|
||||||
|
else:
|
||||||
|
warnings = capsys.readouterr()[1]
|
||||||
|
return rv, len(warnings.splitlines())
|
||||||
|
|
||||||
|
assert x('123 # comment!') is invalid
|
||||||
|
|
||||||
|
assert x('True') == ('True', 1)
|
||||||
|
assert x('False') == ('False', 1)
|
||||||
|
assert x('Yes') == ('Yes', 1)
|
||||||
|
assert x('None') == ('None', 1)
|
||||||
|
assert x('"True"') == ('True', 0)
|
||||||
|
assert x('"False"') == ('False', 0)
|
||||||
|
|
||||||
|
assert x('"123 # comment!"') == ('123 # comment!', 0)
|
||||||
|
assert x('true') == (True, 0)
|
||||||
|
assert x('false') == (False, 0)
|
||||||
|
assert x('null') == (None, 0)
|
||||||
|
assert x('3.14') == (3.14, 0)
|
||||||
|
assert x('') == ('', 0)
|
||||||
|
assert x('""') == ('', 0)
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_collections_arg():
|
||||||
|
f = io.StringIO(dedent(u'''
|
||||||
|
[general]
|
||||||
|
status_path = /tmp/status/
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = [null]
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = /tmp/foo/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = /tmp/bar/
|
||||||
|
fileext = .txt
|
||||||
|
'''))
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
_read_config(f)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
'Section `pair foobar`: `collections` parameter must be a list of '
|
||||||
|
'collection names (strings!) or `null`.'
|
||||||
|
) in str(excinfo.value)
|
||||||
62
tests/cli/test_discover.py
Normal file
62
tests/cli/test_discover.py
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_command(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/foo/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/bar/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = ["from a"]
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
foo = tmpdir.mkdir('foo')
|
||||||
|
bar = tmpdir.mkdir('bar')
|
||||||
|
|
||||||
|
for x in 'abc':
|
||||||
|
foo.mkdir(x)
|
||||||
|
bar.mkdir(x)
|
||||||
|
bar.mkdir('d')
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert not result.exception
|
||||||
|
lines = result.output.splitlines()
|
||||||
|
assert lines[0].startswith('Discovering')
|
||||||
|
assert 'Syncing foobar/a' in lines
|
||||||
|
assert 'Syncing foobar/b' in lines
|
||||||
|
assert 'Syncing foobar/c' in lines
|
||||||
|
assert 'Syncing foobar/d' not in lines
|
||||||
|
|
||||||
|
foo.mkdir('d')
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert not result.exception
|
||||||
|
assert 'Syncing foobar/a' in lines
|
||||||
|
assert 'Syncing foobar/b' in lines
|
||||||
|
assert 'Syncing foobar/c' in lines
|
||||||
|
assert 'Syncing foobar/d' not in result.output
|
||||||
|
|
||||||
|
result = runner.invoke(['discover'])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert not result.exception
|
||||||
|
assert 'Syncing foobar/a' in lines
|
||||||
|
assert 'Syncing foobar/b' in lines
|
||||||
|
assert 'Syncing foobar/c' in lines
|
||||||
|
assert 'Syncing foobar/d' in result.output
|
||||||
|
|
||||||
|
# Check for redundant data that is already in the config. This avoids
|
||||||
|
# copying passwords from the config too.
|
||||||
|
assert 'fileext' not in tmpdir \
|
||||||
|
.join('status') \
|
||||||
|
.join('foobar.collections') \
|
||||||
|
.read()
|
||||||
173
tests/cli/test_fetchparams.py
Normal file
173
tests/cli/test_fetchparams.py
Normal file
|
|
@ -0,0 +1,173 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
from hypothesis import given
|
||||||
|
import hypothesis.strategies as st
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.cli.fetchparams import STRATEGIES, expand_fetch_params
|
||||||
|
from vdirsyncer.utils.compat import PY2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mystrategy(monkeypatch):
|
||||||
|
def strategy(x):
|
||||||
|
calls.append(x)
|
||||||
|
return x
|
||||||
|
calls = []
|
||||||
|
monkeypatch.setitem(STRATEGIES, 'mystrategy', strategy)
|
||||||
|
return calls
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def value_cache(monkeypatch):
|
||||||
|
_cache = {}
|
||||||
|
|
||||||
|
class FakeContext(object):
|
||||||
|
fetched_params = _cache
|
||||||
|
|
||||||
|
def find_object(self, _):
|
||||||
|
return self
|
||||||
|
|
||||||
|
def get_context(*a, **kw):
|
||||||
|
return FakeContext()
|
||||||
|
|
||||||
|
monkeypatch.setattr('click.get_current_context', get_context)
|
||||||
|
return _cache
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_password_from_command(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/foo/
|
||||||
|
fileext.fetch = ["command", "echo", ".txt"]
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/bar/
|
||||||
|
fileext.fetch = ["prompt", "Fileext for bar"]
|
||||||
|
'''.format(base=str(tmpdir))))
|
||||||
|
|
||||||
|
foo = tmpdir.ensure('foo', dir=True)
|
||||||
|
foo.ensure('a', dir=True)
|
||||||
|
foo.ensure('b', dir=True)
|
||||||
|
foo.ensure('c', dir=True)
|
||||||
|
bar = tmpdir.ensure('bar', dir=True)
|
||||||
|
bar.ensure('a', dir=True)
|
||||||
|
bar.ensure('b', dir=True)
|
||||||
|
bar.ensure('c', dir=True)
|
||||||
|
|
||||||
|
result = runner.invoke(['discover'], input='.asdf\n')
|
||||||
|
assert not result.exception
|
||||||
|
status = tmpdir.join('status').join('foobar.collections').read()
|
||||||
|
assert 'foo' in status
|
||||||
|
assert 'bar' in status
|
||||||
|
assert 'asdf' not in status
|
||||||
|
assert 'txt' not in status
|
||||||
|
|
||||||
|
foo.join('a').join('foo.txt').write('BEGIN:VCARD\nUID:foo\nEND:VCARD')
|
||||||
|
result = runner.invoke(['sync'], input='.asdf\n')
|
||||||
|
assert not result.exception
|
||||||
|
assert [x.basename for x in bar.join('a').listdir()] == ['foo.asdf']
|
||||||
|
|
||||||
|
|
||||||
|
def test_key_conflict(monkeypatch, mystrategy):
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
expand_fetch_params({
|
||||||
|
'foo': 'bar',
|
||||||
|
'foo.fetch': ['mystrategy', 'baz']
|
||||||
|
})
|
||||||
|
|
||||||
|
assert 'Can\'t set foo.fetch and foo.' in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(PY2, reason='Don\'t care about Python 2')
|
||||||
|
@given(s=st.text(), t=st.text(min_size=1))
|
||||||
|
def test_fuzzing(s, t, mystrategy):
|
||||||
|
config = expand_fetch_params({
|
||||||
|
'{}.fetch'.format(s): ['mystrategy', t]
|
||||||
|
})
|
||||||
|
|
||||||
|
assert config[s] == t
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('value', [
|
||||||
|
[],
|
||||||
|
'lol',
|
||||||
|
42
|
||||||
|
])
|
||||||
|
def test_invalid_fetch_value(mystrategy, value):
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
expand_fetch_params({
|
||||||
|
'foo.fetch': value
|
||||||
|
})
|
||||||
|
|
||||||
|
assert 'Expected a list' in str(excinfo.value) or \
|
||||||
|
'Expected list of length > 0' in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_unknown_strategy():
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
expand_fetch_params({
|
||||||
|
'foo.fetch': ['unreal', 'asdf']
|
||||||
|
})
|
||||||
|
|
||||||
|
assert 'Unknown strategy' in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_caching(monkeypatch, mystrategy, value_cache):
|
||||||
|
orig_cfg = {'foo.fetch': ['mystrategy', 'asdf']}
|
||||||
|
|
||||||
|
rv = expand_fetch_params(orig_cfg)
|
||||||
|
assert rv['foo'] == 'asdf'
|
||||||
|
assert mystrategy == ['asdf']
|
||||||
|
assert len(value_cache) == 1
|
||||||
|
|
||||||
|
rv = expand_fetch_params(orig_cfg)
|
||||||
|
assert rv['foo'] == 'asdf'
|
||||||
|
assert mystrategy == ['asdf']
|
||||||
|
assert len(value_cache) == 1
|
||||||
|
|
||||||
|
value_cache.clear()
|
||||||
|
rv = expand_fetch_params(orig_cfg)
|
||||||
|
assert rv['foo'] == 'asdf'
|
||||||
|
assert mystrategy == ['asdf'] * 2
|
||||||
|
assert len(value_cache) == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_failed_strategy(monkeypatch, value_cache):
|
||||||
|
calls = []
|
||||||
|
|
||||||
|
def strategy(x):
|
||||||
|
calls.append(x)
|
||||||
|
raise KeyboardInterrupt()
|
||||||
|
|
||||||
|
monkeypatch.setitem(STRATEGIES, 'mystrategy', strategy)
|
||||||
|
|
||||||
|
orig_cfg = {'foo.fetch': ['mystrategy', 'asdf']}
|
||||||
|
|
||||||
|
for _ in range(2):
|
||||||
|
with pytest.raises(KeyboardInterrupt):
|
||||||
|
expand_fetch_params(orig_cfg)
|
||||||
|
|
||||||
|
assert len(value_cache) == 1
|
||||||
|
assert len(calls) == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_empty_value(monkeypatch, mystrategy):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
expand_fetch_params({
|
||||||
|
'foo.fetch': ['mystrategy', '']
|
||||||
|
})
|
||||||
|
|
||||||
|
assert 'Empty value for foo.fetch, this most likely indicates an error' \
|
||||||
|
in str(excinfo.value)
|
||||||
420
tests/cli/test_main.py
Normal file
420
tests/cli/test_main.py
Normal file
|
|
@ -0,0 +1,420 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import json
|
||||||
|
import unicodedata
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
from click.testing import CliRunner
|
||||||
|
|
||||||
|
from hypothesis import example, given
|
||||||
|
import hypothesis.strategies as st
|
||||||
|
|
||||||
|
from pkg_resources import load_entry_point
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
import vdirsyncer.cli as cli
|
||||||
|
from vdirsyncer.utils.compat import PY2, to_native, to_unicode
|
||||||
|
|
||||||
|
|
||||||
|
def test_entry_points(monkeypatch, capsys):
|
||||||
|
monkeypatch.setattr('sys.argv', ['--help'])
|
||||||
|
with pytest.raises(SystemExit) as excinfo:
|
||||||
|
load_entry_point('vdirsyncer', 'console_scripts', 'vdirsyncer')()
|
||||||
|
|
||||||
|
assert excinfo.value.code == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_simple_run(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[pair my_pair]
|
||||||
|
a = my_a
|
||||||
|
b = my_b
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/path_a/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/path_b/
|
||||||
|
fileext = .txt
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
tmpdir.mkdir('path_a')
|
||||||
|
tmpdir.mkdir('path_b')
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
tmpdir.join('path_a/haha.txt').write('UID:haha')
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert 'Copying (uploading) item haha to my_b' in result.output
|
||||||
|
assert tmpdir.join('path_b/haha.txt').read() == 'UID:haha'
|
||||||
|
|
||||||
|
|
||||||
|
def test_sync_inexistant_pair(tmpdir, runner):
|
||||||
|
runner.write_with_general("")
|
||||||
|
result = runner.invoke(['sync', 'foo'])
|
||||||
|
assert result.exception
|
||||||
|
assert 'pair foo does not exist.' in result.output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_debug_connections(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[pair my_pair]
|
||||||
|
a = my_a
|
||||||
|
b = my_b
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/path_a/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/path_b/
|
||||||
|
fileext = .txt
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
tmpdir.mkdir('path_a')
|
||||||
|
tmpdir.mkdir('path_b')
|
||||||
|
|
||||||
|
result = runner.invoke(['-vdebug', 'sync', '--max-workers=3'])
|
||||||
|
assert 'using 3 maximal workers' in result.output.lower()
|
||||||
|
|
||||||
|
result = runner.invoke(['-vdebug', 'sync'])
|
||||||
|
assert 'using 1 maximal workers' in result.output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_empty_storage(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[pair my_pair]
|
||||||
|
a = my_a
|
||||||
|
b = my_b
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/path_a/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/path_b/
|
||||||
|
fileext = .txt
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
tmpdir.mkdir('path_a')
|
||||||
|
tmpdir.mkdir('path_b')
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
tmpdir.join('path_a/haha.txt').write('UID:haha')
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert not result.exception
|
||||||
|
tmpdir.join('path_b/haha.txt').remove()
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
lines = result.output.splitlines()
|
||||||
|
assert len(lines) == 2
|
||||||
|
assert lines[0] == 'Syncing my_pair'
|
||||||
|
assert lines[1].startswith('error: my_pair: '
|
||||||
|
'Storage "my_b" was completely emptied.')
|
||||||
|
assert result.exception
|
||||||
|
|
||||||
|
|
||||||
|
def test_verbosity(tmpdir):
|
||||||
|
runner = CliRunner()
|
||||||
|
config_file = tmpdir.join('config')
|
||||||
|
config_file.write('')
|
||||||
|
|
||||||
|
result = runner.invoke(
|
||||||
|
cli.app, ['--verbosity=HAHA', 'sync'],
|
||||||
|
env={'VDIRSYNCER_CONFIG': str(config_file)}
|
||||||
|
)
|
||||||
|
assert result.exception
|
||||||
|
assert 'invalid value for "--verbosity"' in result.output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_deprecated_item_status(tmpdir):
|
||||||
|
f = tmpdir.join('mypair.items')
|
||||||
|
f.write(dedent('''
|
||||||
|
["ident", ["href_a", "etag_a", "href_b", "etag_b"]]
|
||||||
|
["ident_two", ["href_a", "etag_a", "href_b", "etag_b"]]
|
||||||
|
''').strip())
|
||||||
|
|
||||||
|
data = {
|
||||||
|
'ident': ['href_a', 'etag_a', 'href_b', 'etag_b'],
|
||||||
|
'ident_two': ['href_a', 'etag_a', 'href_b', 'etag_b']
|
||||||
|
}
|
||||||
|
|
||||||
|
assert cli.utils.load_status(
|
||||||
|
str(tmpdir), 'mypair', data_type='items') == data
|
||||||
|
|
||||||
|
cli.utils.save_status(
|
||||||
|
str(tmpdir), 'mypair', data_type='items', data=data)
|
||||||
|
assert cli.utils.load_status(
|
||||||
|
str(tmpdir), 'mypair', data_type='items') == data
|
||||||
|
|
||||||
|
|
||||||
|
def test_collections_cache_invalidation(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/foo/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/bar/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
foo = tmpdir.mkdir('foo')
|
||||||
|
bar = tmpdir.mkdir('bar')
|
||||||
|
for x in 'abc':
|
||||||
|
foo.mkdir(x)
|
||||||
|
bar.mkdir(x)
|
||||||
|
foo.join('a/itemone.txt').write('UID:itemone')
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert not result.exception
|
||||||
|
assert 'detected change in config file' not in result.output.lower()
|
||||||
|
|
||||||
|
rv = bar.join('a').listdir()
|
||||||
|
assert len(rv) == 1
|
||||||
|
assert rv[0].basename == 'itemone.txt'
|
||||||
|
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/foo/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/bar2/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
for entry in tmpdir.join('status').listdir():
|
||||||
|
if not str(entry).endswith('.collections'):
|
||||||
|
entry.remove()
|
||||||
|
bar2 = tmpdir.mkdir('bar2')
|
||||||
|
for x in 'abc':
|
||||||
|
bar2.mkdir(x)
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert 'detected change in config file' in result.output.lower()
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
rv = bar.join('a').listdir()
|
||||||
|
rv2 = bar2.join('a').listdir()
|
||||||
|
assert len(rv) == len(rv2) == 1
|
||||||
|
assert rv[0].basename == rv2[0].basename == 'itemone.txt'
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_pairs_as_cli_arg(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/foo/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/bar/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
for base in ('foo', 'bar'):
|
||||||
|
base = tmpdir.mkdir(base)
|
||||||
|
for c in 'abc':
|
||||||
|
base.mkdir(c)
|
||||||
|
|
||||||
|
result = runner.invoke(['sync', 'foobar/d'])
|
||||||
|
assert result.exception
|
||||||
|
assert 'pair foobar: collection d not found' in result.output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_multiple_pairs(tmpdir, runner):
|
||||||
|
def get_cfg():
|
||||||
|
for name_a, name_b in ('foo', 'bar'), ('bam', 'baz'):
|
||||||
|
yield dedent('''
|
||||||
|
[pair {a}{b}]
|
||||||
|
a = {a}
|
||||||
|
b = {b}
|
||||||
|
collections = null
|
||||||
|
''').format(a=name_a, b=name_b)
|
||||||
|
|
||||||
|
for name in name_a, name_b:
|
||||||
|
yield dedent('''
|
||||||
|
[storage {name}]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/{name}/
|
||||||
|
fileext = .txt
|
||||||
|
''').format(name=name, base=str(tmpdir))
|
||||||
|
|
||||||
|
runner.write_with_general(''.join(get_cfg()))
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert set(result.output.splitlines()) > set([
|
||||||
|
'Discovering collections for pair bambaz',
|
||||||
|
'Discovering collections for pair foobar',
|
||||||
|
'Syncing bambaz',
|
||||||
|
'Syncing foobar',
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
|
@given(collections=st.sets(
|
||||||
|
st.text(
|
||||||
|
st.characters(
|
||||||
|
blacklist_characters=set(
|
||||||
|
u'./\x00' # Invalid chars on POSIX filesystems
|
||||||
|
+ (u';' if PY2 else u'') # https://bugs.python.org/issue16374
|
||||||
|
),
|
||||||
|
# Surrogates can't be encoded to utf-8 in Python
|
||||||
|
blacklist_categories=set(['Cs'])
|
||||||
|
),
|
||||||
|
min_size=1,
|
||||||
|
max_size=50
|
||||||
|
),
|
||||||
|
min_size=1
|
||||||
|
))
|
||||||
|
@example(collections=[u'persönlich'])
|
||||||
|
def test_create_collections(subtest, collections):
|
||||||
|
collections = set(to_native(x, 'utf-8') for x in collections)
|
||||||
|
|
||||||
|
@subtest
|
||||||
|
def test_inner(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = {colls}
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/foo/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/bar/
|
||||||
|
fileext = .txt
|
||||||
|
'''.format(base=str(tmpdir), colls=json.dumps(list(collections)))))
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert result.exception
|
||||||
|
entries = set(x.basename for x in tmpdir.listdir())
|
||||||
|
assert 'foo' not in entries and 'bar' not in entries
|
||||||
|
|
||||||
|
result = runner.invoke(
|
||||||
|
['sync'],
|
||||||
|
input='y\n' * 2 * (len(collections) + 1)
|
||||||
|
)
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
# Macs normally operate on the HFS+ file system which normalizes paths.
|
||||||
|
# That is, if you save a file with accented é in it (u'\xe9') for
|
||||||
|
# example, and then do a os.listdir you will see that the filename got
|
||||||
|
# converted to u'e\u0301'. This is normal unicode NFD normalization
|
||||||
|
# that the Python unicodedata module can handle.
|
||||||
|
#
|
||||||
|
# Quoted from
|
||||||
|
# https://stackoverflow.com/questions/18137554/how-to-convert-path-to-mac-os-x-path-the-almost-nfd-normal-form # noqa
|
||||||
|
u = lambda xs: set(
|
||||||
|
unicodedata.normalize('NFKD', to_unicode(x, 'utf-8'))
|
||||||
|
for x in xs
|
||||||
|
)
|
||||||
|
assert u(x.basename for x in tmpdir.join('foo').listdir()) == \
|
||||||
|
u(x.basename for x in tmpdir.join('bar').listdir()) == \
|
||||||
|
u(collections)
|
||||||
|
|
||||||
|
result = runner.invoke(
|
||||||
|
['sync'] + ['foobar/' + x for x in collections]
|
||||||
|
)
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
|
||||||
|
def test_ident_conflict(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/foo/
|
||||||
|
fileext = .txt
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/bar/
|
||||||
|
fileext = .txt
|
||||||
|
'''.format(base=str(tmpdir))))
|
||||||
|
|
||||||
|
foo = tmpdir.mkdir('foo')
|
||||||
|
tmpdir.mkdir('bar')
|
||||||
|
|
||||||
|
foo.join('one.txt').write('UID:1')
|
||||||
|
foo.join('two.txt').write('UID:1')
|
||||||
|
foo.join('three.txt').write('UID:1')
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert result.exception
|
||||||
|
assert ('error: foobar: Storage "foo" contains multiple items with the '
|
||||||
|
'same UID or even content') in result.output
|
||||||
|
assert sorted([
|
||||||
|
'one.txt' in result.output,
|
||||||
|
'two.txt' in result.output,
|
||||||
|
'three.txt' in result.output,
|
||||||
|
]) == [False, True, True]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('existing,missing', [
|
||||||
|
('foo', 'bar'),
|
||||||
|
('bar', 'foo'),
|
||||||
|
])
|
||||||
|
def test_unknown_storage(tmpdir, runner, existing, missing):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[pair foobar]
|
||||||
|
a = foo
|
||||||
|
b = bar
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage {existing}]
|
||||||
|
type = filesystem
|
||||||
|
path = {base}/{existing}/
|
||||||
|
fileext = .txt
|
||||||
|
'''.format(base=str(tmpdir), existing=existing)))
|
||||||
|
|
||||||
|
tmpdir.mkdir(existing)
|
||||||
|
|
||||||
|
result = runner.invoke(['sync'])
|
||||||
|
assert result.exception
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"Storage '{missing}' not found. "
|
||||||
|
"These are the configured storages: ['{existing}']"
|
||||||
|
.format(missing=missing, existing=existing)
|
||||||
|
) in result.output
|
||||||
83
tests/cli/test_repair.py
Normal file
83
tests/cli/test_repair.py
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
# encoding: utf-8
|
||||||
|
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
from hypothesis import given, settings
|
||||||
|
import hypothesis.strategies as st
|
||||||
|
|
||||||
|
from vdirsyncer.repair import repair_storage
|
||||||
|
from vdirsyncer.storage.memory import MemoryStorage
|
||||||
|
from vdirsyncer.utils import href_safe
|
||||||
|
from vdirsyncer.utils.vobject import Item
|
||||||
|
|
||||||
|
uid_strategy = st.text(st.characters(blacklist_categories=(
|
||||||
|
'Zs', 'Zl', 'Zp',
|
||||||
|
'Cc', 'Cs'
|
||||||
|
)))
|
||||||
|
|
||||||
|
|
||||||
|
@given(uid=uid_strategy)
|
||||||
|
@settings(perform_health_check=False) # Using the random module for UIDs
|
||||||
|
def test_repair_uids(uid):
|
||||||
|
s = MemoryStorage()
|
||||||
|
s.items = {
|
||||||
|
'one': (
|
||||||
|
'asdf',
|
||||||
|
Item(u'BEGIN:VCARD\nFN:Hans\nUID:{}\nEND:VCARD'.format(uid))
|
||||||
|
),
|
||||||
|
'two': (
|
||||||
|
'asdf',
|
||||||
|
Item(u'BEGIN:VCARD\nFN:Peppi\nUID:{}\nEND:VCARD'.format(uid))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
|
||||||
|
assert uid1 == uid2
|
||||||
|
|
||||||
|
repair_storage(s)
|
||||||
|
|
||||||
|
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
|
||||||
|
assert uid1 != uid2
|
||||||
|
|
||||||
|
|
||||||
|
@given(uid=uid_strategy.filter(lambda x: not href_safe(x)))
|
||||||
|
@settings(perform_health_check=False) # Using the random module for UIDs
|
||||||
|
def test_repair_unsafe_uids(uid):
|
||||||
|
s = MemoryStorage()
|
||||||
|
item = Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid))
|
||||||
|
print(repr(item.raw))
|
||||||
|
href, etag = s.upload(item)
|
||||||
|
assert s.get(href)[0].uid == uid
|
||||||
|
assert not href_safe(uid)
|
||||||
|
|
||||||
|
repair_storage(s)
|
||||||
|
|
||||||
|
new_href = list(s.list())[0][0]
|
||||||
|
assert href_safe(new_href)
|
||||||
|
newuid = s.get(new_href)[0].uid
|
||||||
|
assert href_safe(newuid)
|
||||||
|
|
||||||
|
|
||||||
|
def test_full(tmpdir, runner):
|
||||||
|
runner.write_with_general(dedent('''
|
||||||
|
[storage foo]
|
||||||
|
type = filesystem
|
||||||
|
path = {0}/foo/
|
||||||
|
fileext = .txt
|
||||||
|
''').format(str(tmpdir)))
|
||||||
|
|
||||||
|
foo = tmpdir.mkdir('foo')
|
||||||
|
|
||||||
|
result = runner.invoke(['repair', 'foo'], input='y')
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
foo.join('item.txt').write('BEGIN:VCARD\nEND:VCARD')
|
||||||
|
foo.join('toobroken.txt').write('')
|
||||||
|
|
||||||
|
result = runner.invoke(['repair', 'foo'], input='y')
|
||||||
|
assert not result.exception
|
||||||
|
assert 'No UID' in result.output
|
||||||
|
assert 'warning: Item toobroken.txt can\'t be parsed, skipping' \
|
||||||
|
in result.output
|
||||||
|
new_fname, = [x for x in foo.listdir() if 'toobroken' not in str(x)]
|
||||||
|
assert 'UID:' in new_fname.read()
|
||||||
26
tests/cli/test_utils.py
Normal file
26
tests/cli/test_utils.py
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
from hypothesis import given
|
||||||
|
from hypothesis.strategies import (
|
||||||
|
binary,
|
||||||
|
booleans,
|
||||||
|
complex_numbers,
|
||||||
|
floats,
|
||||||
|
integers,
|
||||||
|
none,
|
||||||
|
one_of,
|
||||||
|
text
|
||||||
|
)
|
||||||
|
|
||||||
|
from vdirsyncer.cli.utils import coerce_native
|
||||||
|
|
||||||
|
|
||||||
|
@given(one_of(
|
||||||
|
binary(),
|
||||||
|
booleans(),
|
||||||
|
complex_numbers(),
|
||||||
|
floats(),
|
||||||
|
integers(),
|
||||||
|
none(),
|
||||||
|
text()
|
||||||
|
))
|
||||||
|
def test_coerce_native_fuzzing(s):
|
||||||
|
coerce_native(s)
|
||||||
|
|
@ -1,70 +1,46 @@
|
||||||
"""
|
# -*- coding: utf-8 -*-
|
||||||
|
'''
|
||||||
General-purpose fixtures for vdirsyncer's testsuite.
|
General-purpose fixtures for vdirsyncer's testsuite.
|
||||||
"""
|
'''
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import click_log
|
import click_log
|
||||||
|
|
||||||
|
from hypothesis import HealthCheck, Verbosity, settings
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
|
||||||
from hypothesis import HealthCheck
|
|
||||||
from hypothesis import Verbosity
|
|
||||||
from hypothesis import settings
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
def setup_logging():
|
def setup_logging():
|
||||||
click_log.basic_config("vdirsyncer").setLevel(logging.DEBUG)
|
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def suppress_py2_warning(monkeypatch):
|
||||||
|
monkeypatch.setattr('vdirsyncer.cli._check_python2', lambda: None)
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import pytest_benchmark
|
import pytest_benchmark
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def benchmark():
|
def benchmark():
|
||||||
return lambda x: x()
|
return lambda x: x()
|
||||||
|
|
||||||
else:
|
else:
|
||||||
del pytest_benchmark
|
del pytest_benchmark
|
||||||
|
|
||||||
|
settings.register_profile("ci", settings(
|
||||||
|
max_examples=1000,
|
||||||
|
verbosity=Verbosity.verbose,
|
||||||
|
suppress_health_check=[HealthCheck.too_slow]
|
||||||
|
))
|
||||||
|
settings.register_profile("deterministic", settings(
|
||||||
|
derandomize=True,
|
||||||
|
))
|
||||||
|
|
||||||
settings.register_profile(
|
if os.getenv('DETERMINISTIC_TESTS').lower == 'true':
|
||||||
"ci",
|
|
||||||
settings(
|
|
||||||
max_examples=1000,
|
|
||||||
verbosity=Verbosity.verbose,
|
|
||||||
suppress_health_check=[HealthCheck.too_slow],
|
|
||||||
),
|
|
||||||
)
|
|
||||||
settings.register_profile(
|
|
||||||
"deterministic",
|
|
||||||
settings(
|
|
||||||
derandomize=True,
|
|
||||||
suppress_health_check=list(HealthCheck),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
|
|
||||||
|
|
||||||
if os.environ.get("DETERMINISTIC_TESTS", "false").lower() == "true":
|
|
||||||
settings.load_profile("deterministic")
|
settings.load_profile("deterministic")
|
||||||
elif os.environ.get("CI", "false").lower() == "true":
|
elif os.getenv('CI').lower == 'true':
|
||||||
settings.load_profile("ci")
|
settings.load_profile("ci")
|
||||||
else:
|
|
||||||
settings.load_profile("dev")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
|
||||||
async def aio_session():
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
yield session
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
|
||||||
async def aio_connector():
|
|
||||||
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
|
||||||
yield conn
|
|
||||||
|
|
|
||||||
|
|
@ -1,29 +1,23 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import random
|
import random
|
||||||
import textwrap
|
|
||||||
import uuid
|
|
||||||
from urllib.parse import quote as urlquote
|
|
||||||
from urllib.parse import unquote as urlunquote
|
|
||||||
|
|
||||||
import aiostream
|
from hypothesis import given
|
||||||
|
import hypothesis.strategies as st
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
|
||||||
|
|
||||||
from tests import EVENT_TEMPLATE
|
import vdirsyncer.exceptions as exceptions
|
||||||
from tests import TASK_TEMPLATE
|
from vdirsyncer.storage.base import Item, normalize_meta_value
|
||||||
from tests import VCARD_TEMPLATE
|
from vdirsyncer.utils.compat import iteritems, text_type, urlquote, urlunquote
|
||||||
from tests import assert_item_equals
|
|
||||||
from tests import normalize_item
|
from .. import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE, \
|
||||||
from vdirsyncer import exceptions
|
assert_item_equals, printable_characters_strategy
|
||||||
from vdirsyncer.storage.base import normalize_meta_value
|
|
||||||
from vdirsyncer.vobject import Item
|
|
||||||
|
|
||||||
|
|
||||||
def get_server_mixin(server_name):
|
def get_server_mixin(server_name):
|
||||||
from . import __name__ as base
|
from . import __name__ as base
|
||||||
|
x = __import__('{}.servers.{}'.format(base, server_name), fromlist=[''])
|
||||||
x = __import__(f"{base}.servers.{server_name}", fromlist=[""])
|
|
||||||
return x.ServerMixin
|
return x.ServerMixin
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -33,36 +27,35 @@ def format_item(item_template, uid=None):
|
||||||
return Item(item_template.format(r=r, uid=uid or r))
|
return Item(item_template.format(r=r, uid=uid or r))
|
||||||
|
|
||||||
|
|
||||||
class StorageTests:
|
class StorageTests(object):
|
||||||
storage_class = None
|
storage_class = None
|
||||||
supports_collections = True
|
supports_collections = True
|
||||||
supports_metadata = True
|
supports_metadata = True
|
||||||
|
|
||||||
@pytest.fixture(params=["VEVENT", "VTODO", "VCARD"])
|
@pytest.fixture(params=['VEVENT', 'VTODO', 'VCARD'])
|
||||||
def item_type(self, request):
|
def item_type(self, request):
|
||||||
"""Parametrize with all supported item types."""
|
'''Parametrize with all supported item types.'''
|
||||||
return request.param
|
return request.param
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self):
|
def get_storage_args(self):
|
||||||
"""
|
'''
|
||||||
Return a function with the following properties:
|
Return a function with the following properties:
|
||||||
|
|
||||||
:param collection: The name of the collection to create and use.
|
:param collection: The name of the collection to create and use.
|
||||||
"""
|
'''
|
||||||
raise NotImplementedError
|
raise NotImplementedError()
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
@pytest.fixture
|
||||||
async def s(self, get_storage_args):
|
def s(self, get_storage_args):
|
||||||
rv = self.storage_class(**await get_storage_args())
|
return self.storage_class(**get_storage_args())
|
||||||
return rv
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_item(self, item_type):
|
def get_item(self, item_type):
|
||||||
template = {
|
template = {
|
||||||
"VEVENT": EVENT_TEMPLATE,
|
'VEVENT': EVENT_TEMPLATE,
|
||||||
"VTODO": TASK_TEMPLATE,
|
'VTODO': TASK_TEMPLATE,
|
||||||
"VCARD": VCARD_TEMPLATE,
|
'VCARD': VCARD_TEMPLATE,
|
||||||
}[item_type]
|
}[item_type]
|
||||||
|
|
||||||
return lambda **kw: format_item(template, **kw)
|
return lambda **kw: format_item(template, **kw)
|
||||||
|
|
@ -70,359 +63,225 @@ class StorageTests:
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def requires_collections(self):
|
def requires_collections(self):
|
||||||
if not self.supports_collections:
|
if not self.supports_collections:
|
||||||
pytest.skip("This storage does not support collections.")
|
pytest.skip('This storage does not support collections.')
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def requires_metadata(self):
|
def requires_metadata(self):
|
||||||
if not self.supports_metadata:
|
if not self.supports_metadata:
|
||||||
pytest.skip("This storage does not support metadata.")
|
pytest.skip('This storage does not support metadata.')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_generic(self, s, get_item):
|
||||||
async def test_generic(self, s, get_item):
|
|
||||||
items = [get_item() for i in range(1, 10)]
|
items = [get_item() for i in range(1, 10)]
|
||||||
hrefs = []
|
hrefs = []
|
||||||
for item in items:
|
for item in items:
|
||||||
href, etag = await s.upload(item)
|
hrefs.append(s.upload(item))
|
||||||
if etag is None:
|
|
||||||
_, etag = await s.get(href)
|
|
||||||
hrefs.append((href, etag))
|
|
||||||
hrefs.sort()
|
hrefs.sort()
|
||||||
assert hrefs == sorted(await aiostream.stream.list(s.list()))
|
assert hrefs == sorted(s.list())
|
||||||
for href, etag in hrefs:
|
for href, etag in hrefs:
|
||||||
assert isinstance(href, (str, bytes))
|
assert isinstance(href, (text_type, bytes))
|
||||||
assert isinstance(etag, (str, bytes))
|
assert isinstance(etag, (text_type, bytes))
|
||||||
assert await s.has(href)
|
assert s.has(href)
|
||||||
item, etag2 = await s.get(href)
|
item, etag2 = s.get(href)
|
||||||
assert etag == etag2
|
assert etag == etag2
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_empty_get_multi(self, s):
|
||||||
async def test_empty_get_multi(self, s):
|
assert list(s.get_multi([])) == []
|
||||||
assert await aiostream.stream.list(s.get_multi([])) == []
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_get_multi_duplicates(self, s, get_item):
|
||||||
async def test_get_multi_duplicates(self, s, get_item):
|
href, etag = s.upload(get_item())
|
||||||
href, etag = await s.upload(get_item())
|
(href2, item, etag2), = s.get_multi([href] * 2)
|
||||||
if etag is None:
|
|
||||||
_, etag = await s.get(href)
|
|
||||||
((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
|
|
||||||
assert href2 == href
|
assert href2 == href
|
||||||
assert etag2 == etag
|
assert etag2 == etag
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_upload_already_existing(self, s, get_item):
|
||||||
async def test_upload_already_existing(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
await s.upload(item)
|
s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.upload(item)
|
s.upload(item)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_upload(self, s, get_item):
|
||||||
async def test_upload(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, _etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
assert_item_equals((await s.get(href))[0], item)
|
assert_item_equals(s.get(href)[0], item)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_update(self, s, get_item):
|
||||||
async def test_update(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
if etag is None:
|
assert_item_equals(s.get(href)[0], item)
|
||||||
_, etag = await s.get(href)
|
|
||||||
assert_item_equals((await s.get(href))[0], item)
|
|
||||||
|
|
||||||
new_item = get_item(uid=item.uid)
|
new_item = get_item(uid=item.uid)
|
||||||
new_etag = await s.update(href, new_item, etag)
|
new_etag = s.update(href, new_item, etag)
|
||||||
if new_etag is None:
|
# See https://github.com/untitaker/vdirsyncer/issues/48
|
||||||
_, new_etag = await s.get(href)
|
assert isinstance(new_etag, (bytes, text_type))
|
||||||
# See https://github.com/pimutils/vdirsyncer/issues/48
|
assert_item_equals(s.get(href)[0], new_item)
|
||||||
assert isinstance(new_etag, (bytes, str))
|
|
||||||
assert_item_equals((await s.get(href))[0], new_item)
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_update_nonexisting(self, s, get_item):
|
||||||
async def test_update_nonexisting(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.update("huehue", item, '"123"')
|
s.update('huehue', item, '"123"')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_wrong_etag(self, s, get_item):
|
||||||
async def test_wrong_etag(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, _etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.update(href, item, '"lolnope"')
|
s.update(href, item, '"lolnope"')
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.delete(href, '"lolnope"')
|
s.delete(href, '"lolnope"')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_delete(self, s, get_item):
|
||||||
async def test_delete(self, s, get_item):
|
href, etag = s.upload(get_item())
|
||||||
href, etag = await s.upload(get_item())
|
s.delete(href, etag)
|
||||||
await s.delete(href, etag)
|
assert not list(s.list())
|
||||||
assert not await aiostream.stream.list(s.list())
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_delete_nonexisting(self, s, get_item):
|
||||||
async def test_delete_nonexisting(self, s, get_item):
|
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.delete("1", '"123"')
|
s.delete('1', '"123"')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_list(self, s, get_item):
|
||||||
async def test_list(self, s, get_item):
|
assert not list(s.list())
|
||||||
assert not await aiostream.stream.list(s.list())
|
href, etag = s.upload(get_item())
|
||||||
href, etag = await s.upload(get_item())
|
assert list(s.list()) == [(href, etag)]
|
||||||
if etag is None:
|
|
||||||
_, etag = await s.get(href)
|
|
||||||
assert await aiostream.stream.list(s.list()) == [(href, etag)]
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_has(self, s, get_item):
|
||||||
async def test_has(self, s, get_item):
|
assert not s.has('asd')
|
||||||
assert not await s.has("asd")
|
href, etag = s.upload(get_item())
|
||||||
href, etag = await s.upload(get_item())
|
assert s.has(href)
|
||||||
assert await s.has(href)
|
assert not s.has('asd')
|
||||||
assert not await s.has("asd")
|
s.delete(href, etag)
|
||||||
await s.delete(href, etag)
|
assert not s.has(href)
|
||||||
assert not await s.has(href)
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_update_others_stay_the_same(self, s, get_item):
|
||||||
async def test_update_others_stay_the_same(self, s, get_item):
|
info = dict([
|
||||||
info = {}
|
s.upload(get_item()),
|
||||||
for _ in range(4):
|
s.upload(get_item()),
|
||||||
href, etag = await s.upload(get_item())
|
s.upload(get_item()),
|
||||||
if etag is None:
|
s.upload(get_item())
|
||||||
_, etag = await s.get(href)
|
])
|
||||||
info[href] = etag
|
|
||||||
|
|
||||||
items = await aiostream.stream.list(
|
assert dict(
|
||||||
s.get_multi(href for href, etag in info.items())
|
(href, etag) for href, item, etag
|
||||||
)
|
in s.get_multi(href for href, etag in iteritems(info))
|
||||||
assert {href: etag for href, item, etag in items} == info
|
) == info
|
||||||
|
|
||||||
def test_repr(self, s):
|
def test_repr(self, s, get_storage_args):
|
||||||
assert self.storage_class.__name__ in repr(s)
|
assert self.storage_class.__name__ in repr(s)
|
||||||
assert s.instance_name is None
|
assert s.instance_name is None
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_discover(self, requires_collections, get_storage_args, get_item):
|
||||||
async def test_discover(
|
expected = set()
|
||||||
self,
|
items = {}
|
||||||
requires_collections,
|
|
||||||
get_storage_args,
|
|
||||||
get_item,
|
|
||||||
aio_connector,
|
|
||||||
):
|
|
||||||
collections = set()
|
|
||||||
for i in range(1, 5):
|
for i in range(1, 5):
|
||||||
collection = f"test{i}"
|
# Create collections, but use the "collection" attribute because
|
||||||
s = self.storage_class(**await get_storage_args(collection=collection))
|
# Radicale requires file extensions in their names.
|
||||||
assert not await aiostream.stream.list(s.list())
|
collection = 'test{}'.format(i)
|
||||||
await s.upload(get_item())
|
s = self.storage_class(
|
||||||
collections.add(s.collection)
|
**self.storage_class.create_collection(
|
||||||
|
**get_storage_args(collection=collection)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
discovered = await aiostream.stream.list(
|
items[s.collection] = [s.upload(get_item())]
|
||||||
self.storage_class.discover(**await get_storage_args(collection=None))
|
expected.add(s.collection)
|
||||||
)
|
|
||||||
actual = {c["collection"] for c in discovered}
|
|
||||||
|
|
||||||
assert actual >= collections
|
d = self.storage_class.discover(
|
||||||
|
**get_storage_args(collection=None))
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
actual = set(args['collection'] for args in d)
|
||||||
async def test_create_collection(
|
assert actual >= expected
|
||||||
self,
|
|
||||||
requires_collections,
|
|
||||||
get_storage_args,
|
|
||||||
get_item,
|
|
||||||
):
|
|
||||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
|
|
||||||
pytest.skip("Manual cleanup would be necessary.")
|
|
||||||
if getattr(self, "dav_server", "") == "radicale":
|
|
||||||
pytest.skip("Radicale does not support collection creation")
|
|
||||||
|
|
||||||
args = await get_storage_args(collection=None)
|
for storage_args in d:
|
||||||
args["collection"] = "test"
|
collection = storage_args['collection']
|
||||||
|
if collection not in expected:
|
||||||
|
continue
|
||||||
|
s = self.storage_class(**storage_args)
|
||||||
|
rv = list(s.list())
|
||||||
|
assert rv == items[collection]
|
||||||
|
|
||||||
s = self.storage_class(**await self.storage_class.create_collection(**args))
|
def test_discover_collection_arg(self, requires_collections,
|
||||||
|
get_storage_args):
|
||||||
href = (await s.upload(get_item()))[0]
|
args = get_storage_args(collection='test2')
|
||||||
assert href in await aiostream.stream.list(
|
|
||||||
(href async for href, etag in s.list())
|
|
||||||
)
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_discover_collection_arg(
|
|
||||||
self, requires_collections, get_storage_args
|
|
||||||
):
|
|
||||||
args = await get_storage_args(collection="test2")
|
|
||||||
with pytest.raises(TypeError) as excinfo:
|
with pytest.raises(TypeError) as excinfo:
|
||||||
await aiostream.stream.list(self.storage_class.discover(**args))
|
list(self.storage_class.discover(**args))
|
||||||
|
|
||||||
assert "collection argument must not be given" in str(excinfo.value)
|
assert 'collection argument must not be given' in str(excinfo.value)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_collection_arg(self, requires_collections, get_storage_args):
|
||||||
async def test_collection_arg(self, get_storage_args):
|
s = self.storage_class(**get_storage_args(collection='test2'))
|
||||||
if self.supports_collections:
|
# Can't do stronger assertion because of radicale, which needs a
|
||||||
s = self.storage_class(**await get_storage_args(collection="test2"))
|
# fileextension to guess the collection type.
|
||||||
# Can't do stronger assertion because of radicale, which needs a
|
assert 'test2' in s.collection
|
||||||
# fileextension to guess the collection type.
|
|
||||||
assert "test2" in s.collection
|
|
||||||
else:
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
self.storage_class(collection="ayy", **await get_storage_args())
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_case_sensitive_uids(self, s, get_item):
|
||||||
async def test_case_sensitive_uids(self, s, get_item):
|
if s.storage_name == 'filesystem':
|
||||||
if s.storage_name == "filesystem":
|
pytest.skip('Behavior depends on the filesystem.')
|
||||||
pytest.skip("Behavior depends on the filesystem.")
|
|
||||||
|
|
||||||
uid = str(uuid.uuid4())
|
s.upload(get_item(uid='A' * 42))
|
||||||
await s.upload(get_item(uid=uid.upper()))
|
s.upload(get_item(uid='a' * 42))
|
||||||
await s.upload(get_item(uid=uid.lower()))
|
items = list(href for href, etag in s.list())
|
||||||
items = [href async for href, etag in s.list()]
|
|
||||||
assert len(items) == 2
|
assert len(items) == 2
|
||||||
assert len(set(items)) == 2
|
assert len(set(items)) == 2
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_specialchars(self, monkeypatch, requires_collections,
|
||||||
async def test_specialchars(
|
get_storage_args, get_item):
|
||||||
self, monkeypatch, requires_collections, get_storage_args, get_item
|
if getattr(self, 'dav_server', '') == 'radicale':
|
||||||
):
|
pytest.skip('Radicale is fundamentally broken.')
|
||||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
|
|
||||||
pytest.skip("iCloud and FastMail reject this name.")
|
|
||||||
|
|
||||||
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
monkeypatch.setattr('vdirsyncer.utils.generate_href', lambda x: x)
|
||||||
|
|
||||||
uid = "test @ foo ät bar град сатану"
|
uid = u'test @ foo ät bar град сатану'
|
||||||
collection = "test @ foo ät bar"
|
collection = 'test @ foo ät bar'
|
||||||
|
|
||||||
s = self.storage_class(**await get_storage_args(collection=collection))
|
s = self.storage_class(**get_storage_args(collection=collection))
|
||||||
item = get_item(uid=uid)
|
item = get_item(uid=uid)
|
||||||
|
|
||||||
href, etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
item2, etag2 = await s.get(href)
|
item2, etag2 = s.get(href)
|
||||||
if etag is not None:
|
assert etag2 == etag
|
||||||
assert etag2 == etag
|
assert_item_equals(item2, item)
|
||||||
assert_item_equals(item2, item)
|
|
||||||
|
|
||||||
((_, etag3),) = await aiostream.stream.list(s.list())
|
(href2, etag2), = s.list()
|
||||||
assert etag2 == etag3
|
assert etag2 == etag
|
||||||
|
|
||||||
|
# https://github.com/owncloud/contacts/issues/581
|
||||||
|
assert href2.replace('%2B', '%20') == href
|
||||||
|
|
||||||
|
item2, etag2 = s.get(href)
|
||||||
|
assert etag2 == etag
|
||||||
|
assert_item_equals(item2, item)
|
||||||
|
|
||||||
assert collection in urlunquote(s.collection)
|
assert collection in urlunquote(s.collection)
|
||||||
if self.storage_class.storage_name.endswith("dav"):
|
if self.storage_class.storage_name.endswith('dav'):
|
||||||
assert urlquote(uid, "/@:") in href
|
assert urlquote(uid, '/@:') in href
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_metadata(self, requires_metadata, s):
|
||||||
async def test_newline_in_uid(
|
if not getattr(self, 'dav_server', ''):
|
||||||
self, monkeypatch, requires_collections, get_storage_args, get_item
|
assert not s.get_meta('color')
|
||||||
):
|
assert not s.get_meta('displayname')
|
||||||
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
|
||||||
|
|
||||||
uid = "UID:20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%0A.ics"
|
|
||||||
|
|
||||||
s = self.storage_class(**await get_storage_args())
|
|
||||||
item = get_item(uid=uid)
|
|
||||||
|
|
||||||
href, etag = await s.upload(item)
|
|
||||||
item2, etag2 = await s.get(href)
|
|
||||||
if etag is not None:
|
|
||||||
assert etag2 == etag
|
|
||||||
assert_item_equals(item2, item)
|
|
||||||
|
|
||||||
((_, etag3),) = await aiostream.stream.list(s.list())
|
|
||||||
assert etag2 == etag3
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_empty_metadata(self, requires_metadata, s):
|
|
||||||
if getattr(self, "dav_server", ""):
|
|
||||||
pytest.skip()
|
|
||||||
|
|
||||||
assert await s.get_meta("color") is None
|
|
||||||
assert await s.get_meta("displayname") is None
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_metadata(self, requires_metadata, s):
|
|
||||||
if getattr(self, "dav_server", "") == "xandikos":
|
|
||||||
pytest.skip("xandikos does not support removing metadata.")
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await s.set_meta("color", None)
|
s.set_meta('color', None)
|
||||||
assert await s.get_meta("color") is None
|
assert not s.get_meta('color')
|
||||||
await s.set_meta("color", "#ff0000")
|
s.set_meta('color', u'#ff0000')
|
||||||
assert await s.get_meta("color") == "#ff0000"
|
assert s.get_meta('color') == u'#ff0000'
|
||||||
except exceptions.UnsupportedMetadataError:
|
except exceptions.UnsupportedMetadataError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
for x in (u'hello world', u'hello wörld'):
|
||||||
async def test_encoding_metadata(self, requires_metadata, s):
|
s.set_meta('displayname', x)
|
||||||
for x in ("hello world", "hello wörld"):
|
rv = s.get_meta('displayname')
|
||||||
await s.set_meta("displayname", x)
|
|
||||||
rv = await s.get_meta("displayname")
|
|
||||||
assert rv == x
|
assert rv == x
|
||||||
assert isinstance(rv, str)
|
assert isinstance(rv, text_type)
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@given(value=st.one_of(
|
||||||
"value",
|
st.none(),
|
||||||
[
|
printable_characters_strategy.filter(lambda x: x.strip() != x)
|
||||||
None,
|
))
|
||||||
"",
|
def test_metadata_normalization(self, requires_metadata, s, value):
|
||||||
"Hello there!",
|
x = s.get_meta('displayname')
|
||||||
"Österreich",
|
|
||||||
"中国",
|
|
||||||
"한글",
|
|
||||||
"42a4ec99-b1c2-4859-b142-759112f2ca50",
|
|
||||||
"فلسطين",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_metadata_normalization(self, requires_metadata, s, value):
|
|
||||||
x = await s.get_meta("displayname")
|
|
||||||
assert x == normalize_meta_value(x)
|
assert x == normalize_meta_value(x)
|
||||||
|
|
||||||
if not getattr(self, "dav_server", None):
|
if not getattr(self, 'dav_server', None):
|
||||||
# ownCloud replaces "" with "unnamed"
|
# ownCloud replaces "" with "unnamed"
|
||||||
await s.set_meta("displayname", value)
|
s.set_meta('displayname', value)
|
||||||
assert await s.get_meta("displayname") == normalize_meta_value(value)
|
assert s.get_meta('displayname') == normalize_meta_value(value)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_recurring_events(self, s, item_type):
|
|
||||||
if item_type != "VEVENT":
|
|
||||||
pytest.skip("This storage instance doesn't support iCalendar.")
|
|
||||||
|
|
||||||
uid = str(uuid.uuid4())
|
|
||||||
item = Item(
|
|
||||||
textwrap.dedent(
|
|
||||||
f"""
|
|
||||||
BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART;TZID=UTC:20140325T084000Z
|
|
||||||
DTEND;TZID=UTC:20140325T101000Z
|
|
||||||
DTSTAMP:20140327T060506Z
|
|
||||||
UID:{uid}
|
|
||||||
RECURRENCE-ID;TZID=UTC:20140325T083000Z
|
|
||||||
CREATED:20131216T033331Z
|
|
||||||
DESCRIPTION:
|
|
||||||
LAST-MODIFIED:20140327T060215Z
|
|
||||||
LOCATION:
|
|
||||||
SEQUENCE:1
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:test Event
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART;TZID=UTC:20140128T083000Z
|
|
||||||
DTEND;TZID=UTC:20140128T100000Z
|
|
||||||
RRULE:FREQ=WEEKLY;BYDAY=TU;UNTIL=20141208T213000Z
|
|
||||||
DTSTAMP:20140327T060506Z
|
|
||||||
UID:{uid}
|
|
||||||
CREATED:20131216T033331Z
|
|
||||||
DESCRIPTION:
|
|
||||||
LAST-MODIFIED:20140222T101012Z
|
|
||||||
LOCATION:
|
|
||||||
SEQUENCE:0
|
|
||||||
STATUS:CONFIRMED
|
|
||||||
SUMMARY:Test event
|
|
||||||
TRANSP:OPAQUE
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
"""
|
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
href, _etag = await s.upload(item)
|
|
||||||
|
|
||||||
item2, _etag2 = await s.get(href)
|
|
||||||
assert normalize_item(item) == normalize_item(item2)
|
|
||||||
|
|
|
||||||
|
|
@ -1,116 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import contextlib
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import aiostream
|
|
||||||
import pytest
|
|
||||||
import pytest_asyncio
|
|
||||||
import requests
|
|
||||||
|
|
||||||
|
|
||||||
def wait_for_container(url):
|
|
||||||
"""Wait for a container to initialise.
|
|
||||||
|
|
||||||
Polls a URL every 100ms until the server responds.
|
|
||||||
"""
|
|
||||||
# give the server 5 seconds to settle
|
|
||||||
for _ in range(50):
|
|
||||||
print(_)
|
|
||||||
|
|
||||||
try:
|
|
||||||
response = requests.get(url)
|
|
||||||
response.raise_for_status()
|
|
||||||
except requests.ConnectionError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
|
|
||||||
time.sleep(0.1)
|
|
||||||
|
|
||||||
pytest.exit(
|
|
||||||
"Server did not initialise in 5 seconds.\n"
|
|
||||||
"WARNING: There may be a stale docker container still running."
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def dockerised_server(name, container_port, exposed_port):
|
|
||||||
"""Run a dockerised DAV server as a contenxt manager."""
|
|
||||||
container_id = None
|
|
||||||
url = f"http://127.0.0.1:{exposed_port}/"
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Hint: This will block while the pull happends, and only return once
|
|
||||||
# the container has actually started.
|
|
||||||
output = subprocess.check_output(
|
|
||||||
[
|
|
||||||
"docker",
|
|
||||||
"run",
|
|
||||||
"--rm",
|
|
||||||
"--detach",
|
|
||||||
"--publish",
|
|
||||||
f"{exposed_port}:{container_port}",
|
|
||||||
f"whynothugo/vdirsyncer-devkit-{name}",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
container_id = output.decode().strip()
|
|
||||||
wait_for_container(url)
|
|
||||||
|
|
||||||
yield url
|
|
||||||
finally:
|
|
||||||
if container_id:
|
|
||||||
subprocess.check_output(["docker", "kill", container_id])
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
|
||||||
def baikal_server():
|
|
||||||
with dockerised_server("baikal", "80", "8002"):
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
|
||||||
def radicale_server():
|
|
||||||
with dockerised_server("radicale", "8001", "8001"):
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
|
||||||
def xandikos_server():
|
|
||||||
with dockerised_server("xandikos", "8000", "8000"):
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
|
||||||
async def slow_create_collection(request, aio_connector):
|
|
||||||
# We need to properly clean up because otherwise we might run into
|
|
||||||
# storage limits.
|
|
||||||
to_delete = []
|
|
||||||
|
|
||||||
async def inner(cls: type, args: dict, collection_name: str) -> dict:
|
|
||||||
"""Create a collection
|
|
||||||
|
|
||||||
Returns args necessary to create a Storage instance pointing to it.
|
|
||||||
"""
|
|
||||||
assert collection_name.startswith("test")
|
|
||||||
|
|
||||||
# Make each name unique
|
|
||||||
collection_name = f"{collection_name}-vdirsyncer-ci-{uuid.uuid4()}"
|
|
||||||
|
|
||||||
# Create the collection:
|
|
||||||
args = await cls.create_collection(collection_name, **args)
|
|
||||||
collection = cls(**args)
|
|
||||||
|
|
||||||
# Keep collection in a list to be deleted once tests end:
|
|
||||||
to_delete.append(collection)
|
|
||||||
|
|
||||||
assert not await aiostream.stream.list(collection.list())
|
|
||||||
return args
|
|
||||||
|
|
||||||
yield inner
|
|
||||||
|
|
||||||
await asyncio.gather(*(c.session.request("DELETE", "") for c in to_delete))
|
|
||||||
|
|
@ -1,53 +1,51 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import uuid
|
|
||||||
|
|
||||||
import aiohttp
|
import requests
|
||||||
import aiostream
|
import requests.exceptions
|
||||||
import pytest
|
|
||||||
|
|
||||||
from tests import assert_item_equals
|
from tests import assert_item_equals
|
||||||
from tests.storage import StorageTests
|
|
||||||
from tests.storage import get_server_mixin
|
|
||||||
from vdirsyncer import exceptions
|
|
||||||
from vdirsyncer.vobject import Item
|
|
||||||
|
|
||||||
dav_server = os.environ.get("DAV_SERVER", "skip")
|
import vdirsyncer.exceptions as exceptions
|
||||||
|
from vdirsyncer.storage.base import Item
|
||||||
|
|
||||||
|
from .. import StorageTests, get_server_mixin
|
||||||
|
|
||||||
|
|
||||||
|
dav_server = os.environ['DAV_SERVER']
|
||||||
ServerMixin = get_server_mixin(dav_server)
|
ServerMixin = get_server_mixin(dav_server)
|
||||||
|
|
||||||
|
|
||||||
class DAVStorageTests(ServerMixin, StorageTests):
|
class DavStorageTests(ServerMixin, StorageTests):
|
||||||
dav_server = dav_server
|
dav_server = dav_server
|
||||||
|
|
||||||
@pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
|
def test_dav_broken_item(self, s):
|
||||||
@pytest.mark.asyncio
|
item = Item(u'HAHA:YES')
|
||||||
async def test_dav_broken_item(self, s):
|
try:
|
||||||
item = Item("HAHA:YES")
|
s.upload(item)
|
||||||
with pytest.raises((exceptions.Error, aiohttp.ClientResponseError)):
|
except (exceptions.Error, requests.exceptions.HTTPError):
|
||||||
await s.upload(item)
|
pass
|
||||||
assert not await aiostream.stream.list(s.list())
|
assert not list(s.list())
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
||||||
async def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
|
||||||
def breakdown(*a, **kw):
|
def breakdown(*a, **kw):
|
||||||
raise AssertionError("Expected not to be called.")
|
raise AssertionError('Expected not to be called.')
|
||||||
|
|
||||||
monkeypatch.setattr("requests.sessions.Session.request", breakdown)
|
monkeypatch.setattr('requests.sessions.Session.request', breakdown)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
assert list(await aiostream.stream.list(s.get_multi([]))) == []
|
assert list(s.get_multi([])) == []
|
||||||
finally:
|
finally:
|
||||||
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
||||||
monkeypatch.undo()
|
monkeypatch.undo()
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
||||||
async def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
if self.dav_server != 'radicale':
|
||||||
if self.dav_server == "radicale":
|
# Radicale is unable to deal with unicode hrefs
|
||||||
pytest.skip("Radicale is unable to deal with unicode hrefs")
|
monkeypatch.setattr(s, '_get_href',
|
||||||
|
lambda item: item.ident + s.fileext)
|
||||||
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
|
item = get_item(uid=u'lolätvdirsynceröü град сатану')
|
||||||
item = get_item(uid="град сатану" + str(uuid.uuid4()))
|
href, etag = s.upload(item)
|
||||||
href, _etag = await s.upload(item)
|
item2, etag2 = s.get(href)
|
||||||
item2, _etag2 = await s.get(href)
|
|
||||||
assert_item_equals(item, item2)
|
assert_item_equals(item, item2)
|
||||||
|
|
|
||||||
|
|
@ -1,60 +1,50 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import contextlib
|
|
||||||
import datetime
|
import datetime
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import aiostream
|
|
||||||
import pytest
|
import pytest
|
||||||
from aioresponses import aioresponses
|
|
||||||
|
|
||||||
from tests import EVENT_TEMPLATE
|
import requests
|
||||||
from tests import TASK_TEMPLATE
|
import requests.exceptions
|
||||||
from tests import VCARD_TEMPLATE
|
|
||||||
from tests.storage import format_item
|
from tests import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE
|
||||||
|
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.storage.dav import CalDAVStorage
|
from vdirsyncer.storage.dav import CaldavStorage
|
||||||
|
|
||||||
from . import DAVStorageTests
|
from . import DavStorageTests, dav_server
|
||||||
from . import dav_server
|
from .. import format_item
|
||||||
|
|
||||||
|
|
||||||
class TestCalDAVStorage(DAVStorageTests):
|
class TestCaldavStorage(DavStorageTests):
|
||||||
storage_class = CalDAVStorage
|
storage_class = CaldavStorage
|
||||||
|
|
||||||
@pytest.fixture(params=["VTODO", "VEVENT"])
|
@pytest.fixture(params=['VTODO', 'VEVENT'])
|
||||||
def item_type(self, request):
|
def item_type(self, request):
|
||||||
return request.param
|
return request.param
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
||||||
async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
s = self.storage_class(item_types=(item_type,), **get_storage_args())
|
||||||
s = self.storage_class(item_types=(item_type,), **await get_storage_args())
|
|
||||||
|
|
||||||
# Most storages hard-fail, but xandikos doesn't.
|
try:
|
||||||
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
|
s.upload(format_item(VCARD_TEMPLATE))
|
||||||
await s.upload(format_item(VCARD_TEMPLATE))
|
except (exceptions.Error, requests.exceptions.HTTPError):
|
||||||
|
pass
|
||||||
assert not await aiostream.stream.list(s.list())
|
assert not list(s.list())
|
||||||
|
|
||||||
# The `arg` param is not named `item_types` because that would hit
|
# The `arg` param is not named `item_types` because that would hit
|
||||||
# https://bitbucket.org/pytest-dev/pytest/issue/745/
|
# https://bitbucket.org/pytest-dev/pytest/issue/745/
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize('arg,calls_num', [
|
||||||
("arg", "calls_num"),
|
(('VTODO',), 1),
|
||||||
[
|
(('VEVENT',), 1),
|
||||||
(("VTODO",), 1),
|
(('VTODO', 'VEVENT'), 2),
|
||||||
(("VEVENT",), 1),
|
(('VTODO', 'VEVENT', 'VJOURNAL'), 3),
|
||||||
(("VTODO", "VEVENT"), 2),
|
((), 1)
|
||||||
(("VTODO", "VEVENT", "VJOURNAL"), 3),
|
])
|
||||||
((), 1),
|
def test_item_types_performance(self, get_storage_args, arg, calls_num,
|
||||||
],
|
monkeypatch):
|
||||||
)
|
s = self.storage_class(item_types=arg, **get_storage_args())
|
||||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_item_types_performance(
|
|
||||||
self, get_storage_args, arg, calls_num, monkeypatch
|
|
||||||
):
|
|
||||||
s = self.storage_class(item_types=arg, **await get_storage_args())
|
|
||||||
old_parse = s._parse_prop_responses
|
old_parse = s._parse_prop_responses
|
||||||
calls = []
|
calls = []
|
||||||
|
|
||||||
|
|
@ -62,24 +52,19 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
calls.append(None)
|
calls.append(None)
|
||||||
return old_parse(*a, **kw)
|
return old_parse(*a, **kw)
|
||||||
|
|
||||||
monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
|
monkeypatch.setattr(s, '_parse_prop_responses', new_parse)
|
||||||
await aiostream.stream.list(s.list())
|
list(s.list())
|
||||||
assert len(calls) == calls_num
|
assert len(calls) == calls_num
|
||||||
|
|
||||||
@pytest.mark.xfail(
|
@pytest.mark.xfail(dav_server == 'radicale',
|
||||||
dav_server == "radicale", reason="Radicale doesn't support timeranges."
|
reason='Radicale doesn\'t support timeranges.')
|
||||||
)
|
def test_timerange_correctness(self, get_storage_args):
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_timerange_correctness(self, get_storage_args):
|
|
||||||
start_date = datetime.datetime(2013, 9, 10)
|
start_date = datetime.datetime(2013, 9, 10)
|
||||||
end_date = datetime.datetime(2013, 9, 13)
|
end_date = datetime.datetime(2013, 9, 13)
|
||||||
s = self.storage_class(
|
s = self.storage_class(start_date=start_date, end_date=end_date,
|
||||||
start_date=start_date, end_date=end_date, **await get_storage_args()
|
**get_storage_args())
|
||||||
)
|
|
||||||
|
|
||||||
too_old_item = format_item(
|
too_old_item = format_item(dedent(u'''
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
|
@ -91,13 +76,9 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
UID:{r}
|
UID:{r}
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
"""
|
''').strip())
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
too_new_item = format_item(
|
too_new_item = format_item(dedent(u'''
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
|
@ -109,13 +90,9 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
UID:{r}
|
UID:{r}
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
"""
|
''').strip())
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
good_item = format_item(
|
good_item = format_item(dedent(u'''
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
|
@ -127,48 +104,42 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
UID:{r}
|
UID:{r}
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
"""
|
''').strip())
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
await s.upload(too_old_item)
|
s.upload(too_old_item)
|
||||||
await s.upload(too_new_item)
|
s.upload(too_new_item)
|
||||||
expected_href, _ = await s.upload(good_item)
|
href, etag = s.upload(good_item)
|
||||||
|
|
||||||
((actual_href, _),) = await aiostream.stream.list(s.list())
|
assert list(s.list()) == [(href, etag)]
|
||||||
assert actual_href == expected_href
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_invalid_resource(self, monkeypatch, get_storage_args):
|
||||||
async def test_invalid_resource(self, monkeypatch, get_storage_args):
|
calls = []
|
||||||
args = await get_storage_args(collection=None)
|
args = get_storage_args(collection=None)
|
||||||
|
|
||||||
with aioresponses() as m:
|
def request(session, method, url, **kwargs):
|
||||||
m.add(args["url"], method="PROPFIND", status=200, body="Hello world")
|
assert url == args['url']
|
||||||
|
calls.append(None)
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
r = requests.Response()
|
||||||
s = self.storage_class(**args)
|
r.status_code = 200
|
||||||
await aiostream.stream.list(s.list())
|
r._content = 'Hello World.'
|
||||||
|
return r
|
||||||
|
|
||||||
assert len(m.requests) == 1
|
monkeypatch.setattr('requests.sessions.Session.request', request)
|
||||||
|
|
||||||
@pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
|
with pytest.raises(ValueError):
|
||||||
@pytest.mark.skipif(
|
s = self.storage_class(**args)
|
||||||
dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
|
list(s.list())
|
||||||
)
|
assert len(calls) == 1
|
||||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_item_types_general(self, s):
|
|
||||||
event = (await s.upload(format_item(EVENT_TEMPLATE)))[0]
|
|
||||||
task = (await s.upload(format_item(TASK_TEMPLATE)))[0]
|
|
||||||
s.item_types = ("VTODO", "VEVENT")
|
|
||||||
|
|
||||||
async def hrefs():
|
def test_item_types_general(self, s):
|
||||||
return {href async for href, etag in s.list()}
|
event = s.upload(format_item(EVENT_TEMPLATE))
|
||||||
|
task = s.upload(format_item(TASK_TEMPLATE))
|
||||||
assert await hrefs() == {event, task}
|
s.item_types = ('VTODO', 'VEVENT')
|
||||||
s.item_types = ("VTODO",)
|
assert set(s.list()) == set([event, task])
|
||||||
assert await hrefs() == {task}
|
s.item_types = ('VTODO',)
|
||||||
s.item_types = ("VEVENT",)
|
assert set(s.list()) == set([task])
|
||||||
assert await hrefs() == {event}
|
s.item_types = ('VEVENT',)
|
||||||
|
assert set(s.list()) == set([event])
|
||||||
s.item_types = ()
|
s.item_types = ()
|
||||||
assert await hrefs() == {event, task}
|
assert set(s.list()) == set([event, task])
|
||||||
|
|
|
||||||
|
|
@ -1,15 +1,15 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer.storage.dav import CardDAVStorage
|
from vdirsyncer.storage.dav import CarddavStorage
|
||||||
|
|
||||||
from . import DAVStorageTests
|
from . import DavStorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestCardDAVStorage(DAVStorageTests):
|
class TestCarddavStorage(DavStorageTests):
|
||||||
storage_class = CardDAVStorage
|
storage_class = CarddavStorage
|
||||||
|
|
||||||
@pytest.fixture(params=["VCARD"])
|
@pytest.fixture(params=['VCARD'])
|
||||||
def item_type(self, request):
|
def item_type(self, request):
|
||||||
return request.param
|
return request.param
|
||||||
|
|
|
||||||
|
|
@ -1,59 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from vdirsyncer.storage.dav import _BAD_XML_CHARS
|
|
||||||
from vdirsyncer.storage.dav import _merge_xml
|
|
||||||
from vdirsyncer.storage.dav import _normalize_href
|
|
||||||
from vdirsyncer.storage.dav import _parse_xml
|
|
||||||
|
|
||||||
|
|
||||||
def test_xml_utilities():
|
|
||||||
x = _parse_xml(
|
|
||||||
b"""<?xml version="1.0" encoding="UTF-8" ?>
|
|
||||||
<multistatus xmlns="DAV:">
|
|
||||||
<response>
|
|
||||||
<propstat>
|
|
||||||
<status>HTTP/1.1 404 Not Found</status>
|
|
||||||
<prop>
|
|
||||||
<getcontenttype/>
|
|
||||||
</prop>
|
|
||||||
</propstat>
|
|
||||||
<propstat>
|
|
||||||
<prop>
|
|
||||||
<resourcetype>
|
|
||||||
<collection/>
|
|
||||||
</resourcetype>
|
|
||||||
</prop>
|
|
||||||
</propstat>
|
|
||||||
</response>
|
|
||||||
</multistatus>
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
response = x.find("{DAV:}response")
|
|
||||||
props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
|
|
||||||
assert props.find("{DAV:}resourcetype/{DAV:}collection") is not None
|
|
||||||
assert props.find("{DAV:}getcontenttype") is not None
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("char", range(32))
|
|
||||||
def test_xml_specialchars(char):
|
|
||||||
x = _parse_xml(
|
|
||||||
'<?xml version="1.0" encoding="UTF-8" ?>'
|
|
||||||
f"<foo>ye{chr(char)}s\r\n"
|
|
||||||
"hello</foo>".encode("ascii")
|
|
||||||
)
|
|
||||||
|
|
||||||
if char in _BAD_XML_CHARS:
|
|
||||||
assert x.text == "yes\nhello"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"href",
|
|
||||||
[
|
|
||||||
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_normalize_href(href):
|
|
||||||
assert href == _normalize_href("https://example.com", href)
|
|
||||||
10
tests/storage/dav/test_utils.py
Normal file
10
tests/storage/dav/test_utils.py
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from vdirsyncer.storage.dav import _parse_xml
|
||||||
|
|
||||||
|
|
||||||
|
def test_broken_xml(capsys):
|
||||||
|
rv = _parse_xml(b'<h1>\x10haha</h1>')
|
||||||
|
assert rv.text == 'haha'
|
||||||
|
warnings = capsys.readouterr()[1]
|
||||||
|
assert 'partially invalid xml' in warnings.lower()
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
1
tests/storage/servers/baikal
Submodule
1
tests/storage/servers/baikal
Submodule
|
|
@ -0,0 +1 @@
|
||||||
|
Subproject commit b3f2f5df327dec0add73a262e015954fb56287f6
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
|
||||||
def get_storage_args(
|
|
||||||
self,
|
|
||||||
request,
|
|
||||||
tmpdir,
|
|
||||||
slow_create_collection,
|
|
||||||
baikal_server,
|
|
||||||
aio_connector,
|
|
||||||
):
|
|
||||||
async def inner(collection="test"):
|
|
||||||
base_url = "http://127.0.0.1:8002/"
|
|
||||||
args = {
|
|
||||||
"url": base_url,
|
|
||||||
"username": "baikal",
|
|
||||||
"password": "baikal",
|
|
||||||
"connector": aio_connector,
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.storage_class.fileext == ".vcf":
|
|
||||||
args["url"] = base_url + "card.php/"
|
|
||||||
else:
|
|
||||||
args["url"] = base_url + "cal.php/"
|
|
||||||
|
|
||||||
if collection is not None:
|
|
||||||
args = await slow_create_collection(
|
|
||||||
self.storage_class,
|
|
||||||
args,
|
|
||||||
collection,
|
|
||||||
)
|
|
||||||
return args
|
|
||||||
|
|
||||||
return inner
|
|
||||||
1
tests/storage/servers/davical
Submodule
1
tests/storage/servers/davical
Submodule
|
|
@ -0,0 +1 @@
|
||||||
|
Subproject commit cce1273cc883f3cdb3ccf37097b29ac0263b7055
|
||||||
|
|
@ -1,50 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
try:
|
|
||||||
caldav_args = {
|
|
||||||
# Those credentials are configured through the Travis UI
|
|
||||||
"username": os.environ["DAVICAL_USERNAME"].strip(),
|
|
||||||
"password": os.environ["DAVICAL_PASSWORD"].strip(),
|
|
||||||
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
|
|
||||||
}
|
|
||||||
except KeyError as e:
|
|
||||||
pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.flaky(reruns=5)
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
|
||||||
def davical_args(self):
|
|
||||||
if self.storage_class.fileext == ".ics":
|
|
||||||
return dict(caldav_args)
|
|
||||||
elif self.storage_class.fileext == ".vcf":
|
|
||||||
pytest.skip("No carddav")
|
|
||||||
else:
|
|
||||||
raise RuntimeError
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def get_storage_args(self, davical_args, request):
|
|
||||||
async def inner(collection="test"):
|
|
||||||
if collection is None:
|
|
||||||
return davical_args
|
|
||||||
|
|
||||||
assert collection.startswith("test")
|
|
||||||
|
|
||||||
for _ in range(4):
|
|
||||||
args = self.storage_class.create_collection(
|
|
||||||
collection + str(uuid.uuid4()), **davical_args
|
|
||||||
)
|
|
||||||
s = self.storage_class(**args)
|
|
||||||
if not list(s.list()):
|
|
||||||
# See: https://stackoverflow.com/a/33984811
|
|
||||||
request.addfinalizer(lambda x=s: x.session.request("DELETE", ""))
|
|
||||||
return args
|
|
||||||
|
|
||||||
raise RuntimeError("Failed to find free collection.")
|
|
||||||
|
|
||||||
return inner
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
pip install pytest-rerunfailures
|
|
||||||
|
|
@ -1,42 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
|
||||||
def get_storage_args(self, slow_create_collection, aio_connector, request):
|
|
||||||
if (
|
|
||||||
"item_type" in request.fixturenames
|
|
||||||
and request.getfixturevalue("item_type") == "VTODO"
|
|
||||||
):
|
|
||||||
# Fastmail has non-standard support for TODOs
|
|
||||||
# See https://github.com/pimutils/vdirsyncer/issues/824
|
|
||||||
pytest.skip("Fastmail has non-standard VTODO support.")
|
|
||||||
|
|
||||||
async def inner(collection="test"):
|
|
||||||
args = {
|
|
||||||
"username": os.environ["FASTMAIL_USERNAME"],
|
|
||||||
"password": os.environ["FASTMAIL_PASSWORD"],
|
|
||||||
"connector": aio_connector,
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.storage_class.fileext == ".ics":
|
|
||||||
args["url"] = "https://caldav.fastmail.com/"
|
|
||||||
elif self.storage_class.fileext == ".vcf":
|
|
||||||
args["url"] = "https://carddav.fastmail.com/"
|
|
||||||
else:
|
|
||||||
raise RuntimeError
|
|
||||||
|
|
||||||
if collection is not None:
|
|
||||||
args = await slow_create_collection(
|
|
||||||
self.storage_class,
|
|
||||||
args,
|
|
||||||
collection,
|
|
||||||
)
|
|
||||||
|
|
||||||
return args
|
|
||||||
|
|
||||||
return inner
|
|
||||||
|
|
@ -1,33 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
|
||||||
def get_storage_args(self, item_type, slow_create_collection):
|
|
||||||
if item_type != "VEVENT":
|
|
||||||
# iCloud collections can either be calendars or task lists.
|
|
||||||
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
|
|
||||||
pytest.skip("iCloud doesn't support anything else than VEVENT")
|
|
||||||
|
|
||||||
async def inner(collection="test"):
|
|
||||||
args = {
|
|
||||||
"username": os.environ["ICLOUD_USERNAME"],
|
|
||||||
"password": os.environ["ICLOUD_PASSWORD"],
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.storage_class.fileext == ".ics":
|
|
||||||
args["url"] = "https://caldav.icloud.com/"
|
|
||||||
elif self.storage_class.fileext == ".vcf":
|
|
||||||
args["url"] = "https://contacts.icloud.com/"
|
|
||||||
else:
|
|
||||||
raise RuntimeError
|
|
||||||
|
|
||||||
if collection is not None:
|
|
||||||
args = slow_create_collection(self.storage_class, args, collection)
|
|
||||||
return args
|
|
||||||
|
|
||||||
return inner
|
|
||||||
1
tests/storage/servers/mysteryshack
Submodule
1
tests/storage/servers/mysteryshack
Submodule
|
|
@ -0,0 +1 @@
|
||||||
|
Subproject commit addee3272a4289b78e3c816e0fcb4ccace0df336
|
||||||
1
tests/storage/servers/owncloud
Submodule
1
tests/storage/servers/owncloud
Submodule
|
|
@ -0,0 +1 @@
|
||||||
|
Subproject commit 9f4b305b7e77fa42f8c1875099236ecb792b40dc
|
||||||
|
|
@ -1,33 +1,125 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer.utils.compat import urlquote
|
||||||
|
|
||||||
|
import wsgi_intercept
|
||||||
|
import wsgi_intercept.requests_intercept
|
||||||
|
|
||||||
|
|
||||||
|
RADICALE_SCHEMA = '''
|
||||||
|
create table collection (
|
||||||
|
path varchar(200) not null,
|
||||||
|
parent_path varchar(200) references collection (path),
|
||||||
|
primary key (path));
|
||||||
|
|
||||||
|
create table item (
|
||||||
|
name varchar(200) not null,
|
||||||
|
tag text not null,
|
||||||
|
collection_path varchar(200) references collection (path),
|
||||||
|
primary key (name));
|
||||||
|
|
||||||
|
create table header (
|
||||||
|
name varchar(200) not null,
|
||||||
|
value text not null,
|
||||||
|
collection_path varchar(200) references collection (path),
|
||||||
|
primary key (name, collection_path));
|
||||||
|
|
||||||
|
create table line (
|
||||||
|
name text not null,
|
||||||
|
value text not null,
|
||||||
|
item_name varchar(200) references item (name),
|
||||||
|
timestamp bigint not null,
|
||||||
|
primary key (timestamp));
|
||||||
|
|
||||||
|
create table property (
|
||||||
|
name varchar(200) not null,
|
||||||
|
value text not null,
|
||||||
|
collection_path varchar(200) references collection (path),
|
||||||
|
primary key (name, collection_path));
|
||||||
|
'''.split(';')
|
||||||
|
|
||||||
|
storage_backend = os.environ.get('RADICALE_BACKEND', '') or 'filesystem'
|
||||||
|
|
||||||
|
|
||||||
|
def do_the_radicale_dance(tmpdir):
|
||||||
|
# All of radicale is already global state, the cleanliness of the code and
|
||||||
|
# all hope is already lost. This function runs before every test.
|
||||||
|
|
||||||
|
# This wipes out the radicale modules, to reset all of its state.
|
||||||
|
for module in list(sys.modules):
|
||||||
|
if module.startswith('radicale'):
|
||||||
|
del sys.modules[module]
|
||||||
|
|
||||||
|
# radicale.config looks for this envvar. We have to delete it before it
|
||||||
|
# tries to load a config file.
|
||||||
|
os.environ['RADICALE_CONFIG'] = ''
|
||||||
|
import radicale.config
|
||||||
|
|
||||||
|
# Now we can set some basic configuration.
|
||||||
|
# Radicale <=0.7 doesn't work with this, therefore we just catch the
|
||||||
|
# exception and assume Radicale is open for everyone.
|
||||||
|
try:
|
||||||
|
radicale.config.set('rights', 'type', 'owner_only')
|
||||||
|
radicale.config.set('auth', 'type', 'http')
|
||||||
|
|
||||||
|
import radicale.auth.http
|
||||||
|
|
||||||
|
def is_authenticated(user, password):
|
||||||
|
return user == 'bob' and password == 'bob'
|
||||||
|
radicale.auth.http.is_authenticated = is_authenticated
|
||||||
|
except Exception as e:
|
||||||
|
print(e)
|
||||||
|
|
||||||
|
if storage_backend in ('filesystem', 'multifilesystem'):
|
||||||
|
radicale.config.set('storage', 'type', storage_backend)
|
||||||
|
radicale.config.set('storage', 'filesystem_folder', tmpdir)
|
||||||
|
elif storage_backend == 'database':
|
||||||
|
radicale.config.set('storage', 'type', 'database')
|
||||||
|
radicale.config.set('storage', 'database_url', 'sqlite://')
|
||||||
|
from radicale.storage import database
|
||||||
|
|
||||||
|
s = database.Session()
|
||||||
|
for line in RADICALE_SCHEMA:
|
||||||
|
s.execute(line)
|
||||||
|
s.commit()
|
||||||
|
else:
|
||||||
|
raise RuntimeError(storage_backend)
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin(object):
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def setup(self, request, tmpdir):
|
||||||
|
do_the_radicale_dance(str(tmpdir))
|
||||||
|
from radicale import Application
|
||||||
|
|
||||||
|
wsgi_intercept.requests_intercept.install()
|
||||||
|
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 80, Application)
|
||||||
|
|
||||||
|
def teardown():
|
||||||
|
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 80)
|
||||||
|
wsgi_intercept.requests_intercept.uninstall()
|
||||||
|
request.addfinalizer(teardown)
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(
|
def get_storage_args(self, get_item):
|
||||||
self,
|
def inner(collection='test'):
|
||||||
request,
|
url = 'http://127.0.0.1/bob/'
|
||||||
tmpdir,
|
if collection is not None:
|
||||||
slow_create_collection,
|
collection += self.storage_class.fileext
|
||||||
radicale_server,
|
url = url.rstrip('/') + '/' + urlquote(collection)
|
||||||
aio_connector,
|
|
||||||
):
|
rv = {'url': url, 'username': 'bob', 'password': 'bob',
|
||||||
async def inner(collection="test"):
|
'collection': collection}
|
||||||
url = "http://127.0.0.1:8001/"
|
|
||||||
args = {
|
|
||||||
"url": url,
|
|
||||||
"username": "radicale",
|
|
||||||
"password": "radicale",
|
|
||||||
"connector": aio_connector,
|
|
||||||
}
|
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = await slow_create_collection(
|
s = self.storage_class(**rv)
|
||||||
self.storage_class,
|
s.delete(*s.upload(get_item()))
|
||||||
args,
|
|
||||||
collection,
|
|
||||||
)
|
|
||||||
return args
|
|
||||||
|
|
||||||
|
return rv
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
21
tests/storage/servers/radicale/install.sh
Normal file
21
tests/storage/servers/radicale/install.sh
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
#!/bin/sh
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ -z "$RADICALE_BACKEND" ]; then
|
||||||
|
echo "Missing RADICALE_BACKEND"
|
||||||
|
false
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$REQUIREMENTS" = "release" ] || [ "$REQUIREMENTS" = "minimal" ]; then
|
||||||
|
radicale_pkg="radicale"
|
||||||
|
elif [ "$REQUIREMENTS" = "devel" ]; then
|
||||||
|
radicale_pkg="git+https://github.com/Kozea/Radicale.git"
|
||||||
|
else
|
||||||
|
echo "Invalid requirements envvar"
|
||||||
|
false
|
||||||
|
fi
|
||||||
|
pip install wsgi_intercept $radicale_pkg
|
||||||
|
|
||||||
|
if [ "$RADICALE_BACKEND" = "database" ]; then
|
||||||
|
pip install sqlalchemy
|
||||||
|
fi
|
||||||
|
|
@ -1,9 +1,8 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
class ServerMixin(object):
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self):
|
def get_storage_args(self):
|
||||||
pytest.skip("DAV tests disabled.")
|
pytest.skip('DAV tests disabled.')
|
||||||
|
|
|
||||||
1
tests/storage/servers/skip/install.sh
Executable file
1
tests/storage/servers/skip/install.sh
Executable file
|
|
@ -0,0 +1 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
|
||||||
def get_storage_args(
|
|
||||||
self,
|
|
||||||
request,
|
|
||||||
tmpdir,
|
|
||||||
slow_create_collection,
|
|
||||||
xandikos_server,
|
|
||||||
aio_connector,
|
|
||||||
):
|
|
||||||
async def inner(collection="test"):
|
|
||||||
url = "http://127.0.0.1:8000/"
|
|
||||||
args = {"url": url, "connector": aio_connector}
|
|
||||||
|
|
||||||
if collection is not None:
|
|
||||||
args = await slow_create_collection(
|
|
||||||
self.storage_class,
|
|
||||||
args,
|
|
||||||
collection,
|
|
||||||
)
|
|
||||||
|
|
||||||
return args
|
|
||||||
|
|
||||||
return inner
|
|
||||||
|
|
@ -1,12 +1,11 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
import aiostream
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer.storage.filesystem import FilesystemStorage
|
from vdirsyncer.storage.filesystem import FilesystemStorage
|
||||||
from vdirsyncer.vobject import Item
|
from vdirsyncer.utils.vobject import Item
|
||||||
|
|
||||||
from . import StorageTests
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
@ -16,117 +15,65 @@ class TestFilesystemStorage(StorageTests):
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, tmpdir):
|
def get_storage_args(self, tmpdir):
|
||||||
async def inner(collection="test"):
|
def inner(collection='test'):
|
||||||
rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
|
rv = {'path': str(tmpdir), 'fileext': '.txt', 'collection':
|
||||||
|
collection}
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
rv = await self.storage_class.create_collection(**rv)
|
rv = self.storage_class.create_collection(**rv)
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
||||||
def test_is_not_directory(self, tmpdir):
|
def test_is_not_directory(self, tmpdir):
|
||||||
with pytest.raises(OSError):
|
with pytest.raises(IOError):
|
||||||
f = tmpdir.join("hue")
|
f = tmpdir.join('hue')
|
||||||
f.write("stub")
|
f.write('stub')
|
||||||
self.storage_class(str(tmpdir) + "/hue", ".txt")
|
self.storage_class(str(tmpdir) + '/hue', '.txt')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_broken_data(self, tmpdir):
|
||||||
async def test_broken_data(self, tmpdir):
|
s = self.storage_class(str(tmpdir), '.txt')
|
||||||
s = self.storage_class(str(tmpdir), ".txt")
|
|
||||||
|
|
||||||
class BrokenItem:
|
class BrokenItem(object):
|
||||||
raw = "Ц, Ш, Л, ж, Д, З, Ю".encode()
|
raw = u'Ц, Ш, Л, ж, Д, З, Ю'.encode('utf-8')
|
||||||
uid = "jeezus"
|
uid = 'jeezus'
|
||||||
ident = uid
|
ident = uid
|
||||||
|
|
||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
await s.upload(BrokenItem)
|
s.upload(BrokenItem)
|
||||||
assert not tmpdir.listdir()
|
assert not tmpdir.listdir()
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_ident_with_slash(self, tmpdir):
|
||||||
async def test_ident_with_slash(self, tmpdir):
|
s = self.storage_class(str(tmpdir), '.txt')
|
||||||
s = self.storage_class(str(tmpdir), ".txt")
|
s.upload(Item(u'UID:a/b/c'))
|
||||||
await s.upload(Item("UID:a/b/c"))
|
item_file, = tmpdir.listdir()
|
||||||
(item_file,) = tmpdir.listdir()
|
assert '/' not in item_file.basename and item_file.isfile()
|
||||||
assert "/" not in item_file.basename
|
|
||||||
assert item_file.isfile()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_too_long_uid(self, tmpdir):
|
||||||
async def test_ignore_tmp_files(self, tmpdir):
|
s = self.storage_class(str(tmpdir), '.txt')
|
||||||
"""Test that files with .tmp suffix beside .ics files are ignored."""
|
item = Item(u'UID:' + u'hue' * 600)
|
||||||
s = self.storage_class(str(tmpdir), ".ics")
|
href, etag = s.upload(item)
|
||||||
await s.upload(Item("UID:xyzxyz"))
|
|
||||||
(item_file,) = tmpdir.listdir()
|
|
||||||
item_file.copy(item_file.new(ext="tmp"))
|
|
||||||
assert len(tmpdir.listdir()) == 2
|
|
||||||
assert len(await aiostream.stream.list(s.list())) == 1
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_ignore_tmp_files_empty_fileext(self, tmpdir):
|
|
||||||
"""Test that files with .tmp suffix are ignored with empty fileext."""
|
|
||||||
s = self.storage_class(str(tmpdir), "")
|
|
||||||
await s.upload(Item("UID:xyzxyz"))
|
|
||||||
(item_file,) = tmpdir.listdir()
|
|
||||||
item_file.copy(item_file.new(ext="tmp"))
|
|
||||||
assert len(tmpdir.listdir()) == 2
|
|
||||||
# assert False, tmpdir.listdir() # enable to see the created filename
|
|
||||||
assert len(await aiostream.stream.list(s.list())) == 1
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_ignore_files_typical_backup(self, tmpdir):
|
|
||||||
"""Test file-name ignorance with typical backup ending ~."""
|
|
||||||
ignorext = "~" # without dot
|
|
||||||
|
|
||||||
storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
|
|
||||||
await storage.upload(Item("UID:xyzxyz"))
|
|
||||||
(item_file,) = tmpdir.listdir()
|
|
||||||
item_file.copy(item_file.new(basename=item_file.basename + ignorext))
|
|
||||||
|
|
||||||
assert len(tmpdir.listdir()) == 2
|
|
||||||
assert len(await aiostream.stream.list(storage.list())) == 1
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_too_long_uid(self, tmpdir):
|
|
||||||
storage = self.storage_class(str(tmpdir), ".txt")
|
|
||||||
item = Item("UID:" + "hue" * 600)
|
|
||||||
|
|
||||||
href, _etag = await storage.upload(item)
|
|
||||||
assert item.uid not in href
|
assert item.uid not in href
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_post_hook_inactive(self, tmpdir, monkeypatch):
|
||||||
async def test_post_hook_inactive(self, tmpdir, monkeypatch):
|
|
||||||
def check_call_mock(*args, **kwargs):
|
def check_call_mock(*args, **kwargs):
|
||||||
raise AssertionError
|
assert False
|
||||||
|
|
||||||
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
monkeypatch.setattr(subprocess, 'call', check_call_mock)
|
||||||
|
|
||||||
s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
|
s = self.storage_class(str(tmpdir), '.txt', post_hook=None)
|
||||||
await s.upload(Item("UID:a/b/c"))
|
s.upload(Item(u'UID:a/b/c'))
|
||||||
|
|
||||||
|
def test_post_hook_active(self, tmpdir, monkeypatch):
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_post_hook_active(self, tmpdir, monkeypatch):
|
|
||||||
calls = []
|
calls = []
|
||||||
exe = "foo"
|
exe = 'foo'
|
||||||
|
|
||||||
def check_call_mock(call, *args, **kwargs):
|
def check_call_mock(l, *args, **kwargs):
|
||||||
calls.append(True)
|
calls.append(True)
|
||||||
assert len(call) == 2
|
assert len(l) == 2
|
||||||
assert call[0] == exe
|
assert l[0] == exe
|
||||||
|
|
||||||
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
monkeypatch.setattr(subprocess, 'call', check_call_mock)
|
||||||
|
|
||||||
s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
|
s = self.storage_class(str(tmpdir), '.txt', post_hook=exe)
|
||||||
await s.upload(Item("UID:a/b/c"))
|
s.upload(Item(u'UID:a/b/c'))
|
||||||
assert calls
|
assert calls
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_ignore_git_dirs(self, tmpdir):
|
|
||||||
tmpdir.mkdir(".git").mkdir("foo")
|
|
||||||
tmpdir.mkdir("a")
|
|
||||||
tmpdir.mkdir("b")
|
|
||||||
|
|
||||||
expected = {"a", "b"}
|
|
||||||
actual = {
|
|
||||||
c["collection"] async for c in self.storage_class.discover(str(tmpdir))
|
|
||||||
}
|
|
||||||
assert actual == expected
|
|
||||||
|
|
|
||||||
|
|
@ -1,163 +1,122 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import pytest
|
import pytest
|
||||||
from aioresponses import CallbackResult
|
|
||||||
from aioresponses import aioresponses
|
from requests import Response
|
||||||
|
|
||||||
from tests import normalize_item
|
from tests import normalize_item
|
||||||
|
|
||||||
from vdirsyncer.exceptions import UserError
|
from vdirsyncer.exceptions import UserError
|
||||||
from vdirsyncer.http import BasicAuthMethod
|
from vdirsyncer.storage.http import HttpStorage, prepare_auth
|
||||||
from vdirsyncer.http import DigestAuthMethod
|
|
||||||
from vdirsyncer.http import UsageLimitReached
|
|
||||||
from vdirsyncer.http import request
|
|
||||||
from vdirsyncer.storage.http import HttpStorage
|
|
||||||
from vdirsyncer.storage.http import prepare_auth
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_list(monkeypatch):
|
||||||
async def test_list(aio_connector):
|
collection_url = 'http://127.0.0.1/calendar/collection.ics'
|
||||||
collection_url = "http://127.0.0.1/calendar/collection.ics"
|
|
||||||
|
|
||||||
items = [
|
items = [
|
||||||
(
|
(u'BEGIN:VEVENT\n'
|
||||||
"BEGIN:VEVENT\n"
|
u'SUMMARY:Eine Kurzinfo\n'
|
||||||
"SUMMARY:Eine Kurzinfo\n"
|
u'DESCRIPTION:Beschreibung des Termines\n'
|
||||||
"DESCRIPTION:Beschreibung des Termines\n"
|
u'END:VEVENT'),
|
||||||
"END:VEVENT"
|
(u'BEGIN:VEVENT\n'
|
||||||
),
|
u'SUMMARY:Eine zweite Küèrzinfo\n'
|
||||||
(
|
u'DESCRIPTION:Beschreibung des anderen Termines\n'
|
||||||
"BEGIN:VEVENT\n"
|
u'BEGIN:VALARM\n'
|
||||||
"SUMMARY:Eine zweite Küèrzinfo\n"
|
u'ACTION:AUDIO\n'
|
||||||
"DESCRIPTION:Beschreibung des anderen Termines\n"
|
u'TRIGGER:19980403T120000\n'
|
||||||
"BEGIN:VALARM\n"
|
u'ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n'
|
||||||
"ACTION:AUDIO\n"
|
u'REPEAT:4\n'
|
||||||
"TRIGGER:19980403T120000\n"
|
u'DURATION:PT1H\n'
|
||||||
"ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n"
|
u'END:VALARM\n'
|
||||||
"REPEAT:4\n"
|
u'END:VEVENT')
|
||||||
"DURATION:PT1H\n"
|
|
||||||
"END:VALARM\n"
|
|
||||||
"END:VEVENT"
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2
|
responses = [
|
||||||
|
u'\n'.join([u'BEGIN:VCALENDAR'] + items + [u'END:VCALENDAR'])
|
||||||
|
] * 2
|
||||||
|
|
||||||
def callback(url, headers, **kwargs):
|
def get(self, method, url, *a, **kw):
|
||||||
assert headers["User-Agent"].startswith("vdirsyncer/")
|
assert method == 'GET'
|
||||||
|
assert url == collection_url
|
||||||
|
r = Response()
|
||||||
|
r.status_code = 200
|
||||||
assert responses
|
assert responses
|
||||||
|
r._content = responses.pop().encode('utf-8')
|
||||||
|
r.headers['Content-Type'] = 'text/icalendar'
|
||||||
|
r.encoding = 'ISO-8859-1'
|
||||||
|
return r
|
||||||
|
|
||||||
return CallbackResult(
|
monkeypatch.setattr('requests.sessions.Session.request', get)
|
||||||
status=200,
|
|
||||||
body=responses.pop().encode("utf-8"),
|
|
||||||
headers={"Content-Type": "text/calendar; charset=iso-8859-1"},
|
|
||||||
)
|
|
||||||
|
|
||||||
with aioresponses() as m:
|
s = HttpStorage(url=collection_url)
|
||||||
m.get(collection_url, callback=callback, repeat=True)
|
|
||||||
|
|
||||||
s = HttpStorage(url=collection_url, connector=aio_connector)
|
found_items = {}
|
||||||
|
|
||||||
found_items = {}
|
for href, etag in s.list():
|
||||||
|
item, etag2 = s.get(href)
|
||||||
|
assert item.uid is None
|
||||||
|
assert etag2 == etag
|
||||||
|
found_items[normalize_item(item)] = href
|
||||||
|
|
||||||
async for href, etag in s.list():
|
expected = set(normalize_item(u'BEGIN:VCALENDAR\n' + x + '\nEND:VCALENDAR')
|
||||||
item, etag2 = await s.get(href)
|
for x in items)
|
||||||
assert item.uid is not None
|
|
||||||
assert etag2 == etag
|
|
||||||
found_items[normalize_item(item)] = href
|
|
||||||
|
|
||||||
expected = {
|
assert set(found_items) == expected
|
||||||
normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
|
|
||||||
}
|
|
||||||
|
|
||||||
assert set(found_items) == expected
|
for href, etag in s.list():
|
||||||
|
item, etag2 = s.get(href)
|
||||||
async for href, etag in s.list():
|
assert item.uid is None
|
||||||
item, etag2 = await s.get(href)
|
assert etag2 == etag
|
||||||
assert item.uid is not None
|
assert found_items[normalize_item(item)] == href
|
||||||
assert etag2 == etag
|
|
||||||
assert found_items[normalize_item(item)] == href
|
|
||||||
|
|
||||||
|
|
||||||
def test_readonly_param(aio_connector):
|
def test_readonly_param():
|
||||||
"""The ``readonly`` param cannot be ``False``."""
|
url = u'http://example.com/'
|
||||||
|
|
||||||
url = "http://example.com/"
|
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
HttpStorage(url=url, read_only=False, connector=aio_connector)
|
HttpStorage(url=url, read_only=False)
|
||||||
|
|
||||||
a = HttpStorage(url=url, read_only=True, connector=aio_connector)
|
a = HttpStorage(url=url, read_only=True).read_only
|
||||||
b = HttpStorage(url=url, read_only=None, connector=aio_connector)
|
b = HttpStorage(url=url, read_only=None).read_only
|
||||||
|
assert a is b is True
|
||||||
assert a.read_only is b.read_only is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_prepare_auth():
|
def test_prepare_auth():
|
||||||
assert prepare_auth(None, "", "") is None
|
assert prepare_auth(None, '', '') is None
|
||||||
|
|
||||||
assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd")
|
assert prepare_auth('basic', 'user', 'pwd') == ('user', 'pwd')
|
||||||
assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd")
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
assert prepare_auth('basic', '', 'pwd')
|
||||||
|
assert 'you need to specify username and password' in \
|
||||||
|
str(excinfo.value).lower()
|
||||||
|
|
||||||
|
from requests.auth import HTTPDigestAuth
|
||||||
|
assert isinstance(prepare_auth('digest', 'user', 'pwd'),
|
||||||
|
HTTPDigestAuth)
|
||||||
|
|
||||||
with pytest.raises(ValueError) as excinfo:
|
with pytest.raises(ValueError) as excinfo:
|
||||||
assert prepare_auth("basic", "", "pwd")
|
prepare_auth('ladida', 'user', 'pwd')
|
||||||
assert "you need to specify username and password" in str(excinfo.value).lower()
|
|
||||||
|
|
||||||
assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod)
|
assert 'unknown authentication method' in str(excinfo.value).lower()
|
||||||
|
|
||||||
with pytest.raises(ValueError) as excinfo:
|
|
||||||
prepare_auth("ladida", "user", "pwd")
|
|
||||||
|
|
||||||
assert "unknown authentication method" in str(excinfo.value).lower()
|
|
||||||
|
|
||||||
|
|
||||||
def test_prepare_auth_guess():
|
@pytest.mark.parametrize('auth', (None, 'guess'))
|
||||||
# guess auth is currently not supported
|
def test_prepare_auth_guess(monkeypatch, auth):
|
||||||
|
import requests_toolbelt.auth.guess
|
||||||
|
|
||||||
|
assert isinstance(prepare_auth(auth, 'user', 'pwd'),
|
||||||
|
requests_toolbelt.auth.guess.GuessAuth)
|
||||||
|
|
||||||
|
monkeypatch.delattr(requests_toolbelt.auth.guess, 'GuessAuth')
|
||||||
|
|
||||||
with pytest.raises(UserError) as excinfo:
|
with pytest.raises(UserError) as excinfo:
|
||||||
prepare_auth("guess", "usr", "pwd")
|
prepare_auth(auth, 'user', 'pwd')
|
||||||
|
|
||||||
assert "not supported" in str(excinfo.value).lower()
|
assert 'requests_toolbelt is too old' in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
def test_verify_false_disallowed(aio_connector):
|
def test_verify_false_disallowed():
|
||||||
with pytest.raises(ValueError) as excinfo:
|
with pytest.raises(ValueError) as excinfo:
|
||||||
HttpStorage(url="http://example.com", verify=False, connector=aio_connector)
|
HttpStorage(url='http://example.com', verify=False)
|
||||||
|
|
||||||
assert "must be a path to a pem-file." in str(excinfo.value).lower()
|
assert 'forbidden' in str(excinfo.value).lower()
|
||||||
|
assert 'consider setting verify_fingerprint' in str(excinfo.value).lower()
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_403_usage_limit_exceeded(aio_connector):
|
|
||||||
url = "http://127.0.0.1/test_403"
|
|
||||||
error_body = {
|
|
||||||
"error": {
|
|
||||||
"errors": [
|
|
||||||
{
|
|
||||||
"domain": "usageLimits",
|
|
||||||
"message": "Calendar usage limits exceeded.",
|
|
||||||
"reason": "quotaExceeded",
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"code": 403,
|
|
||||||
"message": "Calendar usage limits exceeded.",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
|
||||||
with aioresponses() as m:
|
|
||||||
m.get(url, status=403, payload=error_body, repeat=True)
|
|
||||||
with pytest.raises(UsageLimitReached):
|
|
||||||
await request("GET", url, session)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_403_without_usage_limits_domain(aio_connector):
|
|
||||||
"""A 403 JSON error without the Google 'usageLimits' domain should not be
|
|
||||||
treated as UsageLimitReached and should surface as ClientResponseError.
|
|
||||||
"""
|
|
||||||
url = "http://127.0.0.1/test_403_no_usage_limits"
|
|
||||||
|
|
||||||
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
|
||||||
with aioresponses() as m:
|
|
||||||
m.get(url, status=403, repeat=True)
|
|
||||||
with pytest.raises(aiohttp.ClientResponseError):
|
|
||||||
await request("GET", url, session)
|
|
||||||
|
|
|
||||||
|
|
@ -1,51 +1,43 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import aiostream
|
|
||||||
import pytest
|
import pytest
|
||||||
from aioresponses import CallbackResult
|
|
||||||
from aioresponses import aioresponses
|
|
||||||
|
|
||||||
import vdirsyncer.storage.http
|
from requests import Response
|
||||||
|
|
||||||
from vdirsyncer.storage.base import Storage
|
from vdirsyncer.storage.base import Storage
|
||||||
|
import vdirsyncer.storage.http
|
||||||
from vdirsyncer.storage.singlefile import SingleFileStorage
|
from vdirsyncer.storage.singlefile import SingleFileStorage
|
||||||
|
|
||||||
from . import StorageTests
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class CombinedStorage(Storage):
|
class CombinedStorage(Storage):
|
||||||
"""A subclass of HttpStorage to make testing easier. It supports writes via
|
'''A subclass of HttpStorage to make testing easier. It supports writes via
|
||||||
SingleFileStorage."""
|
SingleFileStorage.'''
|
||||||
|
_repr_attributes = ('url', 'path')
|
||||||
|
|
||||||
_repr_attributes = ("url", "path")
|
def __init__(self, url, path, **kwargs):
|
||||||
storage_name = "http_and_singlefile"
|
super(CombinedStorage, self).__init__(**kwargs)
|
||||||
|
|
||||||
def __init__(self, url, path, *, connector, **kwargs):
|
|
||||||
if kwargs.get("collection") is not None:
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
super().__init__(**kwargs)
|
|
||||||
self.url = url
|
self.url = url
|
||||||
self.path = path
|
self.path = path
|
||||||
self._reader = vdirsyncer.storage.http.HttpStorage(url=url, connector=connector)
|
self._reader = vdirsyncer.storage.http.HttpStorage(url=url)
|
||||||
self._reader._ignore_uids = False
|
|
||||||
self._writer = SingleFileStorage(path=path)
|
self._writer = SingleFileStorage(path=path)
|
||||||
|
|
||||||
async def list(self, *a, **kw):
|
def list(self, *a, **kw):
|
||||||
async for item in self._reader.list(*a, **kw):
|
return self._reader.list(*a, **kw)
|
||||||
yield item
|
|
||||||
|
|
||||||
async def get(self, *a, **kw):
|
def get(self, *a, **kw):
|
||||||
await aiostream.stream.list(self.list())
|
self.list()
|
||||||
return await self._reader.get(*a, **kw)
|
return self._reader.get(*a, **kw)
|
||||||
|
|
||||||
async def upload(self, *a, **kw):
|
def upload(self, *a, **kw):
|
||||||
return await self._writer.upload(*a, **kw)
|
return self._writer.upload(*a, **kw)
|
||||||
|
|
||||||
async def update(self, *a, **kw):
|
def update(self, *a, **kw):
|
||||||
return await self._writer.update(*a, **kw)
|
return self._writer.update(*a, **kw)
|
||||||
|
|
||||||
async def delete(self, *a, **kw):
|
def delete(self, *a, **kw):
|
||||||
return await self._writer.delete(*a, **kw)
|
return self._writer.delete(*a, **kw)
|
||||||
|
|
||||||
|
|
||||||
class TestHttpStorage(StorageTests):
|
class TestHttpStorage(StorageTests):
|
||||||
|
|
@ -55,39 +47,30 @@ class TestHttpStorage(StorageTests):
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
def setup_tmpdir(self, tmpdir, monkeypatch):
|
def setup_tmpdir(self, tmpdir, monkeypatch):
|
||||||
self.tmpfile = str(tmpdir.ensure("collection.txt"))
|
self.tmpfile = str(tmpdir.ensure('collection.txt'))
|
||||||
|
|
||||||
def callback(url, headers, **kwargs):
|
def _request(method, url, *args, **kwargs):
|
||||||
"""Read our tmpfile at request time.
|
assert method == 'GET'
|
||||||
|
assert url == 'http://localhost:123/collection.txt'
|
||||||
|
assert 'vdirsyncer' in kwargs['headers']['User-Agent']
|
||||||
|
r = Response()
|
||||||
|
r.status_code = 200
|
||||||
|
try:
|
||||||
|
with open(self.tmpfile, 'rb') as f:
|
||||||
|
r._content = f.read()
|
||||||
|
except IOError:
|
||||||
|
r._content = b''
|
||||||
|
|
||||||
We can't just read this during test setup since the file get written to
|
r.headers['Content-Type'] = 'text/icalendar'
|
||||||
during test execution.
|
r.encoding = 'utf-8'
|
||||||
|
return r
|
||||||
|
|
||||||
It might make sense to actually run a server serving the local file.
|
monkeypatch.setattr(vdirsyncer.storage.http, 'request', _request)
|
||||||
"""
|
|
||||||
assert headers["User-Agent"].startswith("vdirsyncer/")
|
|
||||||
|
|
||||||
with open(self.tmpfile) as f:
|
|
||||||
body = f.read()
|
|
||||||
|
|
||||||
return CallbackResult(
|
|
||||||
status=200,
|
|
||||||
body=body,
|
|
||||||
headers={"Content-Type": "text/calendar; charset=utf-8"},
|
|
||||||
)
|
|
||||||
|
|
||||||
with aioresponses() as m:
|
|
||||||
m.get("http://localhost:123/collection.txt", callback=callback, repeat=True)
|
|
||||||
yield
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, aio_connector):
|
def get_storage_args(self):
|
||||||
async def inner(collection=None):
|
def inner(collection=None):
|
||||||
assert collection is None
|
assert collection is None
|
||||||
return {
|
return {'url': 'http://localhost:123/collection.txt',
|
||||||
"url": "http://localhost:123/collection.txt",
|
'path': self.tmpfile}
|
||||||
"path": self.tmpfile,
|
|
||||||
"connector": aio_connector,
|
|
||||||
}
|
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
@ -8,12 +8,10 @@ from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestMemoryStorage(StorageTests):
|
class TestMemoryStorage(StorageTests):
|
||||||
|
|
||||||
storage_class = MemoryStorage
|
storage_class = MemoryStorage
|
||||||
supports_collections = False
|
supports_collections = False
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self):
|
def get_storage_args(self):
|
||||||
async def inner(**args):
|
return lambda **kw: kw
|
||||||
return args
|
|
||||||
|
|
||||||
return inner
|
|
||||||
|
|
|
||||||
35
tests/storage/test_remotestorage.py
Normal file
35
tests/storage/test_remotestorage.py
Normal file
|
|
@ -0,0 +1,35 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer.storage.remotestorage import \
|
||||||
|
RemoteStorageCalendars, RemoteStorageContacts
|
||||||
|
|
||||||
|
from . import StorageTests, get_server_mixin
|
||||||
|
|
||||||
|
remotestorage_server = os.environ['REMOTESTORAGE_SERVER']
|
||||||
|
ServerMixin = get_server_mixin(remotestorage_server)
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteStorageTests(ServerMixin, StorageTests):
|
||||||
|
remotestorage_server = remotestorage_server
|
||||||
|
|
||||||
|
|
||||||
|
class TestCalendars(RemoteStorageTests):
|
||||||
|
storage_class = RemoteStorageCalendars
|
||||||
|
|
||||||
|
@pytest.fixture(params=['VTODO', 'VEVENT'])
|
||||||
|
def item_type(self, request):
|
||||||
|
return request.param
|
||||||
|
|
||||||
|
|
||||||
|
class TestContacts(RemoteStorageTests):
|
||||||
|
storage_class = RemoteStorageContacts
|
||||||
|
supports_collections = False
|
||||||
|
|
||||||
|
@pytest.fixture(params=['VCARD'])
|
||||||
|
def item_type(self, request):
|
||||||
|
return request.param
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
@ -8,15 +8,22 @@ from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestSingleFileStorage(StorageTests):
|
class TestSingleFileStorage(StorageTests):
|
||||||
|
|
||||||
storage_class = SingleFileStorage
|
storage_class = SingleFileStorage
|
||||||
|
supports_collections = False
|
||||||
supports_metadata = False
|
supports_metadata = False
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture(autouse=True)
|
||||||
def get_storage_args(self, tmpdir):
|
def setup(self, tmpdir):
|
||||||
async def inner(collection="test"):
|
self._path = str(tmpdir.ensure('test.txt'))
|
||||||
rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
|
|
||||||
if collection is not None:
|
|
||||||
rv = await self.storage_class.create_collection(**rv)
|
|
||||||
return rv
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(self):
|
||||||
|
def inner(**kwargs):
|
||||||
|
kwargs.update(path=self._path)
|
||||||
|
return kwargs
|
||||||
return inner
|
return inner
|
||||||
|
|
||||||
|
def test_collection_arg(self, tmpdir):
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
self.storage_class(str(tmpdir.join('foo.ics')), collection='ha')
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue