Compare commits

..

No commits in common. "main" and "0.18.0" have entirely different histories.
main ... 0.18.0

116 changed files with 3048 additions and 3932 deletions

View file

@ -5,26 +5,19 @@ packages:
- docker - docker
- docker-compose - docker-compose
# Build dependencies: # Build dependencies:
- python-pip
- python-wheel - python-wheel
- python-build
- python-installer
- python-setuptools-scm
# Runtime dependencies: # Runtime dependencies:
- python-atomicwrites
- python-click - python-click
- python-click-log - python-click-log
- python-click-threading - python-click-threading
- python-requests - python-requests
- python-aiohttp-oauthlib - python-requests-toolbelt
- python-tenacity
# Test dependencies: # Test dependencies:
- python-hypothesis - python-hypothesis
- python-pytest-cov - python-pytest-cov
- python-pytest-httpserver - python-pytest-localserver
- python-trustme
- python-pytest-asyncio
- python-aiohttp
- python-aiostream
- python-aioresponses
sources: sources:
- https://github.com/pimutils/vdirsyncer - https://github.com/pimutils/vdirsyncer
environment: environment:
@ -35,14 +28,11 @@ environment:
REQUIREMENTS: release REQUIREMENTS: release
# TODO: ETESYNC_TESTS # TODO: ETESYNC_TESTS
tasks: tasks:
- check-python:
python --version | grep 'Python 3.13'
- docker: |
sudo systemctl start docker
- setup: | - setup: |
sudo systemctl start docker
cd vdirsyncer cd vdirsyncer
python -m build --wheel --skip-dependency-check --no-isolation python setup.py build
sudo python -m installer dist/*.whl sudo pip install --no-index .
- test: | - test: |
cd vdirsyncer cd vdirsyncer
make -e ci-test make -e ci-test

View file

@ -3,13 +3,11 @@
# TODO: It might make more sense to test with an older Ubuntu or Fedora version # TODO: It might make more sense to test with an older Ubuntu or Fedora version
# here, and consider that our "oldest supported environment". # here, and consider that our "oldest supported environment".
image: alpine/3.19 # python 3.11 image: archlinux
packages: packages:
- docker - docker
- docker-cli
- docker-compose - docker-compose
- py3-pip - python-pip
- python3-dev
sources: sources:
- https://github.com/pimutils/vdirsyncer - https://github.com/pimutils/vdirsyncer
environment: environment:
@ -18,19 +16,15 @@ environment:
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79 CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
DAV_SERVER: radicale xandikos DAV_SERVER: radicale xandikos
REQUIREMENTS: minimal REQUIREMENTS: minimal
# TODO: ETESYNC_TESTS
tasks: tasks:
- venv: |
python3 -m venv $HOME/venv
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
- docker: |
sudo addgroup $(whoami) docker
sudo service docker start
- setup: | - setup: |
sudo systemctl start docker
cd vdirsyncer cd vdirsyncer
# Hack, no idea why it's needed
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
make -e install-dev make -e install-dev
- test: | - test: |
cd vdirsyncer cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/
make -e ci-test make -e ci-test
make -e ci-test-storage make -e ci-test-storage

View file

@ -5,10 +5,11 @@ packages:
- docker - docker
- docker-compose - docker-compose
- python-pip - python-pip
- twine
sources: sources:
- https://github.com/pimutils/vdirsyncer - https://github.com/pimutils/vdirsyncer
secrets: secrets:
- 4d9a6dfe-5c8d-48bd-b864-a2f5d772c536 - a36c8ba3-fba0-4338-b402-6aea0fbe771e
environment: environment:
BUILD: test BUILD: test
CI: true CI: true
@ -17,29 +18,23 @@ environment:
REQUIREMENTS: release REQUIREMENTS: release
# TODO: ETESYNC_TESTS # TODO: ETESYNC_TESTS
tasks: tasks:
- venv: |
python -m venv $HOME/venv
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
- docker: |
sudo systemctl start docker
- setup: | - setup: |
sudo systemctl start docker
cd vdirsyncer cd vdirsyncer
make -e install-dev make -e install-dev -e install-docs
- test: | - test: |
cd vdirsyncer cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/
make -e ci-test make -e ci-test
make -e ci-test-storage make -e ci-test-storage
- check: | - style: |
cd vdirsyncer
make check
- check-secrets: |
# Stop here if this is a PR. PRs can't run with the below secrets.
[ -f ~/fastmail-secrets ] || complete-build
- extra-storages: |
set +x
source ~/fastmail-secrets
set -x
cd vdirsyncer cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/ export PATH=$PATH:~/.local/bin/
DAV_SERVER=fastmail pytest tests/storage make -e style
git describe --exact-match --tags || complete-build
- publish: |
cd vdirsyncer
python setup.py sdist bdist_wheel
twine upload dist/*

View file

@ -2,3 +2,10 @@ comment: false
coverage: coverage:
status: status:
patch: false patch: false
project:
unit:
flags: unit
system:
flags: system
storage:
flags: storage

1
.envrc
View file

@ -1 +0,0 @@
layout python3

38
.github/workflows/publish.yml vendored Normal file
View file

@ -0,0 +1,38 @@
name: Publish
on:
push:
tags:
- 0.*
jobs:
github-release:
runs-on: ubuntu-18.04
name: Publish GitHub Release
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v1
with:
python-version: 3.7
architecture: x64
- run: pip install wheel
- run: python setup.py sdist bdist_wheel
- uses: softprops/action-gh-release@v1
with:
files: dist/*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
pypi:
runs-on: ubuntu-18.04
name: Publish package on PyPI
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v1
with:
python-version: 3.7
architecture: x64
- run: pip install wheel
- run: python setup.py sdist bdist_wheel
- uses: pypa/gh-action-pypi-publish@master
with:
password: ${{ secrets.PYPI_TOKEN }}

View file

@ -1,6 +1,6 @@
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0 rev: v4.0.1
hooks: hooks:
- id: trailing-whitespace - id: trailing-whitespace
args: [--markdown-linebreak-ext=md] args: [--markdown-linebreak-ext=md]
@ -8,32 +8,16 @@ repos:
- id: check-toml - id: check-toml
- id: check-added-large-files - id: check-added-large-files
- id: debug-statements - id: debug-statements
- repo: https://github.com/pre-commit/mirrors-mypy - repo: https://gitlab.com/pycqa/flake8
rev: "v1.15.0" rev: "3.9.2"
hooks: hooks:
- id: mypy - id: flake8
files: vdirsyncer/.* additional_dependencies: [flake8-import-order, flake8-bugbear]
additional_dependencies: - repo: https://github.com/psf/black
- types-setuptools rev: "21.6b0"
- types-docutils
- types-requests
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.11.4'
hooks: hooks:
- id: ruff - id: black
args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/asottile/reorder_python_imports
- id: ruff-format rev: v2.5.0
- repo: local
hooks: hooks:
- id: typos-syncroniz - id: reorder-python-imports
name: typos-syncroniz
language: system
# Not how you spell "synchronise"
entry: sh -c "git grep -i syncroniz"
files: ".*/.*"
- id: typos-text-icalendar
name: typos-text-icalendar
language: system
# It's "text/calendar", no "i".
entry: sh -c "git grep -i 'text/icalendar'"
files: ".*/.*"

View file

@ -1,16 +0,0 @@
version: 2
sphinx:
configuration: docs/conf.py
build:
os: "ubuntu-22.04"
tools:
python: "3.9"
python:
install:
- method: pip
path: .
extra_requirements:
- docs

View file

@ -4,22 +4,15 @@ Contributors
In alphabetical order: In alphabetical order:
- Ben Boeckel - Ben Boeckel
- Bleala
- Christian Geier - Christian Geier
- Clément Mondon - Clément Mondon
- Corey Hinshaw
- Kai Herlemann
- Hugo Osvaldo Barrera - Hugo Osvaldo Barrera
- Jason Cox
- Julian Mehne - Julian Mehne
- Malte Kiefer - Malte Kiefer
- Marek Marczykowski-Górecki - Marek Marczykowski-Górecki
- Markus Unterwaditzer - Markus Unterwaditzer
- Michael Adler - Michael Adler
- rEnr3n
- Thomas Weißschuh - Thomas Weißschuh
- Witcher01
- samm81
Special thanks goes to: Special thanks goes to:

View file

@ -9,111 +9,6 @@ Package maintainers and users who have to manually update their installation
may want to subscribe to `GitHub's tag feed may want to subscribe to `GitHub's tag feed
<https://github.com/pimutils/vdirsyncer/tags.atom>`_. <https://github.com/pimutils/vdirsyncer/tags.atom>`_.
Version 0.21.0
==============
- Implement retrying for ``google`` storage type when a rate limit is reached.
- ``tenacity`` is now a required dependency.
- Drop support for Python 3.8.
- Retry transient network errors for nullipotent requests.
Version 0.20.0
==============
- Remove dependency on abandoned ``atomicwrites`` library.
- Implement ``filter_hook`` for the HTTP storage.
- Drop support for Python 3.7.
- Add support for Python 3.12 and Python 3.13.
- Properly close the status database after using. This especially affects tests,
where we were leaking a large amount of file descriptors.
- Extend supported versions of ``aiostream`` to include 0.7.x.
Version 0.19.3
==============
- Added a no_delete option to the storage configuration. :gh:`1090`
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
- Add an option to request vCard v4.0. :gh:`1066`
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
- Implement digest auth. :gh:`1137`
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
Version 0.19.2
==============
- Improve the performance of ``SingleFileStorage``. :gh:`818`
- Properly document some caveats of the Google Contacts storage.
- Fix crash when using auth certs. :gh:`1033`
- The ``filesystem`` storage can be specified with ``type =
"filesystem/icalendar"`` or ``type = "filesystem/vcard"``. This has no
functional impact, and is merely for forward compatibility with the Rust
implementation of vdirsyncer.
- Python 3.10 and 3.11 are officially supported.
- Instructions for integrating with Google CalDav/CardDav have changed.
Applications now need to be registered as "Desktop applications". Using "Web
application" no longer works due to changes on Google's side. :gh:`1078`
Version 0.19.1
==============
- Fixed crash when operating on Google Contacts. :gh:`994`
- The ``HTTP_PROXY`` and ``HTTPS_PROXY`` are now respected. :gh:`1031`
- Instructions for integrating with Google CalDav/CardDav have changed.
Applications now need to be registered as "Web Application". :gh:`975`
- Various documentation updates.
Version 0.19.0
==============
- Add "shell" password fetch strategy to pass command string to a shell.
- Add "description" and "order" as metadata. These fetch the CalDAV:
calendar-description, ``CardDAV:addressbook-description`` and
``apple-ns:calendar-order`` properties respectively.
- Add a new ``showconfig`` status. This prints *some* configuration values as
JSON. This is intended to be used by external tools and helpers that interact
with ``vdirsyncer``, and considered experimental.
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
"create", it implicitly creates missing collections during sync without user
prompts. This simplifies workflows where collections should be automatically
created on both sides.
- Update TLS-related tests that were failing due to weak MDs. :gh:`903`
- ``pytest-httpserver`` and ``trustme`` are now required for tests.
- ``pytest-localserver`` is no longer required for tests.
- Multithreaded support has been dropped. The ``--max-workers`` flag has been removed.
- A new ``asyncio`` backend is now used. So far, this shows substantial speed
improvements in ``discovery`` and ``metasync``, but little change in `sync`.
This will likely continue improving over time. :gh:`906`
- The ``google`` storage types no longer require ``requests-oauthlib``, but
require ``python-aiohttp-oauthlib`` instead.
- Vdirsyncer no longer includes experimental support for `EteSync
<https://www.etesync.com/>`_. The existing integration had not been supported
for a long time and no longer worked. Support for external storages may be
added if anyone is interested in maintaining an EteSync plugin. EteSync
users should consider using `etesync-dav`_.
- The ``plist`` for macOS has been dropped. It was broken and homebrew
generates their own based on package metadata. macOS users are encouraged to
use that as a reference.
.. _etesync-dav: https://github.com/etesync/etesync-dav
Changes to SSL configuration
----------------------------
Support for ``md5`` and ``sha1`` certificate fingerprints has been dropped. If
you're validating certificate fingerprints, use ``sha256`` instead.
When using a custom ``verify_fingerprint``, CA validation is always disabled.
If ``verify_fingerprint`` is unset, CA verification is always active. Disabling
both features is insecure and no longer supported.
The ``verify`` parameter no longer takes boolean values, it is now optional and
only takes a string to a custom CA for verification.
The ``verify`` and ``verify_fingerprint`` will likely be merged into a single
parameter in future.
Version 0.18.0 Version 0.18.0
============== ==============

View file

@ -2,6 +2,7 @@
prune docker prune docker
prune scripts prune scripts
prune tests/storage/servers prune tests/storage/servers
prune tests/storage/etesync
recursive-include tests/storage/servers/radicale * recursive-include tests/storage/servers/radicale *
recursive-include tests/storage/servers/skip * recursive-include tests/storage/servers/skip *

View file

@ -12,6 +12,9 @@ export REQUIREMENTS := release
# Set this to true if you run vdirsyncer's test as part of e.g. packaging. # Set this to true if you run vdirsyncer's test as part of e.g. packaging.
export DETERMINISTIC_TESTS := false export DETERMINISTIC_TESTS := false
# Run the etesync testsuite.
export ETESYNC_TESTS := false
# Assume to run in CI. Don't use this outside of a virtual machine. It will # Assume to run in CI. Don't use this outside of a virtual machine. It will
# heavily "pollute" your system, such as attempting to install a new Python # heavily "pollute" your system, such as attempting to install a new Python
# systemwide. # systemwide.
@ -20,8 +23,19 @@ export CI := false
# Whether to generate coverage data while running tests. # Whether to generate coverage data while running tests.
export COVERAGE := $(CI) export COVERAGE := $(CI)
# Additional arguments that should be passed to py.test.
PYTEST_ARGS =
# Variables below this line are not very interesting for getting started. # Variables below this line are not very interesting for getting started.
TEST_EXTRA_PACKAGES =
ifeq ($(ETESYNC_TESTS), true)
TEST_EXTRA_PACKAGES += git+https://github.com/etesync/journal-manager@v0.5.2
TEST_EXTRA_PACKAGES += django djangorestframework==3.8.2 wsgi_intercept drf-nested-routers
endif
PYTEST = py.test $(PYTEST_ARGS)
CODECOV_PATH = /tmp/codecov.sh CODECOV_PATH = /tmp/codecov.sh
all: all:
@ -29,21 +43,35 @@ all:
ci-test: ci-test:
curl -s https://codecov.io/bash > $(CODECOV_PATH) curl -s https://codecov.io/bash > $(CODECOV_PATH)
pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/ $(PYTEST) tests/unit/
bash $(CODECOV_PATH) -c bash $(CODECOV_PATH) -c -F unit
$(PYTEST) tests/system/
bash $(CODECOV_PATH) -c -F system
[ "$(ETESYNC_TESTS)" = "false" ] || make test-storage
ci-test-storage: ci-test-storage:
curl -s https://codecov.io/bash > $(CODECOV_PATH) curl -s https://codecov.io/bash > $(CODECOV_PATH)
set -ex; \ set -ex; \
for server in $(DAV_SERVER); do \ for server in $(DAV_SERVER); do \
DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \ DAV_SERVER=$$server $(PYTEST) --cov-append tests/storage; \
done done
bash $(CODECOV_PATH) -c bash $(CODECOV_PATH) -c -F storage
check: test:
ruff check $(PYTEST)
ruff format --diff
#mypy vdirsyncer style:
pre-commit run --all
! git grep -i syncroniz */*
! git grep -i 'text/icalendar' */*
sphinx-build -W -b html ./docs/ ./docs/_build/html/
install-docs:
pip install -Ur docs-requirements.txt
docs:
cd docs && make html
sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/
release-deb: release-deb:
sh scripts/release-deb.sh debian jessie sh scripts/release-deb.sh debian jessie
@ -54,10 +82,12 @@ release-deb:
install-dev: install-dev:
pip install -U pip setuptools wheel pip install -U pip setuptools wheel
pip install -e '.[test,check,docs]' pip install -e .
pip install -Ur test-requirements.txt $(TEST_EXTRA_PACKAGES)
pip install pre-commit
[ "$(ETESYNC_TESTS)" = "false" ] || pip install -Ue .[etesync]
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \ set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
pip install pyproject-dependencies && \ pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
fi fi
.PHONY: docs .PHONY: docs

View file

@ -6,8 +6,8 @@ vdirsyncer
:target: https://builds.sr.ht/~whynothugo/vdirsyncer :target: https://builds.sr.ht/~whynothugo/vdirsyncer
:alt: CI status :alt: CI status
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=main .. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=master
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=main :target: https://codecov.io/github/pimutils/vdirsyncer?branch=master
:alt: Codecov coverage report :alt: Codecov coverage report
.. image:: https://readthedocs.org/projects/vdirsyncer/badge/ .. image:: https://readthedocs.org/projects/vdirsyncer/badge/
@ -23,7 +23,7 @@ vdirsyncer
:alt: Debian packages :alt: Debian packages
.. image:: https://img.shields.io/pypi/l/vdirsyncer.svg .. image:: https://img.shields.io/pypi/l/vdirsyncer.svg
:target: https://github.com/pimutils/vdirsyncer/blob/main/LICENCE :target: https://github.com/pimutils/vdirsyncer/blob/master/LICENCE
:alt: licence: BSD :alt: licence: BSD
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_ - `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
@ -40,7 +40,7 @@ servers. It can also be used to synchronize calendars and/or addressbooks
between two servers directly. between two servers directly.
It aims to be for calendars and contacts what `OfflineIMAP It aims to be for calendars and contacts what `OfflineIMAP
<https://www.offlineimap.org/>`_ is for emails. <http://offlineimap.org/>`_ is for emails.
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/ .. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
@ -59,15 +59,6 @@ Links of interest
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_ * `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
Dockerized
=================
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
Docker environment, you can check out the following GitHub Repository:
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
License License
======= =======

View file

@ -1,75 +0,0 @@
#!/usr/bin/env python3
"""Ask user to resolve a vdirsyncer sync conflict interactively.
Needs a way to ask the user.
The use of https://apps.kde.org/kdialog/ for GNU/Linux is hardcoded.
Depends on python>3.5 and KDialog.
Usage:
Ensure the file is executable and use it in the vdirsyncer.conf file, e.g.
conflict_resolution = ["command", "/home/bern/vdirsyncer/resolve_interactively.py"]
This file is Free Software under the following license:
SPDX-License-Identifier: BSD-3-Clause
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
Author: <bernhard.reiter@intevation.de>
"""
from __future__ import annotations
import re
import subprocess
import sys
from pathlib import Path
KDIALOG = "/usr/bin/kdialog"
SUMMARY_PATTERN = re.compile("^(SUMMARY:.*)$", re.MULTILINE)
def get_summary(icalendar_text: str):
"""Get the first SUMMARY: line from an iCalendar text.
Do not care about the line being continued.
"""
match = re.search(SUMMARY_PATTERN, icalendar_text)
return match[1]
def main(ical1_filename, ical2_filename):
ical1 = ical1_filename.read_text()
ical2 = ical2_filename.read_text()
additional_args = ["--yes-label", "take first"] # return code == 0
additional_args += ["--no-label", "take second"] # return code == 1
additional_args += ["--cancel-label", "do not resolve"] # return code == 2
r = subprocess.run(
args=[
KDIALOG,
"--warningyesnocancel",
"There was a sync conflict, do you prefer the first entry: \n"
f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
"or the second entry:\n"
f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
*additional_args,
]
)
if r.returncode == 2:
# cancel was pressed
return # shall lead to items not changed, because not copied
if r.returncode == 0:
# we want to take the first item, so overwrite the second
ical2_filename.write_text(ical1)
else: # r.returncode == 1, we want the second item, so overwrite the first
ical1_filename.write_text(ical2)
if len(sys.argv) != 3:
sys.stdout.write(__doc__)
else:
main(Path(sys.argv[1]), Path(sys.argv[2]))

43
contrib/vdirsyncer.plist Normal file
View file

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!-- Blueprint for cron-like launchd plist -->
<!-- Replace @@PLACEHOLDERS@@ with appropriate values for your system/settings! -->
<plist version="1.0">
<dict>
<key>EnvironmentVariables</key>
<dict>
<!-- Locale to use for vdirsyncer, e.g. en_US.UTF-8 -->
<key>LANG</key>
<string>@@LOCALE@@</string>
<key>LC_ALL</key>
<string>@@LOCALE@@</string>
</dict>
<key>Label</key>
<string>vdirsyncer</string>
<key>WorkingDirectory</key>
<!-- working directory for vdirsyncer, usually the base directory where
vdirsyncer is installed, e.g. /usr/local/ -->
<string>@@WORKINGDIRECTORY@@</string>
<key>ProgramArguments</key>
<array>
<!-- full path to vdirsyncer binary -->
<string>@@VDIRSYNCER@@</string>
<!-- only log errors -->
<string>-v</string>
<string>ERROR</string>
<string>sync</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>StartInterval</key>
<!-- Sync interval in seconds -->
<integer>@@SYNCINTERVALL@@</integer>
<!-- For logging, redirect stdout & stderr -->
<!-- <key>StandardErrorPath</key> -->
<!-- Full path to stderr logfile, e.g. /tmp/vdirsyncer_err.log -->
<!-- <string>@@STDERRFILE@@</string> -->
<!-- Full path to stdout logfile, e.g. /tmp/vdirsyncer_out.log -->
<!-- <key>StandardOutPath</key> -->
<!-- <string>@@STDOUTFILE@@</string> -->
</dict>
</plist>

View file

@ -1,7 +1,6 @@
[Unit] [Unit]
Description=Synchronize calendars and contacts Description=Synchronize calendars and contacts
Documentation=https://vdirsyncer.readthedocs.org/ Documentation=https://vdirsyncer.readthedocs.org/
StartLimitBurst=2
[Service] [Service]
ExecStart=/usr/bin/vdirsyncer sync ExecStart=/usr/bin/vdirsyncer sync

3
docs-requirements.txt Normal file
View file

@ -0,0 +1,3 @@
sphinx != 1.4.7
sphinx_rtd_theme
setuptools_scm

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import datetime import datetime
import os import os
@ -20,7 +18,7 @@ copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
release = get_distribution("vdirsyncer").version release = get_distribution("vdirsyncer").version
version = ".".join(release.split(".")[:2]) # The short X.Y version. version = ".".join(release.split(".")[:2]) # The short X.Y version.
rst_epilog = f".. |vdirsyncer_version| replace:: {release}" rst_epilog = ".. |vdirsyncer_version| replace:: %s" % release
exclude_patterns = ["_build"] exclude_patterns = ["_build"]
@ -37,7 +35,9 @@ except ImportError:
html_theme = "default" html_theme = "default"
if not on_rtd: if not on_rtd:
print("-" * 74) print("-" * 74)
print("Warning: sphinx-rtd-theme not installed, building with default theme.") print(
"Warning: sphinx-rtd-theme not installed, building with default " "theme."
)
print("-" * 74) print("-" * 74)
html_static_path = ["_static"] html_static_path = ["_static"]
@ -76,7 +76,7 @@ def github_issue_role(name, rawtext, text, lineno, inliner, options=None, conten
try: try:
issue_num = int(text) issue_num = int(text)
if issue_num <= 0: if issue_num <= 0:
raise ValueError raise ValueError()
except ValueError: except ValueError:
msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno) msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg) prb = inliner.problematic(rawtext, rawtext, msg)

View file

@ -61,8 +61,7 @@ Pair Section
sync`` is executed. See also :ref:`collections_tutorial`. sync`` is executed. See also :ref:`collections_tutorial`.
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
autodiscovery on a specific storage. It means all the collections on side A / autodiscovery on a specific storage.
side B.
If the collection you want to sync doesn't have the same name on each side, If the collection you want to sync doesn't have the same name on each side,
you may also use a value of the form ``["config_name", "name_a", "name_b"]``. you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
@ -72,8 +71,8 @@ Pair Section
Examples: Examples:
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all - ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the
the collections from side B, and also the collections named "foo" and "bar". collections from side B, and also the collections named "foo" and "bar".
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all - ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
existing collections on either side. existing collections on either side.
@ -117,26 +116,10 @@ Pair Section
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer - ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
metasync`` is executed. Example:: metasync`` is executed. Example::
metadata = ["color", "displayname", "description", "order"] metadata = ["color", "displayname"]
This synchronizes the following properties: This synchronizes the ``color`` and the ``displayname`` properties. The
``conflict_resolution`` parameter applies here as well.
- color: ``http://apple.com/ns/ical/:calendar-color``
- displayname: ``DAV:displayname``
- description: ``CalDAV:calendar-description`` and ``CardDAV:addressbook-description``
- order: ``http://apple.com/ns/ical/:calendar-order``
The ``conflict_resolution`` parameter applies for these properties too.
.. _implicit_def:
- ``implicit``: Opt into implicitly creating collections. Example::
implicit = "create"
When set to "create", missing collections are automatically created on both
sides during sync without prompting the user. This simplifies workflows where
all collections should be synchronized bidirectionally.
.. _storage_config: .. _storage_config:
@ -186,7 +169,7 @@ CalDAV and CardDAV
url = "..." url = "..."
#username = "" #username = ""
#password = "" #password = ""
#verify = /path/to/custom_ca.pem #verify = true
#auth = null #auth = null
#useragent = "vdirsyncer/0.16.4" #useragent = "vdirsyncer/0.16.4"
#verify_fingerprint = null #verify_fingerprint = null
@ -219,10 +202,12 @@ CalDAV and CardDAV
:param url: Base URL or an URL to a calendar. :param url: Base URL or an URL to a calendar.
:param username: Username for authentication. :param username: Username for authentication.
:param password: Password for authentication. :param password: Password for authentication.
:param verify: Optional. Local path to a self-signed SSL certificate. :param verify: Verify SSL certificate, default True. This can also be a
See :ref:`ssl-tutorial` for more information. local path to a self-signed SSL certificate. See :ref:`ssl-tutorial`
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected for more information.
server certificate. See :ref:`ssl-tutorial` for more information. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
expected server certificate. See :ref:`ssl-tutorial` for more
information.
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The :param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
default is preemptive Basic auth, sending credentials even if server default is preemptive Basic auth, sending credentials even if server
didn't request them. This saves from an additional roundtrip per didn't request them. This saves from an additional roundtrip per
@ -244,20 +229,21 @@ CalDAV and CardDAV
url = "..." url = "..."
#username = "" #username = ""
#password = "" #password = ""
#verify = /path/to/custom_ca.pem #verify = true
#auth = null #auth = null
#useragent = "vdirsyncer/0.16.4" #useragent = "vdirsyncer/0.16.4"
#verify_fingerprint = null #verify_fingerprint = null
#auth_cert = null #auth_cert = null
#use_vcard_4 = false
:param url: Base URL or an URL to an addressbook. :param url: Base URL or an URL to an addressbook.
:param username: Username for authentication. :param username: Username for authentication.
:param password: Password for authentication. :param password: Password for authentication.
:param verify: Optional. Local path to a self-signed SSL certificate. :param verify: Verify SSL certificate, default True. This can also be a
See :ref:`ssl-tutorial` for more information. local path to a self-signed SSL certificate. See
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected :ref:`ssl-tutorial` for more information.
server certificate. See :ref:`ssl-tutorial` for more information. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected
server certificate. See :ref:`ssl-tutorial` for
more information.
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The :param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
default is preemptive Basic auth, sending credentials even if default is preemptive Basic auth, sending credentials even if
server didn't request them. This saves from an additional server didn't request them. This saves from an additional
@ -267,7 +253,6 @@ CalDAV and CardDAV
certificate and the key or a list of paths to the files certificate and the key or a list of paths to the files
with them. with them.
:param useragent: Default ``vdirsyncer``. :param useragent: Default ``vdirsyncer``.
:param use_vcard_4: Whether the server use vCard 4.0.
Google Google
++++++ ++++++
@ -281,14 +266,6 @@ in terms of data safety**. See `this blog post
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back <https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
up your data. up your data.
Another caveat is that Google group labels are not synced with vCard's
`CATEGORIES <https://www.rfc-editor.org/rfc/rfc6350#section-6.7.1>`_ property
(also see :gh:`814` and
`upstream issue #36761530 <https://issuetracker.google.com/issues/36761530>`_
for reference) and the
`BDAY <https://www.rfc-editor.org/rfc/rfc6350#section-6.2.5>`_ property is not
synced when only partial date information is present (e.g. the year is missing).
At first run you will be asked to authorize application for Google account At first run you will be asked to authorize application for Google account
access. access.
@ -300,29 +277,25 @@ Furthermore you need to register vdirsyncer as an application yourself to
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
Service to hardcode those into opensource software [googleterms]_: Service to hardcode those into opensource software [googleterms]_:
1. Go to the `Google API Manager <https://console.developers.google.com>`_ 1. Go to the `Google API Manager <https://console.developers.google.com>`_ and
create a new project under any name.
2. Create a new project under any name.
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the 2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
Calendar and Contacts APIs, those are different and won't work). There should Calendar and Contacts APIs, those are different and won't work). There should
be a search box where you can just enter those terms. be a searchbox where you can just enter those terms.
3. In the sidebar, select "Credentials", then "Create Credentials" and create a 3. In the sidebar, select "Credentials" and create a new "OAuth Client ID". The
new "OAuth Client ID". application type is "Other".
You'll be prompted to create a OAuth consent screen first. Fill out that You'll be prompted to create a OAuth consent screen first. Fill out that
form however you like. form however you like.
After setting up the consent screen, finish creating the new "OAuth Client
ID'. The correct application type is "Desktop application".
4. Finally you should have a Client ID and a Client secret. Provide these in 4. Finally you should have a Client ID and a Client secret. Provide these in
your storage config. your storage config.
The ``token_file`` parameter should be a path to a file where vdirsyncer can The ``token_file`` parameter should be a filepath where vdirsyncer can later
later store authentication-related data. You do not need to create the file store authentication-related data. You do not need to create the file itself
itself or write anything to it. or write anything to it.
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_, .. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
section "Confidential Matters". section "Confidential Matters".
@ -330,7 +303,7 @@ itself or write anything to it.
.. note:: .. note::
You need to configure which calendars Google should offer vdirsyncer using You need to configure which calendars Google should offer vdirsyncer using
a secret `settings page a rather hidden `settings page
<https://calendar.google.com/calendar/syncselect>`_. <https://calendar.google.com/calendar/syncselect>`_.
.. storage:: google_calendar .. storage:: google_calendar
@ -370,9 +343,55 @@ itself or write anything to it.
:param client_id/client_secret: OAuth credentials, obtained from the Google :param client_id/client_secret: OAuth credentials, obtained from the Google
API Manager. API Manager.
The current flow is not ideal, but Google has deprecated the previous APIs used EteSync
for this without providing a suitable replacement. See :gh:`975` for discussion +++++++
on the topic.
`EteSync <https://www.etesync.com/>`_ is a new cloud provider for end to end
encrypted contacts and calendar storage. Vdirsyncer contains **experimental**
support for it.
To use it, you need to install some optional dependencies::
pip install vdirsyncer[etesync]
On first usage you will be prompted for the service password and the encryption
password. Neither are stored.
.. storage:: etesync_contacts
Contacts for etesync.
::
[storage example_for_etesync_contacts]
email = ...
secrets_dir = ...
#server_path = ...
#db_path = ...
:param email: The email address of your account.
:param secrets_dir: A directory where vdirsyncer can store the encryption
key and authentication token.
:param server_url: Optional. URL to the root of your custom server.
:param db_path: Optional. Use a different path for the database.
.. storage:: etesync_calendars
Calendars for etesync.
::
[storage example_for_etesync_calendars]
email = ...
secrets_dir = ...
#server_path = ...
#db_path = ...
:param email: The email address of your account.
:param secrets_dir: A directory where vdirsyncer can store the encryption
key and authentication token.
:param server_url: Optional. URL to the root of your custom server.
:param db_path: Optional. Use a different path for the database.
Local Local
+++++ +++++
@ -389,7 +408,6 @@ Local
fileext = "..." fileext = "..."
#encoding = "utf-8" #encoding = "utf-8"
#post_hook = null #post_hook = null
#pre_deletion_hook = null
#fileignoreext = ".tmp" #fileignoreext = ".tmp"
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
@ -411,8 +429,6 @@ Local
:param post_hook: A command to call for each item creation and :param post_hook: A command to call for each item creation and
modification. The command will be called with the path of the modification. The command will be called with the path of the
new/updated file. new/updated file.
:param pre_deletion_hook: A command to call for each item deletion.
The command will be called with the path of the deleted file.
:param fileeignoreext: The file extention to ignore. It is only useful :param fileeignoreext: The file extention to ignore. It is only useful
if fileext is set to the empty string. The default is ``.tmp``. if fileext is set to the empty string. The default is ``.tmp``.
@ -494,7 +510,6 @@ leads to an error.
[storage holidays_remote] [storage holidays_remote]
type = "http" type = "http"
url = https://example.com/holidays_from_hicksville.ics url = https://example.com/holidays_from_hicksville.ics
#filter_hook = null
Too many WebCAL providers generate UIDs of all ``VEVENT``-components Too many WebCAL providers generate UIDs of all ``VEVENT``-components
on-the-fly, i.e. all UIDs change every time the calendar is downloaded. on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
@ -507,10 +522,12 @@ leads to an error.
:param url: URL to the ``.ics`` file. :param url: URL to the ``.ics`` file.
:param username: Username for authentication. :param username: Username for authentication.
:param password: Password for authentication. :param password: Password for authentication.
:param verify: Optional. Local path to a self-signed SSL certificate. :param verify: Verify SSL certificate, default True. This can also be a
See :ref:`ssl-tutorial` for more information. local path to a self-signed SSL certificate. See :ref:`ssl-tutorial`
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected for more information.
server certificate. See :ref:`ssl-tutorial` for more information. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
expected server certificate. See :ref:`ssl-tutorial` for more
information.
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The :param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
default is preemptive Basic auth, sending credentials even if server default is preemptive Basic auth, sending credentials even if server
didn't request them. This saves from an additional roundtrip per didn't request them. This saves from an additional roundtrip per
@ -519,8 +536,3 @@ leads to an error.
:param auth_cert: Optional. Either a path to a certificate with a client :param auth_cert: Optional. Either a path to a certificate with a client
certificate and the key or a list of paths to the files with them. certificate and the key or a list of paths to the files with them.
:param useragent: Default ``vdirsyncer``. :param useragent: Default ``vdirsyncer``.
:param filter_hook: Optional. A filter command to call for each fetched
item, passed in raw form to stdin and returned via stdout.
If nothing is returned by the filter command, the item is skipped.
This can be used to alter fields as needed when dealing with providers
generating malformed events.

View file

@ -9,4 +9,7 @@ Support and Contact
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for * Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
concrete bug reports and feature requests. concrete bug reports and feature requests.
* For security issues, contact ``contact@pimutils.org``. * Lastly, you can also `contact the author directly
<https://unterwaditzer.net/contact.html>`_. Do this for security issues. If
that doesn't work out (i.e. if I don't respond within one week), use
``contact@pimutils.org``.

View file

@ -79,20 +79,22 @@ For many patches, it might suffice to just let CI run the tests. However,
CI is slow, so you might want to run them locally too. For this, set up a CI is slow, so you might want to run them locally too. For this, set up a
virtualenv_ and run this inside of it:: virtualenv_ and run this inside of it::
# Install development dependencies, including: # install:
# - vdirsyncer from the repo into the virtualenv # - vdirsyncer from the repo into the virtualenv
# - style checks and formatting (ruff) # - stylecheckers (flake8) and code formatters (autopep8)
make install-dev make install-dev
# Install git commit hook for some extra linting and checking # Install git commit hook for some extra linting and checking
pre-commit install pre-commit install
# Install development dependencies
make install-dev
Then you can run:: Then you can run::
pytest # The normal testsuite make test # The normal testsuite
pre-commit run --all # Run all linters (which also run via pre-commit) make style # Stylechecker
make -C docs html # Build the HTML docs, output is at docs/_build/html/ make docs # Build the HTML docs, output is at docs/_build/html/
make -C docs linkcheck # Check docs for any broken links
The ``Makefile`` has a lot of options that allow you to control which tests are The ``Makefile`` has a lot of options that allow you to control which tests are
run, and which servers are tested. Take a look at its code where they are all run, and which servers are tested. Take a look at its code where they are all

View file

@ -2,14 +2,23 @@
Donations Donations
========= =========
vdirsyncer is and will always be free and open source software. We appreciate
sponsors willing to fund our continued work on it.
If you found my work useful, please consider donating. Thank you! If you found my work useful, please consider donating. Thank you!
- Bitcoin: ``13p42uWDL62bNRH3KWA6cSpSgvnHy1fs2E``. - Bitcoin: ``16sSHxZm263WHR9P9PJjCxp64jp9ooXKVt``
- Sponsor via one-time tips or recurring donations `via Ko-fi`_.
- Sponsor via recurring donations `via liberapay`_.
.. _via Ko-fi: https://ko-fi.com/whynothugo - `PayPal.me <https://www.paypal.me/untitaker>`_
.. _via liberapay: https://liberapay.com/WhyNotHugo/
- `Bountysource <https://www.bountysource.com/teams/vdirsyncer>`_ is useful for
funding work on a specific GitHub issue.
- There's also `Bountysource Salt
<https://salt.bountysource.com/teams/vdirsyncer>`_, for one-time and
recurring donations.
- Donations via Bountysource are publicly listed. Use PayPal if you dislike
that.
- `Flattr
<https://flattr.com/submit/auto?user_id=untitaker&url=https%3A%2F%2Fgithub.com%2Fpimutils%2Fvdirsyncer>`_
or `Gratipay <https://gratipay.com/vdirsyncer/>`_ can be used for
recurring donations.

View file

@ -7,18 +7,17 @@ Installation
OS/distro packages OS/distro packages
------------------ ------------------
The following packages are community-contributed and were up-to-date at the The following packages are user-contributed and were up-to-date at the time of
time of writing: writing:
- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_ - `ArchLinux <https://www.archlinux.org/packages/community/any/vdirsyncer/>`_
- `Ubuntu and Debian, x86_64-only - `Ubuntu and Debian, x86_64-only
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist <https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
in the official repositories but may be out of date) in the official repositories but may be out of date)
- `GNU Guix <https://packages.guix.gnu.org/packages/vdirsyncer/>`_ - `GNU Guix <https://www.gnu.org/software/guix/package-list.html#vdirsyncer>`_
- `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_ - `OS X (homebrew) <http://braumeister.org/formula/vdirsyncer>`_
- `NetBSD <https://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/time/py-vdirsyncer/index.html>`_ - `BSD (pkgsrc) <http://pkgsrc.se/time/py-vdirsyncer>`_
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_ - `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
- `Slackware (SlackBuild at Slackbuilds.org) <https://slackbuilds.org/repository/15.0/network/vdirsyncer/>`_
We only support the latest version of vdirsyncer, which is at the time of this We only support the latest version of vdirsyncer, which is at the time of this
writing |vdirsyncer_version|. Please **do not file bugs if you use an older writing |vdirsyncer_version|. Please **do not file bugs if you use an older
@ -42,53 +41,27 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
use Python's package manager "pip". First, you'll have to check that the use Python's package manager "pip". First, you'll have to check that the
following things are installed: following things are installed:
- Python 3.9 to 3.13 and pip. - Python 3.7+ and pip.
- ``libxml`` and ``libxslt`` - ``libxml`` and ``libxslt``
- ``zlib`` - ``zlib``
- Linux or macOS. **Windows is not supported**, see :gh:`535`. - Linux or OS X. **Windows is not supported**, see :gh:`535`.
On Linux systems, using the distro's package manager is the best On Linux systems, using the distro's package manager is the best
way to do this, for example, using Ubuntu:: way to do this, for example, using Ubuntu::
sudo apt-get install libxml2 libxslt1.1 zlib1g python3 sudo apt-get install libxml2 libxslt1.1 zlib1g python
Then you have several options. The following text applies for most Python Then you have several options. The following text applies for most Python
software by the way. software by the way.
pipx: The clean, easy way
~~~~~~~~~~~~~~~~~~~~~~~~~
pipx_ is a new package manager for Python-based software that automatically
sets up a virtual environment for each program it installs. Please note that
installing via pipx will not include manual pages nor systemd services.
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
Assuming that pipx is installed, vdirsyncer can be installed with::
pipx install vdirsyncer
It can later be updated to the latest version with::
pipx upgrade vdirsyncer
And can be uninstalled with::
pipx uninstall vdirsyncer
This last command will remove vdirsyncer and any dependencies installed into
the above location.
.. _pipx: https://github.com/pipxproject/pipx
The dirty, easy way The dirty, easy way
~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~
If pipx is not available on your distribution, the easiest way to install The easiest way to install vdirsyncer at this point would be to run::
vdirsyncer at this point would be to run::
pip install --ignore-installed vdirsyncer pip install --user --ignore-installed vdirsyncer
- ``--user`` is to install without root rights (into your home directory)
- ``--ignore-installed`` is to work around Debian's potentially broken packages - ``--ignore-installed`` is to work around Debian's potentially broken packages
(see :ref:`debian-urllib3`). (see :ref:`debian-urllib3`).
@ -119,4 +92,25 @@ This method has two advantages:
distro-specific issues. distro-specific issues.
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely. - You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
The clean, easy way
~~~~~~~~~~~~~~~~~~~
pipx_ is a new package manager for Python-based software that automatically
sets up a virtualenv for each program you install. Assuming you have it
installed on your operating system, you can do::
pipx install vdirsyncer
and ``~/.local/pipx/venvs/vdirsyncer`` will be your new vdirsyncer installation. To
update vdirsyncer to the latest version::
pipx upgrade vdirsyncer
If you're done with vdirsyncer, you can do::
pipx uninstall vdirsyncer
and vdirsyncer will be uninstalled, including its dependencies.
.. _virtualenv: https://virtualenv.readthedocs.io/ .. _virtualenv: https://virtualenv.readthedocs.io/
.. _pipx: https://github.com/pipxproject/pipx

View file

@ -38,12 +38,6 @@ You can fetch the username as well::
Or really any kind of parameter in a storage section. Or really any kind of parameter in a storage section.
You can also pass the command as a string to be executed in a shell::
[storage foo]
...
password.fetch = ["shell", "~/.local/bin/get-my-password | head -n1"]
With pass_ for example, you might find yourself writing something like this in With pass_ for example, you might find yourself writing something like this in
your configuration file:: your configuration file::
@ -78,19 +72,3 @@ You can also simply prompt for the password::
type = "caldav" type = "caldav"
username = "myusername" username = "myusername"
password.fetch = ["prompt", "Password for CalDAV"] password.fetch = ["prompt", "Password for CalDAV"]
Environment variable
===============
To read the password from an environment variable::
[storage foo]
type = "caldav"
username = "myusername"
password.fetch = ["command", "printenv", "DAV_PW"]
This is especially handy if you use the same password multiple times
(say, for a CardDAV and a CalDAV storage).
On bash, you can read and export the password without printing::
read -s DAV_PW "DAV Password: " && export DAV_PW

View file

@ -46,16 +46,15 @@ You can install the all development dependencies with::
make install-dev make install-dev
You probably don't want this since it will use pip to download the You probably don't want this since it will use pip to download the
dependencies. Alternatively test dependencies are listed as ``test`` optional dependencies. Alternatively you can find the testing dependencies in
dependencies in ``pyproject.toml``, again with lower-bound version ``test-requirements.txt``, again with lower-bound version requirements.
requirements.
You also have to have vdirsyncer fully installed at this point. Merely You also have to have vdirsyncer fully installed at this point. Merely
``cd``-ing into the tarball will not be sufficient. ``cd``-ing into the tarball will not be sufficient.
Running the tests happens with:: Running the tests happens with::
pytest make test
Hypothesis will randomly generate test input. If you care about deterministic Hypothesis will randomly generate test input. If you care about deterministic
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``:: tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
@ -74,11 +73,10 @@ Using Sphinx_ you can generate the documentation you're reading right now in a
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
take care of the HTML docs' formatting. take care of the HTML docs' formatting.
You can find a list of dependencies in ``pyproject.toml``, in the You can find a list of dependencies in ``docs-requirements.txt``. Again, you
``project.optional-dependencies`` section as ``docs``. Again, you can install can install those using pip with::
those using pip with::
pip install '.[docs]' make install-docs
Then change into the ``docs/`` directory and build whatever format you want Then change into the ``docs/`` directory and build whatever format you want
using the ``Makefile`` in there (run ``make`` for the formats you can build). using the ``Makefile`` in there (run ``make`` for the formats you can build).

View file

@ -18,5 +18,5 @@ package that don't play well with packages assuming a normal ``requests``. This
is due to stubbornness on both sides. is due to stubbornness on both sides.
See :gh:`82` and :gh:`140` for past discussions. You have one option to work See :gh:`82` and :gh:`140` for past discussions. You have one option to work
around this, that is, to install vdirsyncer in a virtual environment, see around this, that is, to install vdirsyncer in a virtualenv, see
:ref:`manual-installation`. :ref:`manual-installation`.

View file

@ -14,14 +14,21 @@ To pin the certificate by fingerprint::
[storage foo] [storage foo]
type = "caldav" type = "caldav"
... ...
verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E" verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA"
#verify = false # Optional: Disable CA validation, useful for self-signed certs
SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported. SHA1-, SHA256- or MD5-Fingerprints can be used. They're detected by their
CA validation is disabled when pinning a fingerprint. length.
You can use the following command for obtaining a SHA256 fingerprint:: You can use the following command for obtaining a SHA-1 fingerprint::
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256 echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint
Note that ``verify_fingerprint`` doesn't suffice for vdirsyncer to work with
self-signed certificates (or certificates that are not in your trust store). You
most likely need to set ``verify = false`` as well. This disables verification
of the SSL certificate's expiration time and the existence of it in your trust
store, all that's verified now is the fingerprint.
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
that you can forget about all of that. It is easier to deploy a free that you can forget about all of that. It is easier to deploy a free
@ -40,16 +47,22 @@ To point vdirsyncer to a custom set of root CAs::
... ...
verify = "/path/to/cert.pem" verify = "/path/to/cert.pem"
Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext Vdirsyncer uses the requests_ library, which, by default, `uses its own set of
https://docs.python.org/3/library/ssl.html#ssl.SSLContext`_ by default. trusted CAs
<http://www.python-requests.org/en/latest/user/advanced/#ca-certificates>`_.
There are cases where certificate validation fails even though you can access However, the actual behavior depends on how you have installed it. Many Linux
the server fine through e.g. your browser. This usually indicates that your distributions patch their ``python-requests`` package to use the system
installation of ``python`` or the ``aiohttp`` or library is somehow broken. In certificate CAs. Normally these two stores are similar enough for you to not
such cases, it makes sense to explicitly set ``verify`` or care.
``verify_fingerprint`` as shown above.
.. _aiohttp: https://docs.aiohttp.org/en/stable/index.html But there are cases where certificate validation fails even though you can
access the server fine through e.g. your browser. This usually indicates that
your installation of the ``requests`` library is somehow broken. In such cases,
it makes sense to explicitly set ``verify`` or ``verify_fingerprint`` as shown
above.
.. _requests: http://www.python-requests.org/
.. _ssl-client-certs: .. _ssl-client-certs:

View file

@ -16,7 +16,7 @@ Configuration
.. note:: .. note::
- The `config.example from the repository - The `config.example from the repository
<https://github.com/pimutils/vdirsyncer/blob/main/config.example>`_ <https://github.com/pimutils/vdirsyncer/blob/master/config.example>`_
contains a very terse version of this. contains a very terse version of this.
- In this example we set up contacts synchronization, but calendar sync - In this example we set up contacts synchronization, but calendar sync
@ -176,11 +176,8 @@ as a file called ``color`` within the calendar folder.
More information about collections More information about collections
---------------------------------- ----------------------------------
"Collection" is a collective term for addressbooks and calendars. A Cardav or "Collection" is a collective term for addressbooks and calendars. Each
Caldav server can contains several "collections" which correspond to several collection from a storage has a "collection name", a unique identifier for each
addressbooks or calendar.
Each collection from a storage has a "collection name", a unique identifier for each
collection. In the case of :storage:`filesystem`-storage, this is the name of the collection. In the case of :storage:`filesystem`-storage, this is the name of the
directory that represents the collection, in the case of the DAV-storages this directory that represents the collection, in the case of the DAV-storages this
is the last segment of the URL. We use this identifier in the ``collections`` is the last segment of the URL. We use this identifier in the ``collections``

View file

@ -37,7 +37,7 @@ Further applications, with missing pages:
.. _khal: http://lostpackets.de/khal/ .. _khal: http://lostpackets.de/khal/
.. _dayplanner: http://www.day-planner.org/ .. _dayplanner: http://www.day-planner.org/
.. _Orage: https://gitlab.xfce.org/apps/orage .. _Orage: http://www.kolumbus.fi/~w408237/orage/
.. _rainlendar: http://www.rainlendar.net/ .. _rainlendar: http://www.rainlendar.net/
.. _khard: https://github.com/scheibler/khard/ .. _khard: https://github.com/scheibler/khard/
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c .. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c

View file

@ -13,8 +13,8 @@ minutes).
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_ unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``. into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.service .. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.service
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.timer .. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.timer
Activation Activation
---------- ----------

View file

@ -48,9 +48,10 @@ instance to subfolders of ``~/.calendar/``.
Setting up todoman Setting up todoman
================== ==================
Write this to ``~/.config/todoman/config.py``:: Write this to ``~/.config/todoman/todoman.conf``::
path = "~/.calendars/*" [main]
path = ~/.calendars/*
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``, The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
which is exactly the tasklists we want. Now you can use ``todoman`` as which is exactly the tasklists we want. Now you can use ``todoman`` as

View file

@ -56,11 +56,8 @@ have any file extensions.
known from CSS, for example) are allowed. The prefixing ``#`` must be known from CSS, for example) are allowed. The prefixing ``#`` must be
present. present.
- Files called ``displayname`` and ``description`` contain a UTF-8 encoded label/ - A file called ``displayname`` contains a UTF-8 encoded label that may be used
description, that may be used to represent the vdir in UIs. to represent the vdir in UIs.
- A file called ``order`` inside the vdir includes the relative order
of the calendar, a property that is only relevant in UI design.
Writing to vdirs Writing to vdirs
================ ================

View file

@ -50,6 +50,7 @@ program chosen:
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other * Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
e.g. DAVx⁵_ or other apps bundled with smartphones. e.g. DAVx⁵_ or the apps by dmfs_.
.. _DAVx⁵: https://www.davx5.com/ .. _DAVx⁵: https://www.davx5.com/
.. _dmfs: https://dmfs.org/

View file

@ -1,29 +0,0 @@
# Push new version to PyPI.
#
# Usage: hut builds submit publish-release.yaml --follow
image: alpine/edge
packages:
- py3-build
- py3-pip
- py3-setuptools
- py3-setuptools_scm
- py3-wheel
- twine
sources:
- https://github.com/pimutils/vdirsyncer
secrets:
- a36c8ba3-fba0-4338-b402-6aea0fbe771e # PyPI token.
environment:
CI: true
tasks:
- check-tag: |
cd vdirsyncer
git fetch --tags
# Stop here unless this is a tag.
git describe --exact-match --tags || complete-build
- publish: |
cd vdirsyncer
python -m build --no-isolation
twine upload --non-interactive dist/*

View file

@ -1,114 +0,0 @@
# Vdirsyncer synchronizes calendars and contacts.
#
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
# how to package vdirsyncer.
[build-system]
requires = ["setuptools>=64", "setuptools_scm>=8"]
build-backend = "setuptools.build_meta"
[project]
name = "vdirsyncer"
authors = [
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
]
description = "Synchronize calendars and contacts"
readme = "README.rst"
requires-python = ">=3.9"
keywords = ["todo", "task", "icalendar", "cli"]
license = "BSD-3-Clause"
license-files = ["LICENSE"]
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Console",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.9",
"Topic :: Internet",
"Topic :: Office/Business :: Scheduling",
"Topic :: Utilities",
]
dependencies = [
"click>=5.0,<9.0",
"click-log>=0.3.0,<0.5.0",
"requests>=2.20.0",
"aiohttp>=3.8.2,<4.0.0",
"aiostream>=0.4.3,<0.8.0",
"tenacity>=9.0.0",
]
dynamic = ["version"]
[project.optional-dependencies]
google = ["aiohttp-oauthlib"]
test = [
"hypothesis>=6.72.0,<7.0.0",
"pytest",
"pytest-cov",
"pytest-httpserver",
"trustme",
"pytest-asyncio",
"aioresponses",
]
docs = [
"sphinx!=1.4.7",
"sphinx_rtd_theme",
"setuptools_scm",
]
check = [
"mypy",
"ruff",
"types-docutils",
"types-requests",
"types-setuptools",
]
[project.scripts]
vdirsyncer = "vdirsyncer.cli:app"
[tool.ruff.lint]
extend-select = [
"B0",
"C4",
"E",
"I",
"RSE",
"SIM",
"TID",
"UP",
"W",
]
[tool.ruff.lint.isort]
force-single-line = true
required-imports = ["from __future__ import annotations"]
[tool.pytest.ini_options]
addopts = """
--tb=short
--cov-config .coveragerc
--cov=vdirsyncer
--cov-report=term-missing:skip-covered
--no-cov-on-fail
--color=yes
"""
# filterwarnings=error
asyncio_default_fixture_loop_scope = "function"
[tool.mypy]
ignore_missing_imports = true
[tool.coverage.report]
exclude_lines = [
"if TYPE_CHECKING:",
]
[tool.setuptools.packages.find]
include = ["vdirsyncer*"]
[tool.setuptools_scm]
write_to = "vdirsyncer/version.py"
version_scheme = "no-guess-dev"

View file

@ -1,49 +0,0 @@
#!/bin/bash
#
# Build a .deb package of vdirsyncer from an sdist using fpm.
#
# This script is meant to be run inside a dedicated container,
# and not interactively. It expects the project sdist (*.tar.gz) to be
# mounted at /source/ and copies the resulting .deb back there.
set -ex
# Avoid interactive prompts from apt during package installation.
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y build-essential fakeroot debhelper git
apt-get install -y python3-all python3-pip python3-venv
apt-get install -y ruby ruby-dev
pip3 install virtualenv virtualenv-tools3
virtualenv -p python3 /vdirsyncer/env/
# fpm is a Ruby tool that builds the .deb from a virtualenv.
gem install fpm
# Replace the pip-installed virtualenv with a thin wrapper around the
# stdlib venv module, which fpm invokes by name.
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
pip3 uninstall -y virtualenv
echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
chmod +x /usr/local/bin/virtualenv
cp -r /source/ /vdirsyncer/vdirsyncer/
cd /vdirsyncer/vdirsyncer/ || exit 2
mkdir /vdirsyncer/pkgs/
# Derive the package version from the sdist filename
# (e.g. vdirsyncer-1.2.3.tar.gz -> 1.2.3); ".dev" becomes "~" so that
# Debian version ordering treats dev builds as pre-releases.
basename -- *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
# requirements.txt points fpm at the sdist, with the [google] extra enabled.
# XXX: Do I really not want google support included?
(echo -n *.tar.gz; echo '[google]') | tee requirements.txt
fpm --verbose \
--input-type virtualenv \
--output-type deb \
--name "vdirsyncer-latest" \
--version "$(cat version)" \
--prefix /opt/venvs/vdirsyncer-latest \
--depends python3 \
requirements.txt
mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
cd /vdirsyncer/pkgs/
# Smoke-test: install the freshly built package inside this container.
dpkg -i -- *.deb
# Check that it works:
LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
# Hand the built package back to the host via the mounted /source/ dir.
cp -- *.deb /source/

42
scripts/dpkg.Dockerfile Normal file
View file

@ -0,0 +1,42 @@
# Build a .deb of vdirsyncer with fpm inside the target distro's image.
# Build args select the base image, e.g. distro=debian distrover=bullseye.
ARG distro
ARG distrover
FROM $distro:$distrover
RUN apt-get update
RUN apt-get install -y build-essential fakeroot debhelper git
RUN apt-get install -y python3-all python3-pip python3-venv
RUN apt-get install -y ruby ruby-dev
# fpm builds the .deb; package_cloud uploads it (used by the caller).
RUN gem install fpm package_cloud
RUN pip3 install virtualenv virtualenv-tools3
RUN virtualenv -p python3 /vdirsyncer/env/
# Replace pip's virtualenv with a wrapper around the stdlib venv module,
# which fpm invokes by name.
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
RUN pip3 uninstall -y virtualenv
RUN echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
RUN chmod +x /usr/local/bin/virtualenv
# The build context is expected to contain the project sdist (*.tar.gz).
COPY . /vdirsyncer/vdirsyncer/
WORKDIR /vdirsyncer/vdirsyncer/
RUN mkdir /vdirsyncer/pkgs/
# Derive the package version from the sdist filename; ".dev" becomes "~"
# so Debian version ordering treats dev builds as pre-releases.
RUN basename *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
# requirements.txt points fpm at the sdist with the [google] extra enabled.
RUN (echo -n *.tar.gz; echo '[google]') | tee requirements.txt
RUN fpm --verbose \
--input-type virtualenv \
--output-type deb \
--name "vdirsyncer-latest" \
--version "$(cat version)" \
--prefix /opt/venvs/vdirsyncer-latest \
--depends python3 \
requirements.txt
RUN mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
WORKDIR /vdirsyncer/pkgs/
# Smoke-test: install the freshly built package inside the image.
RUN dpkg -i *.deb
# Check that it works:
RUN LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version

View file

@ -1,56 +1,26 @@
#!/bin/sh #!/bin/sh
set -xeu set -xe
SCRIPT_PATH=$(realpath "$0") DISTRO=$1
SCRIPT_DIR=$(dirname "$SCRIPT_PATH") DISTROVER=$2
# E.g.: debian, ubuntu NAME="vdirsyncer-${DISTRO}-${DISTROVER}:latest"
DISTRO=${DISTRO:1}
# E.g.: bullseye, bookworm
DISTROVER=${DISTROVER:2}
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
CONTEXT="$(mktemp -d)" CONTEXT="$(mktemp -d)"
DEST_DIR="$SCRIPT_DIR/../$DISTRO-$DISTROVER"
cleanup() {
rm -rf "$CONTEXT"
}
trap cleanup EXIT
# Prepare files.
cp scripts/_build_deb_in_container.bash "$CONTEXT"
python setup.py sdist -d "$CONTEXT" python setup.py sdist -d "$CONTEXT"
docker run -it \ # Build the package in a container with the right distro version.
--name "$CONTAINER_NAME" \ docker build \
--volume "$CONTEXT:/source" \ --build-arg distro=$DISTRO \
"$DISTRO:$DISTROVER" \ --build-arg distrover=$DISTROVER \
bash /source/_build_deb_in_container.bash -t $NAME \
-f scripts/dpkg.Dockerfile \
"$CONTEXT"
# Keep around the package filename. # Push the package to packagecloud.
PACKAGE=$(ls "$CONTEXT"/*.deb) # TODO: Use ~/.packagecloud for CI.
PACKAGE=$(basename "$PACKAGE") docker run -e PACKAGECLOUD_TOKEN=$PACKAGECLOUD_TOKEN $NAME \
bash -xec "package_cloud push pimutils/vdirsyncer/$DISTRO/$DISTROVER *.deb"
# Save the build deb files. rm -rf "$CONTEXT"
mkdir -p "$DEST_DIR"
cp "$CONTEXT"/*.deb "$DEST_DIR"
echo Build complete! 🤖
# Packagecloud uses some internal IDs for each distro.
# Extract the one for the distro we're publishing.
DISTRO_ID=$(
curl -s \
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/distributions.json | \
jq '.deb | .[] | select(.index_name=="'"$DISTRO"'") | .versions | .[] | select(.index_name=="'"$DISTROVER"'") | .id'
)
# Actually push the package.
curl \
-F "package[distro_version_id]=$DISTRO_ID" \
-F "package[package_file]=@$DEST_DIR/$PACKAGE" \
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/repos/pimutils/vdirsyncer/packages.json
echo Done! ✨

21
setup.cfg Normal file
View file

@ -0,0 +1,21 @@
[wheel]
universal = 1
[tool:pytest]
addopts =
--tb=short
--cov-config .coveragerc
--cov=vdirsyncer
--cov-report=term-missing
--no-cov-on-fail
[flake8]
application-import-names = tests,vdirsyncer
extend-ignore =
E203, # Black-incompatible colon spacing.
W503, # Line jump before binary operator.
I100,
I202
max-line-length = 88
exclude = .eggs,build
import-order-style = smarkets

82
setup.py Normal file
View file

@ -0,0 +1,82 @@
"""
Vdirsyncer synchronizes calendars and contacts.
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
how to package vdirsyncer.
"""
from setuptools import Command
from setuptools import find_packages
from setuptools import setup
# Runtime dependencies, with lower bounds documenting the oldest supported
# version of each library (see linked issues for why each bound exists).
requirements = [
    # https://github.com/mitsuhiko/click/issues/200
    "click>=5.0,<9.0",
    "click-log>=0.3.0, <0.4.0",
    # https://github.com/pimutils/vdirsyncer/issues/478
    "click-threading>=0.5",
    "requests >=2.20.0",
    # https://github.com/sigmavirus24/requests-toolbelt/pull/28
    # And https://github.com/sigmavirus24/requests-toolbelt/issues/54
    "requests_toolbelt >=0.4.0",
    # https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
    "atomicwrites>=0.1.7",
]
class PrintRequirements(Command):
    """Setuptools command that prints each runtime requirement pinned to
    its minimum supported version (``>=`` becomes ``==``)."""

    description = "Prints minimal requirements"

    # This command takes no command-line options.
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Rewrite each spec so the lower bound becomes an exact pin:
        # e.g. "click>=5.0,<9.0" -> "click==5.0,<9.0"; spaces are dropped.
        for spec in requirements:
            minimal = spec.replace(">", "=").replace(" ", "")
            print(minimal)
# The README doubles as the PyPI long description.
with open("README.rst") as f:
    long_description = f.read()

setup(
    # General metadata
    name="vdirsyncer",
    author="Markus Unterwaditzer",
    author_email="markus@unterwaditzer.net",
    url="https://github.com/pimutils/vdirsyncer",
    description="Synchronize calendars and contacts",
    license="BSD",
    long_description=long_description,
    # Runtime dependencies
    install_requires=requirements,
    # Optional dependencies
    extras_require={
        "google": ["requests-oauthlib"],
        "etesync": ["etesync==0.5.2", "django<2.0"],
    },
    # Build dependencies
    setup_requires=["setuptools_scm != 1.12.0"],
    # Other
    packages=find_packages(exclude=["tests.*", "tests"]),
    include_package_data=True,
    # Registers "python setup.py minimal_requirements" (see PrintRequirements).
    cmdclass={"minimal_requirements": PrintRequirements},
    # Version is derived from git tags and written to vdirsyncer/version.py.
    use_scm_version={"write_to": "vdirsyncer/version.py"},
    entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:main"]},
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "License :: OSI Approved :: BSD License",
        "Operating System :: POSIX",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Internet",
        "Topic :: Utilities",
    ],
)

4
test-requirements.txt Normal file
View file

@ -0,0 +1,4 @@
hypothesis>=5.0.0,<7.0.0
pytest
pytest-cov
pytest-localserver

View file

@ -1,9 +1,6 @@
""" """
Test suite for vdirsyncer. Test suite for vdirsyncer.
""" """
from __future__ import annotations
import hypothesis.strategies as st import hypothesis.strategies as st
import urllib3.exceptions import urllib3.exceptions
@ -103,8 +100,10 @@ X-SOMETHING:{r}
HAHA:YES HAHA:YES
END:FOO""" END:FOO"""
printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs"))) printable_characters_strategy = st.text(
st.characters(blacklist_categories=("Cc", "Cs"))
)
uid_strategy = st.text( uid_strategy = st.text(
st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1 st.characters(blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
).filter(lambda x: x.strip() == x) ).filter(lambda x: x.strip() == x)

View file

@ -1,19 +1,14 @@
""" """
General-purpose fixtures for vdirsyncer's testsuite. General-purpose fixtures for vdirsyncer's testsuite.
""" """
from __future__ import annotations
import logging import logging
import os import os
import aiohttp
import click_log import click_log
import pytest import pytest
import pytest_asyncio
from hypothesis import HealthCheck from hypothesis import HealthCheck
from hypothesis import Verbosity
from hypothesis import settings from hypothesis import settings
from hypothesis import Verbosity
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@ -29,6 +24,7 @@ except ImportError:
def benchmark(): def benchmark():
return lambda x: x() return lambda x: x()
else: else:
del pytest_benchmark del pytest_benchmark
@ -45,7 +41,7 @@ settings.register_profile(
"deterministic", "deterministic",
settings( settings(
derandomize=True, derandomize=True,
suppress_health_check=list(HealthCheck), suppress_health_check=HealthCheck.all(),
), ),
) )
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow])) settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
@ -56,15 +52,3 @@ elif os.environ.get("CI", "false").lower() == "true":
settings.load_profile("ci") settings.load_profile("ci")
else: else:
settings.load_profile("dev") settings.load_profile("dev")
@pytest_asyncio.fixture
async def aio_session():
async with aiohttp.ClientSession() as session:
yield session
@pytest_asyncio.fixture
async def aio_connector():
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
yield conn

View file

@ -1,20 +1,16 @@
from __future__ import annotations
import random import random
import textwrap import textwrap
import uuid import uuid
from urllib.parse import quote as urlquote from urllib.parse import quote as urlquote
from urllib.parse import unquote as urlunquote from urllib.parse import unquote as urlunquote
import aiostream
import pytest import pytest
import pytest_asyncio
from tests import EVENT_TEMPLATE from .. import assert_item_equals
from tests import TASK_TEMPLATE from .. import EVENT_TEMPLATE
from tests import VCARD_TEMPLATE from .. import normalize_item
from tests import assert_item_equals from .. import TASK_TEMPLATE
from tests import normalize_item from .. import VCARD_TEMPLATE
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.storage.base import normalize_meta_value from vdirsyncer.storage.base import normalize_meta_value
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
@ -50,12 +46,11 @@ class StorageTests:
:param collection: The name of the collection to create and use. :param collection: The name of the collection to create and use.
""" """
raise NotImplementedError raise NotImplementedError()
@pytest_asyncio.fixture @pytest.fixture
async def s(self, get_storage_args): def s(self, get_storage_args):
rv = self.storage_class(**await get_storage_args()) return self.storage_class(**get_storage_args())
return rv
@pytest.fixture @pytest.fixture
def get_item(self, item_type): def get_item(self, item_type):
@ -77,209 +72,180 @@ class StorageTests:
if not self.supports_metadata: if not self.supports_metadata:
pytest.skip("This storage does not support metadata.") pytest.skip("This storage does not support metadata.")
@pytest.mark.asyncio def test_generic(self, s, get_item):
async def test_generic(self, s, get_item):
items = [get_item() for i in range(1, 10)] items = [get_item() for i in range(1, 10)]
hrefs = [] hrefs = []
for item in items: for item in items:
href, etag = await s.upload(item) href, etag = s.upload(item)
if etag is None: if etag is None:
_, etag = await s.get(href) _, etag = s.get(href)
hrefs.append((href, etag)) hrefs.append((href, etag))
hrefs.sort() hrefs.sort()
assert hrefs == sorted(await aiostream.stream.list(s.list())) assert hrefs == sorted(s.list())
for href, etag in hrefs: for href, etag in hrefs:
assert isinstance(href, (str, bytes)) assert isinstance(href, (str, bytes))
assert isinstance(etag, (str, bytes)) assert isinstance(etag, (str, bytes))
assert await s.has(href) assert s.has(href)
item, etag2 = await s.get(href) item, etag2 = s.get(href)
assert etag == etag2 assert etag == etag2
@pytest.mark.asyncio def test_empty_get_multi(self, s):
async def test_empty_get_multi(self, s): assert list(s.get_multi([])) == []
assert await aiostream.stream.list(s.get_multi([])) == []
@pytest.mark.asyncio def test_get_multi_duplicates(self, s, get_item):
async def test_get_multi_duplicates(self, s, get_item): href, etag = s.upload(get_item())
href, etag = await s.upload(get_item())
if etag is None: if etag is None:
_, etag = await s.get(href) _, etag = s.get(href)
((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2)) ((href2, item, etag2),) = s.get_multi([href] * 2)
assert href2 == href assert href2 == href
assert etag2 == etag assert etag2 == etag
@pytest.mark.asyncio def test_upload_already_existing(self, s, get_item):
async def test_upload_already_existing(self, s, get_item):
item = get_item() item = get_item()
await s.upload(item) s.upload(item)
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
await s.upload(item) s.upload(item)
@pytest.mark.asyncio def test_upload(self, s, get_item):
async def test_upload(self, s, get_item):
item = get_item() item = get_item()
href, _etag = await s.upload(item) href, etag = s.upload(item)
assert_item_equals((await s.get(href))[0], item) assert_item_equals(s.get(href)[0], item)
@pytest.mark.asyncio def test_update(self, s, get_item):
async def test_update(self, s, get_item):
item = get_item() item = get_item()
href, etag = await s.upload(item) href, etag = s.upload(item)
if etag is None: if etag is None:
_, etag = await s.get(href) _, etag = s.get(href)
assert_item_equals((await s.get(href))[0], item) assert_item_equals(s.get(href)[0], item)
new_item = get_item(uid=item.uid) new_item = get_item(uid=item.uid)
new_etag = await s.update(href, new_item, etag) new_etag = s.update(href, new_item, etag)
if new_etag is None: if new_etag is None:
_, new_etag = await s.get(href) _, new_etag = s.get(href)
# See https://github.com/pimutils/vdirsyncer/issues/48 # See https://github.com/pimutils/vdirsyncer/issues/48
assert isinstance(new_etag, (bytes, str)) assert isinstance(new_etag, (bytes, str))
assert_item_equals((await s.get(href))[0], new_item) assert_item_equals(s.get(href)[0], new_item)
@pytest.mark.asyncio def test_update_nonexisting(self, s, get_item):
async def test_update_nonexisting(self, s, get_item):
item = get_item() item = get_item()
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
await s.update("huehue", item, '"123"') s.update("huehue", item, '"123"')
@pytest.mark.asyncio def test_wrong_etag(self, s, get_item):
async def test_wrong_etag(self, s, get_item):
item = get_item() item = get_item()
href, _etag = await s.upload(item) href, etag = s.upload(item)
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
await s.update(href, item, '"lolnope"') s.update(href, item, '"lolnope"')
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
await s.delete(href, '"lolnope"') s.delete(href, '"lolnope"')
@pytest.mark.asyncio def test_delete(self, s, get_item):
async def test_delete(self, s, get_item): href, etag = s.upload(get_item())
href, etag = await s.upload(get_item()) s.delete(href, etag)
await s.delete(href, etag) assert not list(s.list())
assert not await aiostream.stream.list(s.list())
@pytest.mark.asyncio def test_delete_nonexisting(self, s, get_item):
async def test_delete_nonexisting(self, s, get_item):
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
await s.delete("1", '"123"') s.delete("1", '"123"')
@pytest.mark.asyncio def test_list(self, s, get_item):
async def test_list(self, s, get_item): assert not list(s.list())
assert not await aiostream.stream.list(s.list()) href, etag = s.upload(get_item())
href, etag = await s.upload(get_item())
if etag is None: if etag is None:
_, etag = await s.get(href) _, etag = s.get(href)
assert await aiostream.stream.list(s.list()) == [(href, etag)] assert list(s.list()) == [(href, etag)]
@pytest.mark.asyncio def test_has(self, s, get_item):
async def test_has(self, s, get_item): assert not s.has("asd")
assert not await s.has("asd") href, etag = s.upload(get_item())
href, etag = await s.upload(get_item()) assert s.has(href)
assert await s.has(href) assert not s.has("asd")
assert not await s.has("asd") s.delete(href, etag)
await s.delete(href, etag) assert not s.has(href)
assert not await s.has(href)
@pytest.mark.asyncio def test_update_others_stay_the_same(self, s, get_item):
async def test_update_others_stay_the_same(self, s, get_item):
info = {} info = {}
for _ in range(4): for _ in range(4):
href, etag = await s.upload(get_item()) href, etag = s.upload(get_item())
if etag is None: if etag is None:
_, etag = await s.get(href) _, etag = s.get(href)
info[href] = etag info[href] = etag
items = await aiostream.stream.list( assert {
s.get_multi(href for href, etag in info.items()) href: etag
) for href, item, etag in s.get_multi(href for href, etag in info.items())
assert {href: etag for href, item, etag in items} == info } == info
def test_repr(self, s): def test_repr(self, s, get_storage_args):
assert self.storage_class.__name__ in repr(s) assert self.storage_class.__name__ in repr(s)
assert s.instance_name is None assert s.instance_name is None
@pytest.mark.asyncio def test_discover(self, requires_collections, get_storage_args, get_item):
async def test_discover(
self,
requires_collections,
get_storage_args,
get_item,
aio_connector,
):
collections = set() collections = set()
for i in range(1, 5): for i in range(1, 5):
collection = f"test{i}" collection = f"test{i}"
s = self.storage_class(**await get_storage_args(collection=collection)) s = self.storage_class(**get_storage_args(collection=collection))
assert not await aiostream.stream.list(s.list()) assert not list(s.list())
await s.upload(get_item()) s.upload(get_item())
collections.add(s.collection) collections.add(s.collection)
discovered = await aiostream.stream.list( actual = {
self.storage_class.discover(**await get_storage_args(collection=None)) c["collection"]
) for c in self.storage_class.discover(**get_storage_args(collection=None))
actual = {c["collection"] for c in discovered} }
assert actual >= collections assert actual >= collections
@pytest.mark.asyncio def test_create_collection(self, requires_collections, get_storage_args, get_item):
async def test_create_collection(
self,
requires_collections,
get_storage_args,
get_item,
):
if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"): if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
pytest.skip("Manual cleanup would be necessary.") pytest.skip("Manual cleanup would be necessary.")
if getattr(self, "dav_server", "") == "radicale": if getattr(self, "dav_server", "") == "radicale":
pytest.skip("Radicale does not support collection creation") pytest.skip("Radicale does not support collection creation")
args = await get_storage_args(collection=None) args = get_storage_args(collection=None)
args["collection"] = "test" args["collection"] = "test"
s = self.storage_class(**await self.storage_class.create_collection(**args)) s = self.storage_class(**self.storage_class.create_collection(**args))
href = (await s.upload(get_item()))[0] href = s.upload(get_item())[0]
assert href in await aiostream.stream.list( assert href in (href for href, etag in s.list())
(href async for href, etag in s.list())
)
@pytest.mark.asyncio def test_discover_collection_arg(self, requires_collections, get_storage_args):
async def test_discover_collection_arg( args = get_storage_args(collection="test2")
self, requires_collections, get_storage_args
):
args = await get_storage_args(collection="test2")
with pytest.raises(TypeError) as excinfo: with pytest.raises(TypeError) as excinfo:
await aiostream.stream.list(self.storage_class.discover(**args)) list(self.storage_class.discover(**args))
assert "collection argument must not be given" in str(excinfo.value) assert "collection argument must not be given" in str(excinfo.value)
@pytest.mark.asyncio def test_collection_arg(self, get_storage_args):
async def test_collection_arg(self, get_storage_args): if self.storage_class.storage_name.startswith("etesync"):
pytest.skip("etesync uses UUIDs.")
if self.supports_collections: if self.supports_collections:
s = self.storage_class(**await get_storage_args(collection="test2")) s = self.storage_class(**get_storage_args(collection="test2"))
# Can't do stronger assertion because of radicale, which needs a # Can't do stronger assertion because of radicale, which needs a
# fileextension to guess the collection type. # fileextension to guess the collection type.
assert "test2" in s.collection assert "test2" in s.collection
else: else:
with pytest.raises(ValueError): with pytest.raises(ValueError):
self.storage_class(collection="ayy", **await get_storage_args()) self.storage_class(collection="ayy", **get_storage_args())
@pytest.mark.asyncio def test_case_sensitive_uids(self, s, get_item):
async def test_case_sensitive_uids(self, s, get_item):
if s.storage_name == "filesystem": if s.storage_name == "filesystem":
pytest.skip("Behavior depends on the filesystem.") pytest.skip("Behavior depends on the filesystem.")
uid = str(uuid.uuid4()) uid = str(uuid.uuid4())
await s.upload(get_item(uid=uid.upper())) s.upload(get_item(uid=uid.upper()))
await s.upload(get_item(uid=uid.lower())) s.upload(get_item(uid=uid.lower()))
items = [href async for href, etag in s.list()] items = [href for href, etag in s.list()]
assert len(items) == 2 assert len(items) == 2
assert len(set(items)) == 2 assert len(set(items)) == 2
@pytest.mark.asyncio def test_specialchars(
async def test_specialchars(
self, monkeypatch, requires_collections, get_storage_args, get_item self, monkeypatch, requires_collections, get_storage_args, get_item
): ):
if getattr(self, "dav_server", "") == "radicale":
pytest.skip("Radicale is fundamentally broken.")
if getattr(self, "dav_server", "") in ("icloud", "fastmail"): if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
pytest.skip("iCloud and FastMail reject this name.") pytest.skip("iCloud and FastMail reject this name.")
@ -288,68 +254,42 @@ class StorageTests:
uid = "test @ foo ät bar град сатану" uid = "test @ foo ät bar град сатану"
collection = "test @ foo ät bar" collection = "test @ foo ät bar"
s = self.storage_class(**await get_storage_args(collection=collection)) s = self.storage_class(**get_storage_args(collection=collection))
item = get_item(uid=uid) item = get_item(uid=uid)
href, etag = await s.upload(item) href, etag = s.upload(item)
item2, etag2 = await s.get(href) item2, etag2 = s.get(href)
if etag is not None: if etag is not None:
assert etag2 == etag assert etag2 == etag
assert_item_equals(item2, item) assert_item_equals(item2, item)
((_, etag3),) = await aiostream.stream.list(s.list()) ((_, etag3),) = s.list()
assert etag2 == etag3 assert etag2 == etag3
# etesync uses UUIDs for collection names
if self.storage_class.storage_name.startswith("etesync"):
return
assert collection in urlunquote(s.collection) assert collection in urlunquote(s.collection)
if self.storage_class.storage_name.endswith("dav"): if self.storage_class.storage_name.endswith("dav"):
assert urlquote(uid, "/@:") in href assert urlquote(uid, "/@:") in href
@pytest.mark.asyncio def test_metadata(self, requires_metadata, s):
async def test_newline_in_uid( if not getattr(self, "dav_server", ""):
self, monkeypatch, requires_collections, get_storage_args, get_item assert not s.get_meta("color")
): assert not s.get_meta("displayname")
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
uid = "UID:20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%0A.ics"
s = self.storage_class(**await get_storage_args())
item = get_item(uid=uid)
href, etag = await s.upload(item)
item2, etag2 = await s.get(href)
if etag is not None:
assert etag2 == etag
assert_item_equals(item2, item)
((_, etag3),) = await aiostream.stream.list(s.list())
assert etag2 == etag3
@pytest.mark.asyncio
async def test_empty_metadata(self, requires_metadata, s):
if getattr(self, "dav_server", ""):
pytest.skip()
assert await s.get_meta("color") is None
assert await s.get_meta("displayname") is None
@pytest.mark.asyncio
async def test_metadata(self, requires_metadata, s):
if getattr(self, "dav_server", "") == "xandikos":
pytest.skip("xandikos does not support removing metadata.")
try: try:
await s.set_meta("color", None) s.set_meta("color", None)
assert await s.get_meta("color") is None assert not s.get_meta("color")
await s.set_meta("color", "#ff0000") s.set_meta("color", "#ff0000")
assert await s.get_meta("color") == "#ff0000" assert s.get_meta("color") == "#ff0000"
except exceptions.UnsupportedMetadataError: except exceptions.UnsupportedMetadataError:
pass pass
@pytest.mark.asyncio
async def test_encoding_metadata(self, requires_metadata, s):
for x in ("hello world", "hello wörld"): for x in ("hello world", "hello wörld"):
await s.set_meta("displayname", x) s.set_meta("displayname", x)
rv = await s.get_meta("displayname") rv = s.get_meta("displayname")
assert rv == x assert rv == x
assert isinstance(rv, str) assert isinstance(rv, str)
@ -366,25 +306,23 @@ class StorageTests:
"فلسطين", "فلسطين",
], ],
) )
@pytest.mark.asyncio def test_metadata_normalization(self, requires_metadata, s, value):
async def test_metadata_normalization(self, requires_metadata, s, value): x = s.get_meta("displayname")
x = await s.get_meta("displayname")
assert x == normalize_meta_value(x) assert x == normalize_meta_value(x)
if not getattr(self, "dav_server", None): if not getattr(self, "dav_server", None):
# ownCloud replaces "" with "unnamed" # ownCloud replaces "" with "unnamed"
await s.set_meta("displayname", value) s.set_meta("displayname", value)
assert await s.get_meta("displayname") == normalize_meta_value(value) assert s.get_meta("displayname") == normalize_meta_value(value)
@pytest.mark.asyncio def test_recurring_events(self, s, item_type):
async def test_recurring_events(self, s, item_type):
if item_type != "VEVENT": if item_type != "VEVENT":
pytest.skip("This storage instance doesn't support iCalendar.") pytest.skip("This storage instance doesn't support iCalendar.")
uid = str(uuid.uuid4()) uid = str(uuid.uuid4())
item = Item( item = Item(
textwrap.dedent( textwrap.dedent(
f""" """
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
BEGIN:VEVENT BEGIN:VEVENT
@ -405,7 +343,7 @@ class StorageTests:
BEGIN:VEVENT BEGIN:VEVENT
DTSTART;TZID=UTC:20140128T083000Z DTSTART;TZID=UTC:20140128T083000Z
DTEND;TZID=UTC:20140128T100000Z DTEND;TZID=UTC:20140128T100000Z
RRULE:FREQ=WEEKLY;BYDAY=TU;UNTIL=20141208T213000Z RRULE:FREQ=WEEKLY;UNTIL=20141208T213000Z;BYDAY=TU
DTSTAMP:20140327T060506Z DTSTAMP:20140327T060506Z
UID:{uid} UID:{uid}
CREATED:20131216T033331Z CREATED:20131216T033331Z
@ -418,11 +356,13 @@ class StorageTests:
TRANSP:OPAQUE TRANSP:OPAQUE
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
""" """.format(
uid=uid
)
).strip() ).strip()
) )
href, _etag = await s.upload(item) href, etag = s.upload(item)
item2, _etag2 = await s.get(href) item2, etag2 = s.get(href)
assert normalize_item(item) == normalize_item(item2) assert normalize_item(item) == normalize_item(item2)

View file

@ -1,14 +1,9 @@
from __future__ import annotations
import asyncio
import contextlib import contextlib
import subprocess import subprocess
import time import time
import uuid import uuid
import aiostream
import pytest import pytest
import pytest_asyncio
import requests import requests
@ -50,7 +45,6 @@ def dockerised_server(name, container_port, exposed_port):
[ [
"docker", "docker",
"run", "run",
"--rm",
"--detach", "--detach",
"--publish", "--publish",
f"{exposed_port}:{container_port}", f"{exposed_port}:{container_port}",
@ -85,32 +79,32 @@ def xandikos_server():
yield yield
@pytest_asyncio.fixture @pytest.fixture
async def slow_create_collection(request, aio_connector): def slow_create_collection(request):
# We need to properly clean up because otherwise we might run into # We need to properly clean up because otherwise we might run into
# storage limits. # storage limits.
to_delete = [] to_delete = []
async def inner(cls: type, args: dict, collection_name: str) -> dict: def delete_collections():
"""Create a collection for s in to_delete:
s.session.request("DELETE", "")
Returns args necessary to create a Storage instance pointing to it. request.addfinalizer(delete_collections)
"""
assert collection_name.startswith("test")
# Make each name unique def inner(cls, args, collection):
collection_name = f"{collection_name}-vdirsyncer-ci-{uuid.uuid4()}" assert collection.startswith("test")
collection += "-vdirsyncer-ci-" + str(uuid.uuid4())
# Create the collection: args = cls.create_collection(collection, **args)
args = await cls.create_collection(collection_name, **args) s = cls(**args)
collection = cls(**args) _clear_collection(s)
assert not list(s.list())
# Keep collection in a list to be deleted once tests end: to_delete.append(s)
to_delete.append(collection)
assert not await aiostream.stream.list(collection.list())
return args return args
yield inner return inner
await asyncio.gather(*(c.session.request("DELETE", "") for c in to_delete))
def _clear_collection(s):
for href, etag in s.list():
s.delete(href, etag)

View file

@ -1,18 +1,16 @@
from __future__ import annotations
import os import os
import uuid import uuid
import aiohttp
import aiostream
import pytest import pytest
import requests.exceptions
from .. import get_server_mixin
from .. import StorageTests
from tests import assert_item_equals from tests import assert_item_equals
from tests.storage import StorageTests
from tests.storage import get_server_mixin
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
dav_server = os.environ.get("DAV_SERVER", "skip") dav_server = os.environ.get("DAV_SERVER", "skip")
ServerMixin = get_server_mixin(dav_server) ServerMixin = get_server_mixin(dav_server)
@ -21,33 +19,30 @@ class DAVStorageTests(ServerMixin, StorageTests):
dav_server = dav_server dav_server = dav_server
@pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.") @pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
@pytest.mark.asyncio def test_dav_broken_item(self, s):
async def test_dav_broken_item(self, s):
item = Item("HAHA:YES") item = Item("HAHA:YES")
with pytest.raises((exceptions.Error, aiohttp.ClientResponseError)): with pytest.raises((exceptions.Error, requests.exceptions.HTTPError)):
await s.upload(item) s.upload(item)
assert not await aiostream.stream.list(s.list()) assert not list(s.list())
@pytest.mark.asyncio def test_dav_empty_get_multi_performance(self, s, monkeypatch):
async def test_dav_empty_get_multi_performance(self, s, monkeypatch):
def breakdown(*a, **kw): def breakdown(*a, **kw):
raise AssertionError("Expected not to be called.") raise AssertionError("Expected not to be called.")
monkeypatch.setattr("requests.sessions.Session.request", breakdown) monkeypatch.setattr("requests.sessions.Session.request", breakdown)
try: try:
assert list(await aiostream.stream.list(s.get_multi([]))) == [] assert list(s.get_multi([])) == []
finally: finally:
# Make sure monkeypatch doesn't interfere with DAV server teardown # Make sure monkeypatch doesn't interfere with DAV server teardown
monkeypatch.undo() monkeypatch.undo()
@pytest.mark.asyncio def test_dav_unicode_href(self, s, get_item, monkeypatch):
async def test_dav_unicode_href(self, s, get_item, monkeypatch):
if self.dav_server == "radicale": if self.dav_server == "radicale":
pytest.skip("Radicale is unable to deal with unicode hrefs") pytest.skip("Radicale is unable to deal with unicode hrefs")
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext) monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
item = get_item(uid="град сатану" + str(uuid.uuid4())) item = get_item(uid="град сатану" + str(uuid.uuid4()))
href, _etag = await s.upload(item) href, etag = s.upload(item)
item2, _etag2 = await s.get(href) item2, etag2 = s.get(href)
assert_item_equals(item, item2) assert_item_equals(item, item2)

View file

@ -1,24 +1,18 @@
from __future__ import annotations
import contextlib
import datetime import datetime
from textwrap import dedent from textwrap import dedent
import aiohttp
import aiostream
import pytest import pytest
from aioresponses import aioresponses import requests.exceptions
from . import dav_server
from . import DAVStorageTests
from .. import format_item
from tests import EVENT_TEMPLATE from tests import EVENT_TEMPLATE
from tests import TASK_TEMPLATE from tests import TASK_TEMPLATE
from tests import VCARD_TEMPLATE from tests import VCARD_TEMPLATE
from tests.storage import format_item
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.storage.dav import CalDAVStorage from vdirsyncer.storage.dav import CalDAVStorage
from . import DAVStorageTests
from . import dav_server
class TestCalDAVStorage(DAVStorageTests): class TestCalDAVStorage(DAVStorageTests):
storage_class = CalDAVStorage storage_class = CalDAVStorage
@ -27,20 +21,20 @@ class TestCalDAVStorage(DAVStorageTests):
def item_type(self, request): def item_type(self, request):
return request.param return request.param
@pytest.mark.asyncio @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
async def test_doesnt_accept_vcard(self, item_type, get_storage_args): def test_doesnt_accept_vcard(self, item_type, get_storage_args):
s = self.storage_class(item_types=(item_type,), **await get_storage_args()) s = self.storage_class(item_types=(item_type,), **get_storage_args())
# Most storages hard-fail, but xandikos doesn't. try:
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError): s.upload(format_item(VCARD_TEMPLATE))
await s.upload(format_item(VCARD_TEMPLATE)) except (exceptions.Error, requests.exceptions.HTTPError):
pass
assert not await aiostream.stream.list(s.list()) assert not list(s.list())
# The `arg` param is not named `item_types` because that would hit # The `arg` param is not named `item_types` because that would hit
# https://bitbucket.org/pytest-dev/pytest/issue/745/ # https://bitbucket.org/pytest-dev/pytest/issue/745/
@pytest.mark.parametrize( @pytest.mark.parametrize(
("arg", "calls_num"), "arg,calls_num",
[ [
(("VTODO",), 1), (("VTODO",), 1),
(("VEVENT",), 1), (("VEVENT",), 1),
@ -50,11 +44,10 @@ class TestCalDAVStorage(DAVStorageTests):
], ],
) )
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.") @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
@pytest.mark.asyncio def test_item_types_performance(
async def test_item_types_performance(
self, get_storage_args, arg, calls_num, monkeypatch self, get_storage_args, arg, calls_num, monkeypatch
): ):
s = self.storage_class(item_types=arg, **await get_storage_args()) s = self.storage_class(item_types=arg, **get_storage_args())
old_parse = s._parse_prop_responses old_parse = s._parse_prop_responses
calls = [] calls = []
@ -63,18 +56,17 @@ class TestCalDAVStorage(DAVStorageTests):
return old_parse(*a, **kw) return old_parse(*a, **kw)
monkeypatch.setattr(s, "_parse_prop_responses", new_parse) monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
await aiostream.stream.list(s.list()) list(s.list())
assert len(calls) == calls_num assert len(calls) == calls_num
@pytest.mark.xfail( @pytest.mark.xfail(
dav_server == "radicale", reason="Radicale doesn't support timeranges." dav_server == "radicale", reason="Radicale doesn't support timeranges."
) )
@pytest.mark.asyncio def test_timerange_correctness(self, get_storage_args):
async def test_timerange_correctness(self, get_storage_args):
start_date = datetime.datetime(2013, 9, 10) start_date = datetime.datetime(2013, 9, 10)
end_date = datetime.datetime(2013, 9, 13) end_date = datetime.datetime(2013, 9, 13)
s = self.storage_class( s = self.storage_class(
start_date=start_date, end_date=end_date, **await get_storage_args() start_date=start_date, end_date=end_date, **get_storage_args()
) )
too_old_item = format_item( too_old_item = format_item(
@ -131,44 +123,50 @@ class TestCalDAVStorage(DAVStorageTests):
).strip() ).strip()
) )
await s.upload(too_old_item) s.upload(too_old_item)
await s.upload(too_new_item) s.upload(too_new_item)
expected_href, _ = await s.upload(good_item) expected_href, _ = s.upload(good_item)
((actual_href, _),) = await aiostream.stream.list(s.list()) ((actual_href, _),) = s.list()
assert actual_href == expected_href assert actual_href == expected_href
@pytest.mark.asyncio def test_invalid_resource(self, monkeypatch, get_storage_args):
async def test_invalid_resource(self, monkeypatch, get_storage_args): calls = []
args = await get_storage_args(collection=None) args = get_storage_args(collection=None)
with aioresponses() as m: def request(session, method, url, **kwargs):
m.add(args["url"], method="PROPFIND", status=200, body="Hello world") assert url == args["url"]
calls.append(None)
with pytest.raises(ValueError): r = requests.Response()
s = self.storage_class(**args) r.status_code = 200
await aiostream.stream.list(s.list()) r._content = b"Hello World."
return r
assert len(m.requests) == 1 monkeypatch.setattr("requests.sessions.Session.request", request)
with pytest.raises(ValueError):
s = self.storage_class(**args)
list(s.list())
assert len(calls) == 1
@pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT") @pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
@pytest.mark.skipif( @pytest.mark.skipif(
dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs." dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
) )
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.") @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
@pytest.mark.asyncio def test_item_types_general(self, s):
async def test_item_types_general(self, s): event = s.upload(format_item(EVENT_TEMPLATE))[0]
event = (await s.upload(format_item(EVENT_TEMPLATE)))[0] task = s.upload(format_item(TASK_TEMPLATE))[0]
task = (await s.upload(format_item(TASK_TEMPLATE)))[0]
s.item_types = ("VTODO", "VEVENT") s.item_types = ("VTODO", "VEVENT")
async def hrefs(): def hrefs():
return {href async for href, etag in s.list()} return {href for href, etag in s.list()}
assert await hrefs() == {event, task} assert hrefs() == {event, task}
s.item_types = ("VTODO",) s.item_types = ("VTODO",)
assert await hrefs() == {task} assert hrefs() == {task}
s.item_types = ("VEVENT",) s.item_types = ("VEVENT",)
assert await hrefs() == {event} assert hrefs() == {event}
s.item_types = () s.item_types = ()
assert await hrefs() == {event, task} assert hrefs() == {event, task}

View file

@ -1,10 +1,7 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.dav import CardDAVStorage
from . import DAVStorageTests from . import DAVStorageTests
from vdirsyncer.storage.dav import CardDAVStorage
class TestCardDAVStorage(DAVStorageTests): class TestCardDAVStorage(DAVStorageTests):

View file

@ -1,10 +1,7 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.dav import _BAD_XML_CHARS from vdirsyncer.storage.dav import _BAD_XML_CHARS
from vdirsyncer.storage.dav import _merge_xml from vdirsyncer.storage.dav import _merge_xml
from vdirsyncer.storage.dav import _normalize_href
from vdirsyncer.storage.dav import _parse_xml from vdirsyncer.storage.dav import _parse_xml
@ -41,19 +38,9 @@ def test_xml_utilities():
def test_xml_specialchars(char): def test_xml_specialchars(char):
x = _parse_xml( x = _parse_xml(
'<?xml version="1.0" encoding="UTF-8" ?>' '<?xml version="1.0" encoding="UTF-8" ?>'
f"<foo>ye{chr(char)}s\r\n" "<foo>ye{}s\r\n"
"hello</foo>".encode("ascii") "hello</foo>".format(chr(char)).encode("ascii")
) )
if char in _BAD_XML_CHARS: if char in _BAD_XML_CHARS:
assert x.text == "yes\nhello" assert x.text == "yes\nhello"
@pytest.mark.parametrize(
"href",
[
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
],
)
def test_normalize_href(href):
assert href == _normalize_href("https://example.com", href)

View file

Binary file not shown.

View file

@ -0,0 +1,122 @@
"""
Django settings for etesync_server project.
Generated by 'django-admin startproject' using Django 1.10.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "d7r(p-9=$3a@bbt%*+$p@4)cej13nzd0gmnt8+m0bitb=-umj#"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"rest_framework",
"rest_framework.authtoken",
"journal.apps.JournalConfig",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "etesync_server.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "etesync_server.wsgi.application"
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.environ.get("ETESYNC_DB_PATH", os.path.join(BASE_DIR, "db.sqlite3")),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", # noqa
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", # noqa
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", # noqa
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = "/static/"

View file

@ -0,0 +1,37 @@
"""etesync_server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from journal import views
from rest_framework_nested import routers
router = routers.DefaultRouter()
router.register(r"journals", views.JournalViewSet)
router.register(r"journal/(?P<journal_uid>[^/]+)", views.EntryViewSet)
router.register(r"user", views.UserInfoViewSet)
journals_router = routers.NestedSimpleRouter(router, r"journals", lookup="journal")
journals_router.register(r"members", views.MembersViewSet, base_name="journal-members")
journals_router.register(r"entries", views.EntryViewSet, base_name="journal-entries")
urlpatterns = [
url(r"^api/v1/", include(router.urls)),
url(r"^api/v1/", include(journals_router.urls)),
]
# Adding this just for testing, this shouldn't be here normally
urlpatterns += (url(r"^reset/$", views.reset, name="reset_debug"),)

View file

@ -0,0 +1,15 @@
"""
WSGI config for etesync_server project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
application = get_wsgi_application()

View file

@ -0,0 +1,22 @@
#!/usr/bin/env python
# Standard Django management entry point (django-admin startproject template).
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django  # noqa
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django itself imported fine, so re-raise the original error.
        raise
    execute_from_command_line(sys.argv)

View file

@ -0,0 +1 @@
63ae6eec45b592d5c511f79b7b0c312d2c5f7d6a

Binary file not shown.

View file

@ -0,0 +1,88 @@
import os
import shutil
import sys
import pytest
import requests
from .. import StorageTests
from vdirsyncer.storage.etesync import EtesyncCalendars
from vdirsyncer.storage.etesync import EtesyncContacts
# Skip the whole module unless etesync tests are explicitly enabled via env var.
pytestmark = pytest.mark.skipif(
    os.getenv("ETESYNC_TESTS", "") != "true", reason="etesync tests disabled"
)


@pytest.fixture(scope="session")
def etesync_app(tmpdir_factory):
    """Return the bundled etesync Django WSGI app, backed by a fresh DB copy.

    Copies the seed ``db.sqlite3`` into a session-scoped tmpdir and exports
    its path via ``ETESYNC_DB_PATH`` *before* importing the WSGI module, so
    the fixture server's settings pick it up (see settings.DATABASES).
    """
    # Make the bundled etesync_server package importable.
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "etesync_server"))
    db = tmpdir_factory.mktemp("etesync").join("etesync.sqlite")
    shutil.copy(
        os.path.join(os.path.dirname(__file__), "etesync_server", "db.sqlite3"), str(db)
    )
    os.environ["ETESYNC_DB_PATH"] = str(db)
    # Import deferred until env var and sys.path are in place.
    from etesync_server.wsgi import application

    return application
class EtesyncTests(StorageTests):
    """Shared base for etesync storage tests against the in-process server."""

    # Etesync storages don't support collection metadata in these tests.
    supports_metadata = False

    @pytest.fixture
    def get_storage_args(self, request, get_item, tmpdir, etesync_app):
        """Yield a factory producing kwargs for ``self.storage_class``.

        Intercepts requests to 127.0.0.1:8000 and serves them from the
        ``etesync_app`` WSGI fixture (no real network), resets the server
        state via the test-only /reset/ endpoint, then returns ``inner``.
        """
        # Imported lazily so the module is only required when these tests run.
        import wsgi_intercept
        import wsgi_intercept.requests_intercept

        wsgi_intercept.requests_intercept.install()
        wsgi_intercept.add_wsgi_intercept("127.0.0.1", 8000, lambda: etesync_app)

        def teardown():
            # Undo the intercept in reverse order of installation.
            wsgi_intercept.remove_wsgi_intercept("127.0.0.1", 8000)
            wsgi_intercept.requests_intercept.uninstall()

        request.addfinalizer(teardown)
        # Auth token for the fixture user, stored next to this file.
        with open(
            os.path.join(os.path.dirname(__file__), "test@localhost/auth_token")
        ) as f:
            token = f.read().strip()
        headers = {"Authorization": "Token " + token}
        # Reset server-side state between tests (debug-only endpoint).
        r = requests.post(
            "http://127.0.0.1:8000/reset/", headers=headers, allow_redirects=False
        )
        assert r.status_code == 200

        def inner(collection="test"):
            # Base kwargs for the storage class; secrets_dir holds the
            # per-user key material used by etesync.
            rv = {
                "email": "test@localhost",
                "db_path": str(tmpdir.join("etesync.db")),
                "secrets_dir": os.path.dirname(__file__),
                "server_url": "http://127.0.0.1:8000/",
            }
            if collection is not None:
                # Create the collection up front and return its full args.
                rv = self.storage_class.create_collection(collection=collection, **rv)
            return rv

        return inner
class TestContacts(EtesyncTests):
    """Run the shared etesync test suite against the contacts storage."""

    storage_class = EtesyncContacts

    @pytest.fixture(params=["VCARD"])
    def item_type(self, request):
        # Contacts storages only deal in VCARD items.
        return request.param
class TestCalendars(EtesyncTests):
    """Run the shared etesync test suite against the calendars storage."""

    storage_class = EtesyncCalendars

    @pytest.fixture(params=["VEVENT"])
    def item_type(self, request):
        # Calendar storages are exercised with VEVENT items only here.
        return request.param

View file

@ -1,25 +1,15 @@
from __future__ import annotations
import pytest import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args( def get_storage_args(self, request, tmpdir, slow_create_collection, baikal_server):
self, def inner(collection="test"):
request,
tmpdir,
slow_create_collection,
baikal_server,
aio_connector,
):
async def inner(collection="test"):
base_url = "http://127.0.0.1:8002/" base_url = "http://127.0.0.1:8002/"
args = { args = {
"url": base_url, "url": base_url,
"username": "baikal", "username": "baikal",
"password": "baikal", "password": "baikal",
"connector": aio_connector,
} }
if self.storage_class.fileext == ".vcf": if self.storage_class.fileext == ".vcf":
@ -28,11 +18,7 @@ class ServerMixin:
args["url"] = base_url + "cal.php/" args["url"] = base_url + "cal.php/"
if collection is not None: if collection is not None:
args = await slow_create_collection( args = slow_create_collection(self.storage_class, args, collection)
self.storage_class,
args,
collection,
)
return args return args
return inner return inner

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os import os
import uuid import uuid
@ -13,7 +11,7 @@ try:
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/", "url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
} }
except KeyError as e: except KeyError as e:
pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}") pytestmark = pytest.mark.skip("Missing envkey: {}".format(str(e)))
@pytest.mark.flaky(reruns=5) @pytest.mark.flaky(reruns=5)
@ -25,11 +23,11 @@ class ServerMixin:
elif self.storage_class.fileext == ".vcf": elif self.storage_class.fileext == ".vcf":
pytest.skip("No carddav") pytest.skip("No carddav")
else: else:
raise RuntimeError raise RuntimeError()
@pytest.fixture @pytest.fixture
def get_storage_args(self, davical_args, request): def get_storage_args(self, davical_args, request):
async def inner(collection="test"): def inner(collection="test"):
if collection is None: if collection is None:
return davical_args return davical_args
@ -41,8 +39,7 @@ class ServerMixin:
) )
s = self.storage_class(**args) s = self.storage_class(**args)
if not list(s.list()): if not list(s.list()):
# See: https://stackoverflow.com/a/33984811 request.addfinalizer(lambda: s.session.request("DELETE", ""))
request.addfinalizer(lambda x=s: x.session.request("DELETE", ""))
return args return args
raise RuntimeError("Failed to find free collection.") raise RuntimeError("Failed to find free collection.")

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os import os
import pytest import pytest
@ -7,20 +5,16 @@ import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args(self, slow_create_collection, aio_connector, request): def get_storage_args(self, item_type, slow_create_collection):
if ( if item_type == "VTODO":
"item_type" in request.fixturenames
and request.getfixturevalue("item_type") == "VTODO"
):
# Fastmail has non-standard support for TODOs # Fastmail has non-standard support for TODOs
# See https://github.com/pimutils/vdirsyncer/issues/824 # See https://github.com/pimutils/vdirsyncer/issues/824
pytest.skip("Fastmail has non-standard VTODO support.") pytest.skip("Fastmail has non-standard VTODO support.")
async def inner(collection="test"): def inner(collection="test"):
args = { args = {
"username": os.environ["FASTMAIL_USERNAME"], "username": os.environ["FASTMAIL_USERNAME"],
"password": os.environ["FASTMAIL_PASSWORD"], "password": os.environ["FASTMAIL_PASSWORD"],
"connector": aio_connector,
} }
if self.storage_class.fileext == ".ics": if self.storage_class.fileext == ".ics":
@ -28,15 +22,10 @@ class ServerMixin:
elif self.storage_class.fileext == ".vcf": elif self.storage_class.fileext == ".vcf":
args["url"] = "https://carddav.fastmail.com/" args["url"] = "https://carddav.fastmail.com/"
else: else:
raise RuntimeError raise RuntimeError()
if collection is not None: if collection is not None:
args = await slow_create_collection( args = slow_create_collection(self.storage_class, args, collection)
self.storage_class,
args,
collection,
)
return args return args
return inner return inner

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os import os
import pytest import pytest
@ -10,10 +8,10 @@ class ServerMixin:
def get_storage_args(self, item_type, slow_create_collection): def get_storage_args(self, item_type, slow_create_collection):
if item_type != "VEVENT": if item_type != "VEVENT":
# iCloud collections can either be calendars or task lists. # iCloud collections can either be calendars or task lists.
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa
pytest.skip("iCloud doesn't support anything else than VEVENT") pytest.skip("iCloud doesn't support anything else than VEVENT")
async def inner(collection="test"): def inner(collection="test"):
args = { args = {
"username": os.environ["ICLOUD_USERNAME"], "username": os.environ["ICLOUD_USERNAME"],
"password": os.environ["ICLOUD_PASSWORD"], "password": os.environ["ICLOUD_PASSWORD"],
@ -24,7 +22,7 @@ class ServerMixin:
elif self.storage_class.fileext == ".vcf": elif self.storage_class.fileext == ".vcf":
args["url"] = "https://contacts.icloud.com/" args["url"] = "https://contacts.icloud.com/"
else: else:
raise RuntimeError raise RuntimeError()
if collection is not None: if collection is not None:
args = slow_create_collection(self.storage_class, args, collection) args = slow_create_collection(self.storage_class, args, collection)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest import pytest
@ -11,23 +9,17 @@ class ServerMixin:
tmpdir, tmpdir,
slow_create_collection, slow_create_collection,
radicale_server, radicale_server,
aio_connector,
): ):
async def inner(collection="test"): def inner(collection="test"):
url = "http://127.0.0.1:8001/" url = "http://127.0.0.1:8001/"
args = { args = {
"url": url, "url": url,
"username": "radicale", "username": "radicale",
"password": "radicale", "password": "radicale",
"connector": aio_connector,
} }
if collection is not None: if collection is not None:
args = await slow_create_collection( args = slow_create_collection(self.storage_class, args, collection)
self.storage_class,
args,
collection,
)
return args return args
return inner return inner

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest import pytest
@ -11,19 +9,13 @@ class ServerMixin:
tmpdir, tmpdir,
slow_create_collection, slow_create_collection,
xandikos_server, xandikos_server,
aio_connector,
): ):
async def inner(collection="test"): def inner(collection="test"):
url = "http://127.0.0.1:8000/" url = "http://127.0.0.1:8000/"
args = {"url": url, "connector": aio_connector} args = {"url": url}
if collection is not None: if collection is not None:
args = await slow_create_collection( args = slow_create_collection(self.storage_class, args, collection)
self.storage_class,
args,
collection,
)
return args return args
return inner return inner

View file

@ -1,25 +1,21 @@
from __future__ import annotations
import subprocess import subprocess
import aiostream
import pytest import pytest
from . import StorageTests
from vdirsyncer.storage.filesystem import FilesystemStorage from vdirsyncer.storage.filesystem import FilesystemStorage
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
from . import StorageTests
class TestFilesystemStorage(StorageTests): class TestFilesystemStorage(StorageTests):
storage_class = FilesystemStorage storage_class = FilesystemStorage
@pytest.fixture @pytest.fixture
def get_storage_args(self, tmpdir): def get_storage_args(self, tmpdir):
async def inner(collection="test"): def inner(collection="test"):
rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection} rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
if collection is not None: if collection is not None:
rv = await self.storage_class.create_collection(**rv) rv = self.storage_class.create_collection(**rv)
return rv return rv
return inner return inner
@ -30,8 +26,7 @@ class TestFilesystemStorage(StorageTests):
f.write("stub") f.write("stub")
self.storage_class(str(tmpdir) + "/hue", ".txt") self.storage_class(str(tmpdir) + "/hue", ".txt")
@pytest.mark.asyncio def test_broken_data(self, tmpdir):
async def test_broken_data(self, tmpdir):
s = self.storage_class(str(tmpdir), ".txt") s = self.storage_class(str(tmpdir), ".txt")
class BrokenItem: class BrokenItem:
@ -40,71 +35,64 @@ class TestFilesystemStorage(StorageTests):
ident = uid ident = uid
with pytest.raises(TypeError): with pytest.raises(TypeError):
await s.upload(BrokenItem) s.upload(BrokenItem)
assert not tmpdir.listdir() assert not tmpdir.listdir()
@pytest.mark.asyncio def test_ident_with_slash(self, tmpdir):
async def test_ident_with_slash(self, tmpdir):
s = self.storage_class(str(tmpdir), ".txt") s = self.storage_class(str(tmpdir), ".txt")
await s.upload(Item("UID:a/b/c")) s.upload(Item("UID:a/b/c"))
(item_file,) = tmpdir.listdir() (item_file,) = tmpdir.listdir()
assert "/" not in item_file.basename assert "/" not in item_file.basename and item_file.isfile()
assert item_file.isfile()
@pytest.mark.asyncio def test_ignore_tmp_files(self, tmpdir):
async def test_ignore_tmp_files(self, tmpdir):
"""Test that files with .tmp suffix beside .ics files are ignored.""" """Test that files with .tmp suffix beside .ics files are ignored."""
s = self.storage_class(str(tmpdir), ".ics") s = self.storage_class(str(tmpdir), ".ics")
await s.upload(Item("UID:xyzxyz")) s.upload(Item("UID:xyzxyz"))
(item_file,) = tmpdir.listdir() (item_file,) = tmpdir.listdir()
item_file.copy(item_file.new(ext="tmp")) item_file.copy(item_file.new(ext="tmp"))
assert len(tmpdir.listdir()) == 2 assert len(tmpdir.listdir()) == 2
assert len(await aiostream.stream.list(s.list())) == 1 assert len(list(s.list())) == 1
@pytest.mark.asyncio def test_ignore_tmp_files_empty_fileext(self, tmpdir):
async def test_ignore_tmp_files_empty_fileext(self, tmpdir):
"""Test that files with .tmp suffix are ignored with empty fileext.""" """Test that files with .tmp suffix are ignored with empty fileext."""
s = self.storage_class(str(tmpdir), "") s = self.storage_class(str(tmpdir), "")
await s.upload(Item("UID:xyzxyz")) s.upload(Item("UID:xyzxyz"))
(item_file,) = tmpdir.listdir() (item_file,) = tmpdir.listdir()
item_file.copy(item_file.new(ext="tmp")) item_file.copy(item_file.new(ext="tmp"))
assert len(tmpdir.listdir()) == 2 assert len(tmpdir.listdir()) == 2
# assert False, tmpdir.listdir() # enable to see the created filename # assert False, tmpdir.listdir() # enable to see the created filename
assert len(await aiostream.stream.list(s.list())) == 1 assert len(list(s.list())) == 1
@pytest.mark.asyncio def test_ignore_files_typical_backup(self, tmpdir):
async def test_ignore_files_typical_backup(self, tmpdir):
"""Test file-name ignorance with typical backup ending ~.""" """Test file-name ignorance with typical backup ending ~."""
ignorext = "~" # without dot ignorext = "~" # without dot
storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext) storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
await storage.upload(Item("UID:xyzxyz")) storage.upload(Item("UID:xyzxyz"))
(item_file,) = tmpdir.listdir() (item_file,) = tmpdir.listdir()
item_file.copy(item_file.new(basename=item_file.basename + ignorext)) item_file.copy(item_file.new(basename=item_file.basename + ignorext))
assert len(tmpdir.listdir()) == 2 assert len(tmpdir.listdir()) == 2
assert len(await aiostream.stream.list(storage.list())) == 1 assert len(list(storage.list())) == 1
@pytest.mark.asyncio def test_too_long_uid(self, tmpdir):
async def test_too_long_uid(self, tmpdir):
storage = self.storage_class(str(tmpdir), ".txt") storage = self.storage_class(str(tmpdir), ".txt")
item = Item("UID:" + "hue" * 600) item = Item("UID:" + "hue" * 600)
href, _etag = await storage.upload(item) href, etag = storage.upload(item)
assert item.uid not in href assert item.uid not in href
@pytest.mark.asyncio def test_post_hook_inactive(self, tmpdir, monkeypatch):
async def test_post_hook_inactive(self, tmpdir, monkeypatch):
def check_call_mock(*args, **kwargs): def check_call_mock(*args, **kwargs):
raise AssertionError raise AssertionError()
monkeypatch.setattr(subprocess, "call", check_call_mock) monkeypatch.setattr(subprocess, "call", check_call_mock)
s = self.storage_class(str(tmpdir), ".txt", post_hook=None) s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
await s.upload(Item("UID:a/b/c")) s.upload(Item("UID:a/b/c"))
def test_post_hook_active(self, tmpdir, monkeypatch):
@pytest.mark.asyncio
async def test_post_hook_active(self, tmpdir, monkeypatch):
calls = [] calls = []
exe = "foo" exe = "foo"
@ -116,17 +104,14 @@ class TestFilesystemStorage(StorageTests):
monkeypatch.setattr(subprocess, "call", check_call_mock) monkeypatch.setattr(subprocess, "call", check_call_mock)
s = self.storage_class(str(tmpdir), ".txt", post_hook=exe) s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
await s.upload(Item("UID:a/b/c")) s.upload(Item("UID:a/b/c"))
assert calls assert calls
@pytest.mark.asyncio def test_ignore_git_dirs(self, tmpdir):
async def test_ignore_git_dirs(self, tmpdir):
tmpdir.mkdir(".git").mkdir("foo") tmpdir.mkdir(".git").mkdir("foo")
tmpdir.mkdir("a") tmpdir.mkdir("a")
tmpdir.mkdir("b") tmpdir.mkdir("b")
assert {c["collection"] for c in self.storage_class.discover(str(tmpdir))} == {
expected = {"a", "b"} "a",
actual = { "b",
c["collection"] async for c in self.storage_class.discover(str(tmpdir))
} }
assert actual == expected

View file

@ -1,22 +1,13 @@
from __future__ import annotations
import aiohttp
import pytest import pytest
from aioresponses import CallbackResult from requests import Response
from aioresponses import aioresponses
from tests import normalize_item from tests import normalize_item
from vdirsyncer.exceptions import UserError from vdirsyncer.exceptions import UserError
from vdirsyncer.http import BasicAuthMethod
from vdirsyncer.http import DigestAuthMethod
from vdirsyncer.http import UsageLimitReached
from vdirsyncer.http import request
from vdirsyncer.storage.http import HttpStorage from vdirsyncer.storage.http import HttpStorage
from vdirsyncer.storage.http import prepare_auth from vdirsyncer.storage.http import prepare_auth
@pytest.mark.asyncio def test_list(monkeypatch):
async def test_list(aio_connector):
collection_url = "http://127.0.0.1/calendar/collection.ics" collection_url = "http://127.0.0.1/calendar/collection.ics"
items = [ items = [
@ -41,68 +32,67 @@ async def test_list(aio_connector):
), ),
] ]
responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2 responses = ["\n".join(["BEGIN:VCALENDAR"] + items + ["END:VCALENDAR"])] * 2
def callback(url, headers, **kwargs): def get(self, method, url, *a, **kw):
assert headers["User-Agent"].startswith("vdirsyncer/") assert method == "GET"
assert url == collection_url
r = Response()
r.status_code = 200
assert responses assert responses
r._content = responses.pop().encode("utf-8")
r.headers["Content-Type"] = "text/calendar"
r.encoding = "ISO-8859-1"
return r
return CallbackResult( monkeypatch.setattr("requests.sessions.Session.request", get)
status=200,
body=responses.pop().encode("utf-8"),
headers={"Content-Type": "text/calendar; charset=iso-8859-1"},
)
with aioresponses() as m: s = HttpStorage(url=collection_url)
m.get(collection_url, callback=callback, repeat=True)
s = HttpStorage(url=collection_url, connector=aio_connector) found_items = {}
found_items = {} for href, etag in s.list():
item, etag2 = s.get(href)
assert item.uid is not None
assert etag2 == etag
found_items[normalize_item(item)] = href
async for href, etag in s.list(): expected = {
item, etag2 = await s.get(href) normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
assert item.uid is not None }
assert etag2 == etag
found_items[normalize_item(item)] = href
expected = { assert set(found_items) == expected
normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
}
assert set(found_items) == expected for href, etag in s.list():
item, etag2 = s.get(href)
async for href, etag in s.list(): assert item.uid is not None
item, etag2 = await s.get(href) assert etag2 == etag
assert item.uid is not None assert found_items[normalize_item(item)] == href
assert etag2 == etag
assert found_items[normalize_item(item)] == href
def test_readonly_param(aio_connector): def test_readonly_param():
"""The ``readonly`` param cannot be ``False``."""
url = "http://example.com/" url = "http://example.com/"
with pytest.raises(ValueError): with pytest.raises(ValueError):
HttpStorage(url=url, read_only=False, connector=aio_connector) HttpStorage(url=url, read_only=False)
a = HttpStorage(url=url, read_only=True, connector=aio_connector) a = HttpStorage(url=url, read_only=True).read_only
b = HttpStorage(url=url, read_only=None, connector=aio_connector) b = HttpStorage(url=url, read_only=None).read_only
assert a is b is True
assert a.read_only is b.read_only is True
def test_prepare_auth(): def test_prepare_auth():
assert prepare_auth(None, "", "") is None assert prepare_auth(None, "", "") is None
assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd") assert prepare_auth(None, "user", "pwd") == ("user", "pwd")
assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd") assert prepare_auth("basic", "user", "pwd") == ("user", "pwd")
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
assert prepare_auth("basic", "", "pwd") assert prepare_auth("basic", "", "pwd")
assert "you need to specify username and password" in str(excinfo.value).lower() assert "you need to specify username and password" in str(excinfo.value).lower()
assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod) from requests.auth import HTTPDigestAuth
assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
prepare_auth("ladida", "user", "pwd") prepare_auth("ladida", "user", "pwd")
@ -110,54 +100,24 @@ def test_prepare_auth():
assert "unknown authentication method" in str(excinfo.value).lower() assert "unknown authentication method" in str(excinfo.value).lower()
def test_prepare_auth_guess(): def test_prepare_auth_guess(monkeypatch):
# guess auth is currently not supported import requests_toolbelt.auth.guess
assert isinstance(
prepare_auth("guess", "user", "pwd"), requests_toolbelt.auth.guess.GuessAuth
)
monkeypatch.delattr(requests_toolbelt.auth.guess, "GuessAuth")
with pytest.raises(UserError) as excinfo: with pytest.raises(UserError) as excinfo:
prepare_auth("guess", "usr", "pwd") prepare_auth("guess", "user", "pwd")
assert "not supported" in str(excinfo.value).lower() assert "requests_toolbelt is too old" in str(excinfo.value).lower()
def test_verify_false_disallowed(aio_connector): def test_verify_false_disallowed():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
HttpStorage(url="http://example.com", verify=False, connector=aio_connector) HttpStorage(url="http://example.com", verify=False)
assert "must be a path to a pem-file." in str(excinfo.value).lower() assert "forbidden" in str(excinfo.value).lower()
assert "consider setting verify_fingerprint" in str(excinfo.value).lower()
@pytest.mark.asyncio
async def test_403_usage_limit_exceeded(aio_connector):
url = "http://127.0.0.1/test_403"
error_body = {
"error": {
"errors": [
{
"domain": "usageLimits",
"message": "Calendar usage limits exceeded.",
"reason": "quotaExceeded",
}
],
"code": 403,
"message": "Calendar usage limits exceeded.",
}
}
async with aiohttp.ClientSession(connector=aio_connector) as session:
with aioresponses() as m:
m.get(url, status=403, payload=error_body, repeat=True)
with pytest.raises(UsageLimitReached):
await request("GET", url, session)
@pytest.mark.asyncio
async def test_403_without_usage_limits_domain(aio_connector):
"""A 403 JSON error without the Google 'usageLimits' domain should not be
treated as UsageLimitReached and should surface as ClientResponseError.
"""
url = "http://127.0.0.1/test_403_no_usage_limits"
async with aiohttp.ClientSession(connector=aio_connector) as session:
with aioresponses() as m:
m.get(url, status=403, repeat=True)
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, session)

View file

@ -1,16 +1,11 @@
from __future__ import annotations
import aiostream
import pytest import pytest
from aioresponses import CallbackResult from requests import Response
from aioresponses import aioresponses
import vdirsyncer.storage.http import vdirsyncer.storage.http
from . import StorageTests
from vdirsyncer.storage.base import Storage from vdirsyncer.storage.base import Storage
from vdirsyncer.storage.singlefile import SingleFileStorage from vdirsyncer.storage.singlefile import SingleFileStorage
from . import StorageTests
class CombinedStorage(Storage): class CombinedStorage(Storage):
"""A subclass of HttpStorage to make testing easier. It supports writes via """A subclass of HttpStorage to make testing easier. It supports writes via
@ -19,33 +14,32 @@ class CombinedStorage(Storage):
_repr_attributes = ("url", "path") _repr_attributes = ("url", "path")
storage_name = "http_and_singlefile" storage_name = "http_and_singlefile"
def __init__(self, url, path, *, connector, **kwargs): def __init__(self, url, path, **kwargs):
if kwargs.get("collection") is not None: if kwargs.get("collection", None) is not None:
raise ValueError raise ValueError()
super().__init__(**kwargs) super().__init__(**kwargs)
self.url = url self.url = url
self.path = path self.path = path
self._reader = vdirsyncer.storage.http.HttpStorage(url=url, connector=connector) self._reader = vdirsyncer.storage.http.HttpStorage(url=url)
self._reader._ignore_uids = False self._reader._ignore_uids = False
self._writer = SingleFileStorage(path=path) self._writer = SingleFileStorage(path=path)
async def list(self, *a, **kw): def list(self, *a, **kw):
async for item in self._reader.list(*a, **kw): return self._reader.list(*a, **kw)
yield item
async def get(self, *a, **kw): def get(self, *a, **kw):
await aiostream.stream.list(self.list()) self.list()
return await self._reader.get(*a, **kw) return self._reader.get(*a, **kw)
async def upload(self, *a, **kw): def upload(self, *a, **kw):
return await self._writer.upload(*a, **kw) return self._writer.upload(*a, **kw)
async def update(self, *a, **kw): def update(self, *a, **kw):
return await self._writer.update(*a, **kw) return self._writer.update(*a, **kw)
async def delete(self, *a, **kw): def delete(self, *a, **kw):
return await self._writer.delete(*a, **kw) return self._writer.delete(*a, **kw)
class TestHttpStorage(StorageTests): class TestHttpStorage(StorageTests):
@ -57,37 +51,28 @@ class TestHttpStorage(StorageTests):
def setup_tmpdir(self, tmpdir, monkeypatch): def setup_tmpdir(self, tmpdir, monkeypatch):
self.tmpfile = str(tmpdir.ensure("collection.txt")) self.tmpfile = str(tmpdir.ensure("collection.txt"))
def callback(url, headers, **kwargs): def _request(method, url, *args, **kwargs):
"""Read our tmpfile at request time. assert method == "GET"
assert url == "http://localhost:123/collection.txt"
assert "vdirsyncer" in kwargs["headers"]["User-Agent"]
r = Response()
r.status_code = 200
try:
with open(self.tmpfile, "rb") as f:
r._content = f.read()
except OSError:
r._content = b""
We can't just read this during test setup since the file get written to r.headers["Content-Type"] = "text/calendar"
during test execution. r.encoding = "utf-8"
return r
It might make sense to actually run a server serving the local file. monkeypatch.setattr(vdirsyncer.storage.http, "request", _request)
"""
assert headers["User-Agent"].startswith("vdirsyncer/")
with open(self.tmpfile) as f:
body = f.read()
return CallbackResult(
status=200,
body=body,
headers={"Content-Type": "text/calendar; charset=utf-8"},
)
with aioresponses() as m:
m.get("http://localhost:123/collection.txt", callback=callback, repeat=True)
yield
@pytest.fixture @pytest.fixture
def get_storage_args(self, aio_connector): def get_storage_args(self):
async def inner(collection=None): def inner(collection=None):
assert collection is None assert collection is None
return { return {"url": "http://localhost:123/collection.txt", "path": self.tmpfile}
"url": "http://localhost:123/collection.txt",
"path": self.tmpfile,
"connector": aio_connector,
}
return inner return inner

View file

@ -1,19 +1,14 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.memory import MemoryStorage
from . import StorageTests from . import StorageTests
from vdirsyncer.storage.memory import MemoryStorage
class TestMemoryStorage(StorageTests): class TestMemoryStorage(StorageTests):
storage_class = MemoryStorage storage_class = MemoryStorage
supports_collections = False supports_collections = False
@pytest.fixture @pytest.fixture
def get_storage_args(self): def get_storage_args(self):
async def inner(**args): return lambda **kw: kw
return args
return inner

View file

@ -1,22 +1,20 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.singlefile import SingleFileStorage
from . import StorageTests from . import StorageTests
from vdirsyncer.storage.singlefile import SingleFileStorage
class TestSingleFileStorage(StorageTests): class TestSingleFileStorage(StorageTests):
storage_class = SingleFileStorage storage_class = SingleFileStorage
supports_metadata = False supports_metadata = False
@pytest.fixture @pytest.fixture
def get_storage_args(self, tmpdir): def get_storage_args(self, tmpdir):
async def inner(collection="test"): def inner(collection="test"):
rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection} rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
if collection is not None: if collection is not None:
rv = await self.storage_class.create_collection(**rv) rv = self.storage_class.create_collection(**rv)
return rv return rv
return inner return inner

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
import pytest import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import io import io
from textwrap import dedent from textwrap import dedent
@ -9,6 +7,7 @@ from vdirsyncer import cli
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.cli.config import Config from vdirsyncer.cli.config import Config
invalid = object() invalid = object()
@ -26,7 +25,7 @@ def read_config(tmpdir, monkeypatch):
def test_read_config(read_config): def test_read_config(read_config):
_errors, c = read_config( errors, c = read_config(
""" """
[general] [general]
status_path = "/tmp/status/" status_path = "/tmp/status/"
@ -222,62 +221,3 @@ def test_validate_collections_param():
x([["c", None, "b"]]) x([["c", None, "b"]])
x([["c", "a", None]]) x([["c", "a", None]])
x([["c", None, None]]) x([["c", None, None]])
def test_invalid_implicit_value(read_config):
expected_message = "`implicit` parameter must be 'create' or absent"
with pytest.raises(exceptions.UserError) as excinfo:
read_config(
"""
[general]
status_path = "/tmp/status/"
[pair my_pair]
a = "my_a"
b = "my_b"
collections = null
implicit = "invalid"
[storage my_a]
type = "filesystem"
path = "{base}/path_a/"
fileext = ".txt"
[storage my_b]
type = "filesystem"
path = "{base}/path_b/"
fileext = ".txt"
"""
)
assert expected_message in str(excinfo.value)
def test_implicit_create_only(read_config):
"""Test that implicit create works."""
errors, c = read_config(
"""
[general]
status_path = "/tmp/status/"
[pair my_pair]
a = "my_a"
b = "my_b"
collections = ["from a", "from b"]
implicit = "create"
[storage my_a]
type = "filesystem"
path = "{base}/path_a/"
fileext = ".txt"
[storage my_b]
type = "filesystem"
path = "{base}/path_b/"
fileext = ".txt"
"""
)
assert not errors
pair = c.pairs["my_pair"]
assert pair.implicit == "create"

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import json import json
from textwrap import dedent from textwrap import dedent
@ -153,7 +151,7 @@ def test_discover_direct_path(tmpdir, runner):
def test_null_collection_with_named_collection(tmpdir, runner): def test_null_collection_with_named_collection(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
f""" """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -161,13 +159,15 @@ def test_null_collection_with_named_collection(tmpdir, runner):
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{tmpdir!s}/foo/" path = "{base}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "singlefile" type = "singlefile"
path = "{tmpdir!s}/bar.txt" path = "{base}/bar.txt"
""" """.format(
base=str(tmpdir)
)
) )
) )
@ -191,7 +191,7 @@ def test_null_collection_with_named_collection(tmpdir, runner):
@pytest.mark.parametrize( @pytest.mark.parametrize(
("a_requires", "b_requires"), "a_requires,b_requires",
[ [
(True, True), (True, True),
(True, False), (True, False),
@ -206,13 +206,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
def __init__(self, require_collection, **kw): def __init__(self, require_collection, **kw):
if require_collection: if require_collection:
assert not kw.get("collection") assert not kw.get("collection")
raise exceptions.CollectionRequired raise exceptions.CollectionRequired()
async def get(self, href: str):
raise NotImplementedError
async def list(self) -> list[tuple]:
raise NotImplementedError
from vdirsyncer.cli.utils import storage_names from vdirsyncer.cli.utils import storage_names
@ -220,7 +214,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
runner.write_with_general( runner.write_with_general(
dedent( dedent(
f""" """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -228,12 +222,14 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
[storage foo] [storage foo]
type = "test" type = "test"
require_collection = {json.dumps(a_requires)} require_collection = {a}
[storage bar] [storage bar]
type = "test" type = "test"
require_collection = {json.dumps(b_requires)} require_collection = {b}
""" """.format(
a=json.dumps(a_requires), b=json.dumps(b_requires)
)
) )
) )
@ -243,45 +239,3 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
assert ( assert (
"One or more storages don't support `collections = null`." in result.output "One or more storages don't support `collections = null`." in result.output
) )
def test_showconfig(tmpdir, runner):
runner.write_with_general(
dedent(
"""
[storage foo]
type = "filesystem"
path = "{0}/foo/"
fileext = ".txt"
[storage bar]
type = "filesystem"
path = "{0}/bar/"
fileext = ".txt"
[pair foobar]
a = "foo"
b = "bar"
collections = ["from a"]
"""
).format(str(tmpdir))
)
result = runner.invoke(["showconfig"])
assert not result.exception
assert json.loads(result.output) == {
"storages": [
{
"type": "filesystem",
"path": f"{tmpdir}/foo/",
"fileext": ".txt",
"instance_name": "foo",
},
{
"type": "filesystem",
"path": f"{tmpdir}/bar/",
"fileext": ".txt",
"instance_name": "bar",
},
]
}

View file

@ -1,27 +1,27 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
def test_get_password_from_command(tmpdir, runner): def test_get_password_from_command(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
f""" """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = ["a", "b", "c"] collections = ["a", "b", "c"]
[storage foo] [storage foo]
type.fetch = ["shell", "echo filesystem"] type = "filesystem"
path = "{tmpdir!s}/foo/" path = "{base}/foo/"
fileext.fetch = ["command", "echo", ".txt"] fileext.fetch = ["command", "echo", ".txt"]
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{tmpdir!s}/bar/" path = "{base}/bar/"
fileext.fetch = ["prompt", "Fileext for bar"] fileext.fetch = ["prompt", "Fileext for bar"]
""" """.format(
base=str(tmpdir)
)
) )
) )

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
import pytest import pytest
@ -41,7 +39,7 @@ def test_basic(storage, runner, collection):
assert not result.exception assert not result.exception
assert "No UID" in result.output assert "No UID" in result.output
assert "'toobroken.txt' is malformed beyond repair" in result.output assert "'toobroken.txt' is malformed beyond repair" in result.output
(new_fname,) = (x for x in storage.listdir() if "toobroken" not in str(x)) (new_fname,) = [x for x in storage.listdir() if "toobroken" not in str(x)]
assert "UID:" in new_fname.read() assert "UID:" in new_fname.read()
@ -58,7 +56,7 @@ def test_repair_uids(storage, runner, repair_uids):
else: else:
opt = ["--no-repair-unsafe-uid"] opt = ["--no-repair-unsafe-uid"]
result = runner.invoke(["repair", *opt, "foo"], input="y") result = runner.invoke(["repair"] + opt + ["foo"], input="y")
assert not result.exception assert not result.exception
if repair_uids: if repair_uids:

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import json import json
import sys import sys
from textwrap import dedent from textwrap import dedent
@ -52,6 +50,41 @@ def test_sync_inexistant_pair(tmpdir, runner):
assert "pair foo does not exist." in result.output.lower() assert "pair foo does not exist." in result.output.lower()
def test_debug_connections(tmpdir, runner):
runner.write_with_general(
dedent(
"""
[pair my_pair]
a = "my_a"
b = "my_b"
collections = null
[storage my_a]
type = "filesystem"
path = "{0}/path_a/"
fileext = ".txt"
[storage my_b]
type = "filesystem"
path = "{0}/path_b/"
fileext = ".txt"
"""
).format(str(tmpdir))
)
tmpdir.mkdir("path_a")
tmpdir.mkdir("path_b")
result = runner.invoke(["discover"])
assert not result.exception
result = runner.invoke(["-vdebug", "sync", "--max-workers=3"])
assert "using 3 maximal workers" in result.output.lower()
result = runner.invoke(["-vdebug", "sync"])
assert "using 1 maximal workers" in result.output.lower()
def test_empty_storage(tmpdir, runner): def test_empty_storage(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
@ -90,7 +123,9 @@ def test_empty_storage(tmpdir, runner):
result = runner.invoke(["sync"]) result = runner.invoke(["sync"])
lines = result.output.splitlines() lines = result.output.splitlines()
assert lines[0] == "Syncing my_pair" assert lines[0] == "Syncing my_pair"
assert lines[1].startswith('error: my_pair: Storage "my_b" was completely emptied.') assert lines[1].startswith(
"error: my_pair: " 'Storage "my_b" was completely emptied.'
)
assert result.exception assert result.exception
@ -278,24 +313,27 @@ def test_multiple_pairs(tmpdir, runner):
], ],
) )
def test_create_collections(collections, tmpdir, runner): def test_create_collections(collections, tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
f""" """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = {json.dumps(list(collections))} collections = {colls}
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{tmpdir!s}/foo/" path = "{base}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{tmpdir!s}/bar/" path = "{base}/bar/"
fileext = ".txt" fileext = ".txt"
""" """.format(
base=str(tmpdir), colls=json.dumps(list(collections))
)
) )
) )
@ -313,7 +351,7 @@ def test_create_collections(collections, tmpdir, runner):
def test_ident_conflict(tmpdir, runner): def test_ident_conflict(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
f""" """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -321,14 +359,16 @@ def test_ident_conflict(tmpdir, runner):
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{tmpdir!s}/foo/" path = "{base}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{tmpdir!s}/bar/" path = "{base}/bar/"
fileext = ".txt" fileext = ".txt"
""" """.format(
base=str(tmpdir)
)
) )
) )
@ -348,17 +388,20 @@ def test_ident_conflict(tmpdir, runner):
'error: foobar: Storage "foo" contains multiple items with the ' 'error: foobar: Storage "foo" contains multiple items with the '
"same UID or even content" "same UID or even content"
) in result.output ) in result.output
assert sorted( assert (
[ sorted(
"one.txt" in result.output, [
"two.txt" in result.output, "one.txt" in result.output,
"three.txt" in result.output, "two.txt" in result.output,
] "three.txt" in result.output,
) == [False, True, True] ]
)
== [False, True, True]
)
@pytest.mark.parametrize( @pytest.mark.parametrize(
("existing", "missing"), "existing,missing",
[ [
("foo", "bar"), ("foo", "bar"),
("bar", "foo"), ("bar", "foo"),
@ -367,7 +410,7 @@ def test_ident_conflict(tmpdir, runner):
def test_unknown_storage(tmpdir, runner, existing, missing): def test_unknown_storage(tmpdir, runner, existing, missing):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
f""" """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -375,9 +418,11 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
[storage {existing}] [storage {existing}]
type = "filesystem" type = "filesystem"
path = "{tmpdir!s}/{existing}/" path = "{base}/{existing}/"
fileext = ".txt" fileext = ".txt"
""" """.format(
base=str(tmpdir), existing=existing
)
) )
) )
@ -387,8 +432,10 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
assert result.exception assert result.exception
assert ( assert (
f"Storage '{missing}' not found. " "Storage '{missing}' not found. "
f"These are the configured storages: ['{existing}']" "These are the configured storages: ['{existing}']".format(
missing=missing, existing=existing
)
) in result.output ) in result.output
@ -397,34 +444,36 @@ def test_no_configured_pairs(tmpdir, runner, cmd):
runner.write_with_general("") runner.write_with_general("")
result = runner.invoke([cmd]) result = runner.invoke([cmd])
assert result.output == "" assert result.output == "critical: Nothing to do.\n"
assert not result.exception assert result.exception.code == 5
@pytest.mark.parametrize( @pytest.mark.parametrize(
("resolution", "expect_foo", "expect_bar"), "resolution,expect_foo,expect_bar",
[(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")], [(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
) )
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar): def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
f""" """
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = null collections = null
conflict_resolution = {json.dumps(resolution)} conflict_resolution = {val}
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
path = "{tmpdir!s}/foo" path = "{base}/foo"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
path = "{tmpdir!s}/bar" path = "{base}/bar"
""" """.format(
base=str(tmpdir), val=json.dumps(resolution)
)
) )
) )
@ -516,11 +565,13 @@ def test_fetch_only_necessary_params(tmpdir, runner):
fetch_script = tmpdir.join("fetch_script") fetch_script = tmpdir.join("fetch_script")
fetch_script.write( fetch_script.write(
dedent( dedent(
f""" """
set -e set -e
touch "{fetched_file!s}" touch "{}"
echo ".txt" echo ".txt"
""" """.format(
str(fetched_file)
)
) )
) )
@ -551,7 +602,9 @@ def test_fetch_only_necessary_params(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{path}" path = "{path}"
fileext.fetch = ["command", "sh", "{script}"] fileext.fetch = ["command", "sh", "{script}"]
""".format(path=str(tmpdir.mkdir("bogus")), script=str(fetch_script)) """.format(
path=str(tmpdir.mkdir("bogus")), script=str(fetch_script)
)
) )
) )

View file

@ -1,7 +1,3 @@
from __future__ import annotations
import pytest
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.cli.utils import handle_cli_error from vdirsyncer.cli.utils import handle_cli_error
from vdirsyncer.cli.utils import storage_instance_from_config from vdirsyncer.cli.utils import storage_instance_from_config
@ -14,18 +10,16 @@ def test_handle_cli_error(capsys):
except BaseException: except BaseException:
handle_cli_error() handle_cli_error()
_out, err = capsys.readouterr() out, err = capsys.readouterr()
assert "returned something vdirsyncer doesn't understand" in err assert "returned something vdirsyncer doesn't understand" in err
assert "ayy lmao" in err assert "ayy lmao" in err
@pytest.mark.asyncio def test_storage_instance_from_config(monkeypatch):
async def test_storage_instance_from_config(monkeypatch, aio_connector): def lol(**kw):
class Dummy: assert kw == {"foo": "bar", "baz": 1}
def __init__(self, **kw): return "OK"
assert kw == {"foo": "bar", "baz": 1}
monkeypatch.setitem(storage_names._storages, "lol", Dummy) monkeypatch.setitem(storage_names._storages, "lol", lol)
config = {"type": "lol", "foo": "bar", "baz": 1} config = {"type": "lol", "foo": "bar", "baz": 1}
storage = await storage_instance_from_config(config, connector=aio_connector) assert storage_instance_from_config(config) == "OK"
assert isinstance(storage, Dummy)

View file

@ -1,28 +0,0 @@
from __future__ import annotations
import ssl
import pytest
import trustme
@pytest.fixture(scope="session")
def ca():
return trustme.CA()
@pytest.fixture(scope="session")
def localhost_cert(ca):
return ca.issue_cert("localhost")
@pytest.fixture(scope="session")
def httpserver_ssl_context(localhost_cert):
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
crt = localhost_cert.cert_chain_pems[0]
key = localhost_cert.private_key_pem
with crt.tempfile() as crt_file, key.tempfile() as key_file:
context.load_cert_chain(crt_file, key_file)
return context

View file

@ -1,12 +1,9 @@
from __future__ import annotations
import logging import logging
import sys
import aiohttp
import click_log import click_log
import pytest import pytest
from cryptography import x509 import requests
from cryptography.hazmat.primitives import hashes
from vdirsyncer import http from vdirsyncer import http
from vdirsyncer import utils from vdirsyncer import utils
@ -22,90 +19,50 @@ def test_get_storage_init_args():
from vdirsyncer.storage.memory import MemoryStorage from vdirsyncer.storage.memory import MemoryStorage
all, required = utils.get_storage_init_args(MemoryStorage) all, required = utils.get_storage_init_args(MemoryStorage)
assert all == {"fileext", "collection", "read_only", "instance_name", "no_delete"} assert all == {"fileext", "collection", "read_only", "instance_name"}
assert not required assert not required
@pytest.mark.asyncio def test_request_ssl():
async def test_request_ssl(): with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
async with aiohttp.ClientSession() as session: http.request("GET", "https://self-signed.badssl.com/")
with pytest.raises( assert "certificate verify failed" in str(excinfo.value)
aiohttp.ClientConnectorCertificateError,
match="certificate verify failed", http.request("GET", "https://self-signed.badssl.com/", verify=False)
):
await http.request(
"GET",
"https://self-signed.badssl.com/",
session=session,
)
@pytest.mark.xfail(reason="feature not implemented") def _fingerprints_broken():
@pytest.mark.asyncio from pkg_resources import parse_version as ver
async def test_request_unsafe_ssl():
async with aiohttp.ClientSession() as session: broken_urllib3 = ver(requests.__version__) <= ver("2.5.1")
await http.request( return broken_urllib3
"GET",
"https://self-signed.badssl.com/",
verify=False,
session=session,
)
def fingerprint_of_cert(cert, hash=hashes.SHA256) -> str: @pytest.mark.skipif(
return x509.load_pem_x509_certificate(cert.bytes()).fingerprint(hash()).hex() _fingerprints_broken(), reason="https://github.com/shazow/urllib3/issues/529"
)
@pytest.mark.parametrize(
"fingerprint",
[
"94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA",
"19:90:F7:23:94:F2:EF:AB:2B:64:2D:57:3D:25:95:2D",
],
)
def test_request_ssl_fingerprints(httpsserver, fingerprint):
httpsserver.serve_content("") # we need to serve something
http.request("GET", httpsserver.url, verify=False, verify_fingerprint=fingerprint)
with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
http.request("GET", httpsserver.url, verify_fingerprint=fingerprint)
@pytest.mark.parametrize("hash_algorithm", [hashes.SHA256]) with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
@pytest.mark.asyncio
async def test_request_ssl_leaf_fingerprint(
httpserver,
localhost_cert,
hash_algorithm,
aio_session,
):
fingerprint = fingerprint_of_cert(localhost_cert.cert_chain_pems[0], hash_algorithm)
bogus = "".join(reversed(fingerprint))
# We have to serve something:
httpserver.expect_request("/").respond_with_data("OK")
url = f"https://127.0.0.1:{httpserver.port}/"
ssl = http.prepare_verify(None, fingerprint)
await http.request("GET", url, ssl=ssl, session=aio_session)
ssl = http.prepare_verify(None, bogus)
with pytest.raises(aiohttp.ServerFingerprintMismatch):
await http.request("GET", url, ssl=ssl, session=aio_session)
@pytest.mark.xfail(reason="Not implemented")
@pytest.mark.parametrize("hash_algorithm", [hashes.SHA256])
@pytest.mark.asyncio
async def test_request_ssl_ca_fingerprints(httpserver, ca, hash_algorithm, aio_session):
fingerprint = fingerprint_of_cert(ca.cert_pem)
bogus = "".join(reversed(fingerprint))
# We have to serve something:
httpserver.expect_request("/").respond_with_data("OK")
url = f"https://127.0.0.1:{httpserver.port}/"
await http.request(
"GET",
url,
verify=False,
verify_fingerprint=fingerprint,
session=aio_session,
)
with pytest.raises(aiohttp.ServerFingerprintMismatch):
http.request( http.request(
"GET", "GET",
url, httpsserver.url,
verify=False, verify=False,
verify_fingerprint=bogus, verify_fingerprint="".join(reversed(fingerprint)),
session=aio_session,
) )
assert "Fingerprints did not match" in str(excinfo.value)
def test_open_graphical_browser(monkeypatch): def test_open_graphical_browser(monkeypatch):
@ -113,7 +70,10 @@ def test_open_graphical_browser(monkeypatch):
# Just assert that this internal attribute still exists and behaves the way # Just assert that this internal attribute still exists and behaves the way
# expected # expected
assert webbrowser._tryorder is None if sys.version_info < (3, 7):
iter(webbrowser._tryorder)
else:
assert webbrowser._tryorder is None
monkeypatch.setattr("webbrowser._tryorder", []) monkeypatch.setattr("webbrowser._tryorder", [])

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os import os
from vdirsyncer.cli.config import _resolve_conflict_via_command from vdirsyncer.cli.config import _resolve_conflict_via_command

View file

@ -1,15 +1,13 @@
from __future__ import annotations
import aiostream
import pytest import pytest
from vdirsyncer.cli.discover import expand_collections from vdirsyncer.cli.discover import expand_collections
missing = object() missing = object()
@pytest.mark.parametrize( @pytest.mark.parametrize(
("shortcuts", "expected"), "shortcuts,expected",
[ [
( (
["from a"], ["from a"],
@ -134,38 +132,35 @@ missing = object()
), ),
], ],
) )
@pytest.mark.asyncio def test_expand_collections(shortcuts, expected):
async def test_expand_collections(shortcuts, expected):
config_a = {"type": "fooboo", "storage_side": "a"} config_a = {"type": "fooboo", "storage_side": "a"}
config_b = {"type": "fooboo", "storage_side": "b"} config_b = {"type": "fooboo", "storage_side": "b"}
async def get_discovered_a(): def get_discovered_a():
return { return {
"c1": {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, "c1": {"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
"c2": {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, "c2": {"type": "fooboo", "custom_arg": "a2", "collection": "c2"},
"a3": {"type": "fooboo", "custom_arg": "a3", "collection": "a3"}, "a3": {"type": "fooboo", "custom_arg": "a3", "collection": "a3"},
} }
async def get_discovered_b(): def get_discovered_b():
return { return {
"c1": {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, "c1": {"type": "fooboo", "custom_arg": "b1", "collection": "c1"},
"c2": {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, "c2": {"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
"b3": {"type": "fooboo", "custom_arg": "b3", "collection": "b3"}, "b3": {"type": "fooboo", "custom_arg": "b3", "collection": "b3"},
} }
async def handle_not_found(config, collection): assert (
return missing sorted(
assert sorted(
await aiostream.stream.list(
expand_collections( expand_collections(
shortcuts, shortcuts,
config_a, config_a,
config_b, config_b,
get_discovered_a, get_discovered_a,
get_discovered_b, get_discovered_b,
handle_not_found, lambda config, collection: missing,
) )
) )
) == sorted(expected) == sorted(expected)
)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from contextlib import contextmanager from contextlib import contextmanager
from unittest.mock import patch from unittest.mock import patch
@ -8,8 +6,8 @@ import pytest
from hypothesis import given from hypothesis import given
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.cli.fetchparams import STRATEGIES
from vdirsyncer.cli.fetchparams import expand_fetch_params from vdirsyncer.cli.fetchparams import expand_fetch_params
from vdirsyncer.cli.fetchparams import STRATEGIES
@pytest.fixture @pytest.fixture
@ -108,7 +106,7 @@ def test_failed_strategy(monkeypatch, value_cache):
def strategy(x): def strategy(x):
calls.append(x) calls.append(x)
raise KeyboardInterrupt raise KeyboardInterrupt()
monkeypatch.setitem(STRATEGIES, "mystrategy", strategy) monkeypatch.setitem(STRATEGIES, "mystrategy", strategy)

View file

@ -1,13 +1,10 @@
from __future__ import annotations
import contextlib
import hypothesis.strategies as st import hypothesis.strategies as st
from hypothesis import assume from hypothesis import assume
from hypothesis import given from hypothesis import given
from vdirsyncer.sync.status import SqliteStatus from vdirsyncer.sync.status import SqliteStatus
status_dict_strategy = st.dictionaries( status_dict_strategy = st.dictionaries(
st.text(), st.text(),
st.tuples( st.tuples(
@ -26,13 +23,13 @@ def test_legacy_status(status_dict):
hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()} hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()}
hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()} hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()}
assume(len(hrefs_a) == len(status_dict) == len(hrefs_b)) assume(len(hrefs_a) == len(status_dict) == len(hrefs_b))
with contextlib.closing(SqliteStatus()) as status: status = SqliteStatus()
status.load_legacy_status(status_dict) status.load_legacy_status(status_dict)
assert dict(status.to_legacy_status()) == status_dict assert dict(status.to_legacy_status()) == status_dict
for ident, (meta_a, meta_b) in status_dict.items(): for ident, (meta_a, meta_b) in status_dict.items():
ident_a, meta2_a = status.get_by_href_a(meta_a["href"]) ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
ident_b, meta2_b = status.get_by_href_b(meta_b["href"]) ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
assert meta2_a.to_status() == meta_a assert meta2_a.to_status() == meta_a
assert meta2_b.to_status() == meta_b assert meta2_b.to_status() == meta_b
assert ident_a == ident_b == ident assert ident_a == ident_b == ident

View file

@ -1,21 +1,16 @@
from __future__ import annotations
import asyncio
import contextlib
from copy import deepcopy from copy import deepcopy
import aiostream
import hypothesis.strategies as st import hypothesis.strategies as st
import pytest import pytest
from hypothesis import assume from hypothesis import assume
from hypothesis.stateful import Bundle from hypothesis.stateful import Bundle
from hypothesis.stateful import RuleBasedStateMachine
from hypothesis.stateful import rule from hypothesis.stateful import rule
from hypothesis.stateful import RuleBasedStateMachine
from tests import blow_up from tests import blow_up
from tests import uid_strategy from tests import uid_strategy
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.storage.memory import _random_string from vdirsyncer.storage.memory import _random_string
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.sync import sync as _sync from vdirsyncer.sync import sync as _sync
from vdirsyncer.sync.exceptions import BothReadOnly from vdirsyncer.sync.exceptions import BothReadOnly
from vdirsyncer.sync.exceptions import IdentConflict from vdirsyncer.sync.exceptions import IdentConflict
@ -26,12 +21,13 @@ from vdirsyncer.sync.status import SqliteStatus
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
async def sync(a, b, status, *args, **kwargs) -> None: def sync(a, b, status, *args, **kwargs):
with contextlib.closing(SqliteStatus(":memory:")) as new_status: new_status = SqliteStatus(":memory:")
new_status.load_legacy_status(status) new_status.load_legacy_status(status)
await _sync(a, b, new_status, *args, **kwargs) rv = _sync(a, b, new_status, *args, **kwargs)
status.clear() status.clear()
status.update(new_status.to_legacy_status()) status.update(new_status.to_legacy_status())
return rv
def empty_storage(x): def empty_storage(x):
@ -42,49 +38,45 @@ def items(s):
return {x[1].raw for x in s.items.values()} return {x[1].raw for x in s.items.values()}
@pytest.mark.asyncio def test_irrelevant_status():
async def test_irrelevant_status():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {"1": ("1", 1234, "1.ics", 2345)} status = {"1": ("1", 1234, "1.ics", 2345)}
await sync(a, b, status) sync(a, b, status)
assert not status assert not status
assert not items(a) assert not items(a)
assert not items(b) assert not items(b)
@pytest.mark.asyncio def test_missing_status():
async def test_missing_status():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item = Item("asdf") item = Item("asdf")
await a.upload(item) a.upload(item)
await b.upload(item) b.upload(item)
await sync(a, b, status) sync(a, b, status)
assert len(status) == 1 assert len(status) == 1
assert items(a) == items(b) == {item.raw} assert items(a) == items(b) == {item.raw}
@pytest.mark.asyncio def test_missing_status_and_different_items():
async def test_missing_status_and_different_items():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item1 = Item("UID:1\nhaha") item1 = Item("UID:1\nhaha")
item2 = Item("UID:1\nhoho") item2 = Item("UID:1\nhoho")
await a.upload(item1) a.upload(item1)
await b.upload(item2) b.upload(item2)
with pytest.raises(SyncConflict): with pytest.raises(SyncConflict):
await sync(a, b, status) sync(a, b, status)
assert not status assert not status
await sync(a, b, status, conflict_resolution="a wins") sync(a, b, status, conflict_resolution="a wins")
assert items(a) == items(b) == {item1.raw} assert items(a) == items(b) == {item1.raw}
@pytest.mark.asyncio def test_read_only_and_prefetch():
async def test_read_only_and_prefetch():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
b.read_only = True b.read_only = True
@ -92,156 +84,147 @@ async def test_read_only_and_prefetch():
status = {} status = {}
item1 = Item("UID:1\nhaha") item1 = Item("UID:1\nhaha")
item2 = Item("UID:2\nhoho") item2 = Item("UID:2\nhoho")
await a.upload(item1) a.upload(item1)
await a.upload(item2) a.upload(item2)
await sync(a, b, status, force_delete=True) sync(a, b, status, force_delete=True)
await sync(a, b, status, force_delete=True) sync(a, b, status, force_delete=True)
assert not items(a) assert not items(a) and not items(b)
assert not items(b)
@pytest.mark.asyncio def test_partial_sync_error():
async def test_partial_sync_error():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
await a.upload(Item("UID:0")) a.upload(Item("UID:0"))
b.read_only = True b.read_only = True
with pytest.raises(PartialSync): with pytest.raises(PartialSync):
await sync(a, b, status, partial_sync="error") sync(a, b, status, partial_sync="error")
@pytest.mark.asyncio def test_partial_sync_ignore():
async def test_partial_sync_ignore():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item0 = Item("UID:0\nhehe") item0 = Item("UID:0\nhehe")
await a.upload(item0) a.upload(item0)
await b.upload(item0) b.upload(item0)
b.read_only = True b.read_only = True
item1 = Item("UID:1\nhaha") item1 = Item("UID:1\nhaha")
await a.upload(item1) a.upload(item1)
await sync(a, b, status, partial_sync="ignore") sync(a, b, status, partial_sync="ignore")
await sync(a, b, status, partial_sync="ignore") sync(a, b, status, partial_sync="ignore")
assert items(a) == {item0.raw, item1.raw} assert items(a) == {item0.raw, item1.raw}
assert items(b) == {item0.raw} assert items(b) == {item0.raw}
@pytest.mark.asyncio def test_partial_sync_ignore2():
async def test_partial_sync_ignore2():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href, etag = await a.upload(Item("UID:0")) href, etag = a.upload(Item("UID:0"))
a.read_only = True a.read_only = True
await sync(a, b, status, partial_sync="ignore", force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
assert items(b) == items(a) == {"UID:0"} assert items(b) == items(a) == {"UID:0"}
b.items.clear() b.items.clear()
await sync(a, b, status, partial_sync="ignore", force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
await sync(a, b, status, partial_sync="ignore", force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
assert items(a) == {"UID:0"} assert items(a) == {"UID:0"}
assert not b.items assert not b.items
a.read_only = False a.read_only = False
await a.update(href, Item("UID:0\nupdated"), etag) a.update(href, Item("UID:0\nupdated"), etag)
a.read_only = True a.read_only = True
await sync(a, b, status, partial_sync="ignore", force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True)
assert items(b) == items(a) == {"UID:0\nupdated"} assert items(b) == items(a) == {"UID:0\nupdated"}
@pytest.mark.asyncio def test_upload_and_update():
async def test_upload_and_update():
a = MemoryStorage(fileext=".a") a = MemoryStorage(fileext=".a")
b = MemoryStorage(fileext=".b") b = MemoryStorage(fileext=".b")
status = {} status = {}
item = Item("UID:1") # new item 1 in a item = Item("UID:1") # new item 1 in a
await a.upload(item) a.upload(item)
await sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw} assert items(b) == items(a) == {item.raw}
item = Item("UID:1\nASDF:YES") # update of item 1 in b item = Item("UID:1\nASDF:YES") # update of item 1 in b
await b.update("1.b", item, (await b.get("1.b"))[1]) b.update("1.b", item, b.get("1.b")[1])
await sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw} assert items(b) == items(a) == {item.raw}
item2 = Item("UID:2") # new item 2 in b item2 = Item("UID:2") # new item 2 in b
await b.upload(item2) b.upload(item2)
await sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw, item2.raw} assert items(b) == items(a) == {item.raw, item2.raw}
item2 = Item("UID:2\nASDF:YES") # update of item 2 in a item2 = Item("UID:2\nASDF:YES") # update of item 2 in a
await a.update("2.a", item2, (await a.get("2.a"))[1]) a.update("2.a", item2, a.get("2.a")[1])
await sync(a, b, status) sync(a, b, status)
assert items(b) == items(a) == {item.raw, item2.raw} assert items(b) == items(a) == {item.raw, item2.raw}
@pytest.mark.asyncio def test_deletion():
async def test_deletion():
a = MemoryStorage(fileext=".a") a = MemoryStorage(fileext=".a")
b = MemoryStorage(fileext=".b") b = MemoryStorage(fileext=".b")
status = {} status = {}
item = Item("UID:1") item = Item("UID:1")
await a.upload(item) a.upload(item)
item2 = Item("UID:2") item2 = Item("UID:2")
await a.upload(item2) a.upload(item2)
await sync(a, b, status) sync(a, b, status)
await b.delete("1.b", (await b.get("1.b"))[1]) b.delete("1.b", b.get("1.b")[1])
await sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {item2.raw} assert items(a) == items(b) == {item2.raw}
await a.upload(item) a.upload(item)
await sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {item.raw, item2.raw} assert items(a) == items(b) == {item.raw, item2.raw}
await a.delete("1.a", (await a.get("1.a"))[1]) a.delete("1.a", a.get("1.a")[1])
await sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {item2.raw} assert items(a) == items(b) == {item2.raw}
@pytest.mark.asyncio def test_insert_hash():
async def test_insert_hash():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
item = Item("UID:1") item = Item("UID:1")
href, etag = await a.upload(item) href, etag = a.upload(item)
await sync(a, b, status) sync(a, b, status)
for d in status["1"]: for d in status["1"]:
del d["hash"] del d["hash"]
await a.update(href, Item("UID:1\nHAHA:YES"), etag) a.update(href, Item("UID:1\nHAHA:YES"), etag)
await sync(a, b, status) sync(a, b, status)
assert "hash" in status["1"][0] assert "hash" in status["1"][0] and "hash" in status["1"][1]
assert "hash" in status["1"][1]
@pytest.mark.asyncio def test_already_synced():
async def test_already_synced():
a = MemoryStorage(fileext=".a") a = MemoryStorage(fileext=".a")
b = MemoryStorage(fileext=".b") b = MemoryStorage(fileext=".b")
item = Item("UID:1") item = Item("UID:1")
await a.upload(item) a.upload(item)
await b.upload(item) b.upload(item)
status = { status = {
"1": ( "1": (
{"href": "1.a", "hash": item.hash, "etag": (await a.get("1.a"))[1]}, {"href": "1.a", "hash": item.hash, "etag": a.get("1.a")[1]},
{"href": "1.b", "hash": item.hash, "etag": (await b.get("1.b"))[1]}, {"href": "1.b", "hash": item.hash, "etag": b.get("1.b")[1]},
) )
} }
old_status = deepcopy(status) old_status = deepcopy(status)
@ -250,73 +233,69 @@ async def test_already_synced():
) )
for _ in (1, 2): for _ in (1, 2):
await sync(a, b, status) sync(a, b, status)
assert status == old_status assert status == old_status
assert items(a) == items(b) == {item.raw} assert items(a) == items(b) == {item.raw}
@pytest.mark.parametrize("winning_storage", "ab") @pytest.mark.parametrize("winning_storage", "ab")
@pytest.mark.asyncio def test_conflict_resolution_both_etags_new(winning_storage):
async def test_conflict_resolution_both_etags_new(winning_storage):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item("UID:1") item = Item("UID:1")
href_a, etag_a = await a.upload(item) href_a, etag_a = a.upload(item)
href_b, etag_b = await b.upload(item) href_b, etag_b = b.upload(item)
status = {} status = {}
await sync(a, b, status) sync(a, b, status)
assert status assert status
item_a = Item("UID:1\nitem a") item_a = Item("UID:1\nitem a")
item_b = Item("UID:1\nitem b") item_b = Item("UID:1\nitem b")
await a.update(href_a, item_a, etag_a) a.update(href_a, item_a, etag_a)
await b.update(href_b, item_b, etag_b) b.update(href_b, item_b, etag_b)
with pytest.raises(SyncConflict): with pytest.raises(SyncConflict):
await sync(a, b, status) sync(a, b, status)
await sync(a, b, status, conflict_resolution=f"{winning_storage} wins") sync(a, b, status, conflict_resolution=f"{winning_storage} wins")
assert ( assert (
items(a) == items(b) == {item_a.raw if winning_storage == "a" else item_b.raw} items(a) == items(b) == {item_a.raw if winning_storage == "a" else item_b.raw}
) )
@pytest.mark.asyncio def test_updated_and_deleted():
async def test_updated_and_deleted():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
href_a, etag_a = await a.upload(Item("UID:1")) href_a, etag_a = a.upload(Item("UID:1"))
status = {} status = {}
await sync(a, b, status, force_delete=True) sync(a, b, status, force_delete=True)
((href_b, etag_b),) = await aiostream.stream.list(b.list()) ((href_b, etag_b),) = b.list()
await b.delete(href_b, etag_b) b.delete(href_b, etag_b)
updated = Item("UID:1\nupdated") updated = Item("UID:1\nupdated")
await a.update(href_a, updated, etag_a) a.update(href_a, updated, etag_a)
await sync(a, b, status, force_delete=True) sync(a, b, status, force_delete=True)
assert items(a) == items(b) == {updated.raw} assert items(a) == items(b) == {updated.raw}
@pytest.mark.asyncio def test_conflict_resolution_invalid_mode():
async def test_conflict_resolution_invalid_mode():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item_a = Item("UID:1\nitem a") item_a = Item("UID:1\nitem a")
item_b = Item("UID:1\nitem b") item_b = Item("UID:1\nitem b")
await a.upload(item_a) a.upload(item_a)
await b.upload(item_b) b.upload(item_b)
with pytest.raises(ValueError): with pytest.raises(ValueError):
await sync(a, b, {}, conflict_resolution="yolo") sync(a, b, {}, conflict_resolution="yolo")
@pytest.mark.asyncio def test_conflict_resolution_new_etags_without_changes():
async def test_conflict_resolution_new_etags_without_changes():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item("UID:1") item = Item("UID:1")
href_a, etag_a = await a.upload(item) href_a, etag_a = a.upload(item)
href_b, etag_b = await b.upload(item) href_b, etag_b = b.upload(item)
status = {"1": (href_a, "BOGUS_a", href_b, "BOGUS_b")} status = {"1": (href_a, "BOGUS_a", href_b, "BOGUS_b")}
await sync(a, b, status) sync(a, b, status)
((ident, (status_a, status_b)),) = status.items() ((ident, (status_a, status_b)),) = status.items()
assert ident == "1" assert ident == "1"
@ -326,8 +305,7 @@ async def test_conflict_resolution_new_etags_without_changes():
assert status_b["etag"] == etag_b assert status_b["etag"] == etag_b
@pytest.mark.asyncio def test_uses_get_multi(monkeypatch):
async def test_uses_get_multi(monkeypatch):
def breakdown(*a, **kw): def breakdown(*a, **kw):
raise AssertionError("Expected use of get_multi") raise AssertionError("Expected use of get_multi")
@ -335,11 +313,11 @@ async def test_uses_get_multi(monkeypatch):
old_get = MemoryStorage.get old_get = MemoryStorage.get
async def get_multi(self, hrefs): def get_multi(self, hrefs):
hrefs = list(hrefs) hrefs = list(hrefs)
get_multi_calls.append(hrefs) get_multi_calls.append(hrefs)
for href in hrefs: for href in hrefs:
item, etag = await old_get(self, href) item, etag = old_get(self, href)
yield href, item, etag yield href, item, etag
monkeypatch.setattr(MemoryStorage, "get", breakdown) monkeypatch.setattr(MemoryStorage, "get", breakdown)
@ -348,77 +326,72 @@ async def test_uses_get_multi(monkeypatch):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item("UID:1") item = Item("UID:1")
expected_href, _etag = await a.upload(item) expected_href, etag = a.upload(item)
await sync(a, b, {}) sync(a, b, {})
assert get_multi_calls == [[expected_href]] assert get_multi_calls == [[expected_href]]
@pytest.mark.asyncio def test_empty_storage_dataloss():
async def test_empty_storage_dataloss():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
await a.upload(Item("UID:1")) a.upload(Item("UID:1"))
await a.upload(Item("UID:2")) a.upload(Item("UID:2"))
status = {} status = {}
await sync(a, b, status) sync(a, b, status)
with pytest.raises(StorageEmpty): with pytest.raises(StorageEmpty):
await sync(MemoryStorage(), b, status) sync(MemoryStorage(), b, status)
with pytest.raises(StorageEmpty): with pytest.raises(StorageEmpty):
await sync(a, MemoryStorage(), status) sync(a, MemoryStorage(), status)
@pytest.mark.asyncio def test_no_uids():
async def test_no_uids():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
await a.upload(Item("ASDF")) a.upload(Item("ASDF"))
await b.upload(Item("FOOBAR")) b.upload(Item("FOOBAR"))
status = {} status = {}
await sync(a, b, status) sync(a, b, status)
assert items(a) == items(b) == {"ASDF", "FOOBAR"} assert items(a) == items(b) == {"ASDF", "FOOBAR"}
@pytest.mark.asyncio def test_changed_uids():
async def test_changed_uids():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
href_a, etag_a = await a.upload(Item("UID:A-ONE")) href_a, etag_a = a.upload(Item("UID:A-ONE"))
_href_b, _etag_b = await b.upload(Item("UID:B-ONE")) href_b, etag_b = b.upload(Item("UID:B-ONE"))
status = {} status = {}
await sync(a, b, status) sync(a, b, status)
await a.update(href_a, Item("UID:A-TWO"), etag_a) a.update(href_a, Item("UID:A-TWO"), etag_a)
await sync(a, b, status) sync(a, b, status)
@pytest.mark.asyncio def test_both_readonly():
async def test_both_readonly():
a = MemoryStorage(read_only=True) a = MemoryStorage(read_only=True)
b = MemoryStorage(read_only=True) b = MemoryStorage(read_only=True)
assert a.read_only assert a.read_only
assert b.read_only assert b.read_only
status = {} status = {}
with pytest.raises(BothReadOnly): with pytest.raises(BothReadOnly):
await sync(a, b, status) sync(a, b, status)
@pytest.mark.asyncio def test_partial_sync_revert():
async def test_partial_sync_revert():
a = MemoryStorage(instance_name="a") a = MemoryStorage(instance_name="a")
b = MemoryStorage(instance_name="b") b = MemoryStorage(instance_name="b")
status = {} status = {}
await a.upload(Item("UID:1")) a.upload(Item("UID:1"))
await b.upload(Item("UID:2")) b.upload(Item("UID:2"))
b.read_only = True b.read_only = True
await sync(a, b, status, partial_sync="revert") sync(a, b, status, partial_sync="revert")
assert len(status) == 2 assert len(status) == 2
assert items(a) == {"UID:1", "UID:2"} assert items(a) == {"UID:1", "UID:2"}
assert items(b) == {"UID:2"} assert items(b) == {"UID:2"}
await sync(a, b, status, partial_sync="revert") sync(a, b, status, partial_sync="revert")
assert len(status) == 1 assert len(status) == 1
assert items(a) == {"UID:2"} assert items(a) == {"UID:2"}
assert items(b) == {"UID:2"} assert items(b) == {"UID:2"}
@ -426,39 +399,37 @@ async def test_partial_sync_revert():
# Check that updates get reverted # Check that updates get reverted
a.items[next(iter(a.items))] = ("foo", Item("UID:2\nupdated")) a.items[next(iter(a.items))] = ("foo", Item("UID:2\nupdated"))
assert items(a) == {"UID:2\nupdated"} assert items(a) == {"UID:2\nupdated"}
await sync(a, b, status, partial_sync="revert") sync(a, b, status, partial_sync="revert")
assert len(status) == 1 assert len(status) == 1
assert items(a) == {"UID:2\nupdated"} assert items(a) == {"UID:2\nupdated"}
await sync(a, b, status, partial_sync="revert") sync(a, b, status, partial_sync="revert")
assert items(a) == {"UID:2"} assert items(a) == {"UID:2"}
# Check that deletions get reverted # Check that deletions get reverted
a.items.clear() a.items.clear()
await sync(a, b, status, partial_sync="revert", force_delete=True) sync(a, b, status, partial_sync="revert", force_delete=True)
await sync(a, b, status, partial_sync="revert", force_delete=True) sync(a, b, status, partial_sync="revert", force_delete=True)
assert items(a) == {"UID:2"} assert items(a) == {"UID:2"}
@pytest.mark.parametrize("sync_inbetween", [True, False]) @pytest.mark.parametrize("sync_inbetween", (True, False))
@pytest.mark.asyncio def test_ident_conflict(sync_inbetween):
async def test_ident_conflict(sync_inbetween):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href_a, etag_a = await a.upload(Item("UID:aaa")) href_a, etag_a = a.upload(Item("UID:aaa"))
href_b, etag_b = await a.upload(Item("UID:bbb")) href_b, etag_b = a.upload(Item("UID:bbb"))
if sync_inbetween: if sync_inbetween:
await sync(a, b, status) sync(a, b, status)
await a.update(href_a, Item("UID:xxx"), etag_a) a.update(href_a, Item("UID:xxx"), etag_a)
await a.update(href_b, Item("UID:xxx"), etag_b) a.update(href_b, Item("UID:xxx"), etag_b)
with pytest.raises(IdentConflict): with pytest.raises(IdentConflict):
await sync(a, b, status) sync(a, b, status)
@pytest.mark.asyncio def test_moved_href():
async def test_moved_href():
""" """
Concrete application: ppl_ stores contact aliases in filenames, which means Concrete application: ppl_ stores contact aliases in filenames, which means
item's hrefs get changed. Vdirsyncer doesn't synchronize this data, but item's hrefs get changed. Vdirsyncer doesn't synchronize this data, but
@ -469,8 +440,8 @@ async def test_moved_href():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
_href, _etag = await a.upload(Item("UID:haha")) href, etag = a.upload(Item("UID:haha"))
await sync(a, b, status) sync(a, b, status)
b.items["lol"] = b.items.pop("haha") b.items["lol"] = b.items.pop("haha")
@ -480,7 +451,7 @@ async def test_moved_href():
# No actual sync actions # No actual sync actions
a.delete = a.update = a.upload = b.delete = b.update = b.upload = blow_up a.delete = a.update = a.upload = b.delete = b.update = b.upload = blow_up
await sync(a, b, status) sync(a, b, status)
assert len(status) == 1 assert len(status) == 1
assert items(a) == items(b) == {"UID:haha"} assert items(a) == items(b) == {"UID:haha"}
assert status["haha"][1]["href"] == "lol" assert status["haha"][1]["href"] == "lol"
@ -489,13 +460,12 @@ async def test_moved_href():
# Further sync should be a noop. Not even prefetching should occur. # Further sync should be a noop. Not even prefetching should occur.
b.get_multi = blow_up b.get_multi = blow_up
await sync(a, b, status) sync(a, b, status)
assert old_status == status assert old_status == status
assert items(a) == items(b) == {"UID:haha"} assert items(a) == items(b) == {"UID:haha"}
@pytest.mark.asyncio def test_bogus_etag_change():
async def test_bogus_etag_change():
"""Assert that sync algorithm is resilient against etag changes if content """Assert that sync algorithm is resilient against etag changes if content
didn\'t change. didn\'t change.
@ -505,33 +475,27 @@ async def test_bogus_etag_change():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href_a, etag_a = await a.upload(Item("UID:ASDASD")) href_a, etag_a = a.upload(Item("UID:ASDASD"))
await sync(a, b, status) sync(a, b, status)
assert ( assert len(status) == len(list(a.list())) == len(list(b.list())) == 1
len(status)
== len(await aiostream.stream.list(a.list()))
== len(await aiostream.stream.list(b.list()))
== 1
)
((href_b, etag_b),) = await aiostream.stream.list(b.list()) ((href_b, etag_b),) = b.list()
await a.update(href_a, Item("UID:ASDASD"), etag_a) a.update(href_a, Item("UID:ASDASD"), etag_a)
await b.update(href_b, Item("UID:ASDASD\nACTUALCHANGE:YES"), etag_b) b.update(href_b, Item("UID:ASDASD\nACTUALCHANGE:YES"), etag_b)
b.delete = b.update = b.upload = blow_up b.delete = b.update = b.upload = blow_up
await sync(a, b, status) sync(a, b, status)
assert len(status) == 1 assert len(status) == 1
assert items(a) == items(b) == {"UID:ASDASD\nACTUALCHANGE:YES"} assert items(a) == items(b) == {"UID:ASDASD\nACTUALCHANGE:YES"}
@pytest.mark.asyncio def test_unicode_hrefs():
async def test_unicode_hrefs():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
_href, _etag = await a.upload(Item("UID:äää")) href, etag = a.upload(Item("UID:äää"))
await sync(a, b, status) sync(a, b, status)
class ActionIntentionallyFailed(Exception): class ActionIntentionallyFailed(Exception):
@ -539,7 +503,7 @@ class ActionIntentionallyFailed(Exception):
def action_failure(*a, **kw): def action_failure(*a, **kw):
raise ActionIntentionallyFailed raise ActionIntentionallyFailed()
class SyncMachine(RuleBasedStateMachine): class SyncMachine(RuleBasedStateMachine):
@ -547,13 +511,12 @@ class SyncMachine(RuleBasedStateMachine):
Storage = Bundle("storage") Storage = Bundle("storage")
@rule(target=Storage, flaky_etags=st.booleans(), null_etag_on_upload=st.booleans()) @rule(target=Storage, flaky_etags=st.booleans(), null_etag_on_upload=st.booleans())
@pytest.mark.asyncio
def newstorage(self, flaky_etags, null_etag_on_upload): def newstorage(self, flaky_etags, null_etag_on_upload):
s = MemoryStorage() s = MemoryStorage()
if flaky_etags: if flaky_etags:
async def get(href): def get(href):
_old_etag, item = s.items[href] old_etag, item = s.items[href]
etag = _random_string() etag = _random_string()
s.items[href] = etag, item s.items[href] = etag, item
return item, etag return item, etag
@ -563,15 +526,8 @@ class SyncMachine(RuleBasedStateMachine):
if null_etag_on_upload: if null_etag_on_upload:
_old_upload = s.upload _old_upload = s.upload
_old_update = s.update _old_update = s.update
s.upload = lambda item: (_old_upload(item)[0], "NULL")
async def upload(item): s.update = lambda h, i, e: _old_update(h, i, e) and "NULL"
return (await _old_upload(item))[0], "NULL"
async def update(href, item, etag):
return await _old_update(href, item, etag) and "NULL"
s.upload = upload
s.update = update
return s return s
@ -591,11 +547,11 @@ class SyncMachine(RuleBasedStateMachine):
_old_upload = s.upload _old_upload = s.upload
_old_update = s.update _old_update = s.update
async def upload(item): def upload(item):
return (await _old_upload(item))[0], None return _old_upload(item)[0], None
async def update(href, item, etag): def update(href, item, etag):
return await _old_update(href, item, etag) _old_update(href, item, etag)
s.upload = upload s.upload = upload
s.update = update s.update = update
@ -634,71 +590,66 @@ class SyncMachine(RuleBasedStateMachine):
with_error_callback, with_error_callback,
partial_sync, partial_sync,
): ):
async def inner(): assume(a is not b)
assume(a is not b) old_items_a = items(a)
old_items_a = items(a) old_items_b = items(b)
old_items_b = items(b)
a.instance_name = "a" a.instance_name = "a"
b.instance_name = "b" b.instance_name = "b"
errors = [] errors = []
error_callback = errors.append if with_error_callback else None if with_error_callback:
error_callback = errors.append
else:
error_callback = None
try: try:
# If one storage is read-only, double-sync because changes don't # If one storage is read-only, double-sync because changes don't
# get reverted immediately. # get reverted immediately.
for _ in range(2 if a.read_only or b.read_only else 1): for _ in range(2 if a.read_only or b.read_only else 1):
await sync( sync(
a, a,
b, b,
status, status,
force_delete=force_delete, force_delete=force_delete,
conflict_resolution=conflict_resolution, conflict_resolution=conflict_resolution,
error_callback=error_callback, error_callback=error_callback,
partial_sync=partial_sync, partial_sync=partial_sync,
)
for e in errors:
raise e
except PartialSync:
assert partial_sync == "error"
except ActionIntentionallyFailed:
pass
except BothReadOnly:
assert a.read_only
assert b.read_only
assume(False)
except StorageEmpty:
if force_delete:
raise
else:
not_a = not await aiostream.stream.list(a.list())
not_b = not await aiostream.stream.list(b.list())
assert not_a or not_b
else:
items_a = items(a)
items_b = items(b)
assert items_a == items_b or partial_sync == "ignore"
assert items_a == old_items_a or not a.read_only
assert items_b == old_items_b or not b.read_only
assert (
set(a.items) | set(b.items) == set(status)
or partial_sync == "ignore"
) )
asyncio.run(inner()) for e in errors:
raise e
except PartialSync:
assert partial_sync == "error"
except ActionIntentionallyFailed:
pass
except BothReadOnly:
assert a.read_only and b.read_only
assume(False)
except StorageEmpty:
if force_delete:
raise
else:
assert not list(a.list()) or not list(b.list())
else:
items_a = items(a)
items_b = items(b)
assert items_a == items_b or partial_sync == "ignore"
assert items_a == old_items_a or not a.read_only
assert items_b == old_items_b or not b.read_only
assert (
set(a.items) | set(b.items) == set(status) or partial_sync == "ignore"
)
TestSyncMachine = SyncMachine.TestCase TestSyncMachine = SyncMachine.TestCase
@pytest.mark.parametrize("error_callback", [True, False]) @pytest.mark.parametrize("error_callback", [True, False])
@pytest.mark.asyncio def test_rollback(error_callback):
async def test_rollback(error_callback):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
@ -711,7 +662,7 @@ async def test_rollback(error_callback):
if error_callback: if error_callback:
errors = [] errors = []
await sync( sync(
a, a,
b, b,
status=status, status=status,
@ -726,22 +677,16 @@ async def test_rollback(error_callback):
assert status["1"] assert status["1"]
else: else:
with pytest.raises(ActionIntentionallyFailed): with pytest.raises(ActionIntentionallyFailed):
await sync(a, b, status=status, conflict_resolution="a wins") sync(a, b, status=status, conflict_resolution="a wins")
@pytest.mark.asyncio def test_duplicate_hrefs():
async def test_duplicate_hrefs():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
a.list = lambda: [("a", "a")] * 3
async def fake_list():
for item in [("a", "a")] * 3:
yield item
a.list = fake_list
a.items["a"] = ("a", Item("UID:a")) a.items["a"] = ("a", Item("UID:a"))
status = {} status = {}
await sync(a, b, status) sync(a, b, status)
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
await sync(a, b, status) sync(a, b, status)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from vdirsyncer import exceptions from vdirsyncer import exceptions

View file

@ -1,140 +1,116 @@
from __future__ import annotations
import asyncio
import hypothesis.strategies as st import hypothesis.strategies as st
import pytest import pytest
import pytest_asyncio
from hypothesis import example from hypothesis import example
from hypothesis import given from hypothesis import given
from tests import blow_up from tests import blow_up
from vdirsyncer.exceptions import UserError from vdirsyncer.exceptions import UserError
from vdirsyncer.metasync import MetaSyncConflict
from vdirsyncer.metasync import logger from vdirsyncer.metasync import logger
from vdirsyncer.metasync import metasync from vdirsyncer.metasync import metasync
from vdirsyncer.metasync import MetaSyncConflict
from vdirsyncer.storage.base import normalize_meta_value from vdirsyncer.storage.base import normalize_meta_value
from vdirsyncer.storage.memory import MemoryStorage from vdirsyncer.storage.memory import MemoryStorage
@pytest.mark.asyncio def test_irrelevant_status():
async def test_irrelevant_status():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {"foo": "bar"} status = {"foo": "bar"}
await metasync(a, b, status, keys=()) metasync(a, b, status, keys=())
assert not status assert not status
@pytest.mark.asyncio def test_basic(monkeypatch):
async def test_basic(monkeypatch):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
await a.set_meta("foo", None) a.set_meta("foo", "bar")
await metasync(a, b, status, keys=["foo"]) metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") is None assert a.get_meta("foo") == b.get_meta("foo") == "bar"
assert await b.get_meta("foo") is None
await a.set_meta("foo", "bar") a.set_meta("foo", "baz")
await metasync(a, b, status, keys=["foo"]) metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") == await b.get_meta("foo") == "bar" assert a.get_meta("foo") == b.get_meta("foo") == "baz"
await a.set_meta("foo", "baz")
await metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") == await b.get_meta("foo") == "baz"
monkeypatch.setattr(a, "set_meta", blow_up) monkeypatch.setattr(a, "set_meta", blow_up)
monkeypatch.setattr(b, "set_meta", blow_up) monkeypatch.setattr(b, "set_meta", blow_up)
await metasync(a, b, status, keys=["foo"]) metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") == await b.get_meta("foo") == "baz" assert a.get_meta("foo") == b.get_meta("foo") == "baz"
monkeypatch.undo() monkeypatch.undo()
monkeypatch.undo() monkeypatch.undo()
await b.set_meta("foo", None) b.set_meta("foo", None)
await metasync(a, b, status, keys=["foo"]) metasync(a, b, status, keys=["foo"])
assert not await a.get_meta("foo") assert not a.get_meta("foo") and not b.get_meta("foo")
assert not await b.get_meta("foo")
@pytest_asyncio.fixture @pytest.fixture
async def conflict_state(request): def conflict_state(request):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
await a.set_meta("foo", "bar") a.set_meta("foo", "bar")
await b.set_meta("foo", "baz") b.set_meta("foo", "baz")
async def do_cleanup(): def cleanup():
assert await a.get_meta("foo") == "bar" assert a.get_meta("foo") == "bar"
assert await b.get_meta("foo") == "baz" assert b.get_meta("foo") == "baz"
assert not status assert not status
request.addfinalizer(lambda: asyncio.run(do_cleanup())) request.addfinalizer(cleanup)
return a, b, status return a, b, status
@pytest_asyncio.fixture def test_conflict(conflict_state):
async def test_conflict(conflict_state):
a, b, status = conflict_state a, b, status = conflict_state
with pytest.raises(MetaSyncConflict): with pytest.raises(MetaSyncConflict):
await metasync(a, b, status, keys=["foo"]) metasync(a, b, status, keys=["foo"])
@pytest.mark.asyncio def test_invalid_conflict_resolution(conflict_state):
async def test_invalid_conflict_resolution(conflict_state):
a, b, status = conflict_state a, b, status = conflict_state
with pytest.raises(UserError) as excinfo: with pytest.raises(UserError) as excinfo:
await metasync(a, b, status, keys=["foo"], conflict_resolution="foo") metasync(a, b, status, keys=["foo"], conflict_resolution="foo")
assert "Invalid conflict resolution setting" in str(excinfo.value) assert "Invalid conflict resolution setting" in str(excinfo.value)
@pytest.mark.asyncio def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch):
async def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch):
a, b, status = conflict_state a, b, status = conflict_state
warnings = [] warnings = []
monkeypatch.setattr(logger, "warning", warnings.append) monkeypatch.setattr(logger, "warning", warnings.append)
with pytest.raises(MetaSyncConflict): with pytest.raises(MetaSyncConflict):
await metasync( metasync(a, b, status, keys=["foo"], conflict_resolution=lambda *a, **kw: None)
a,
b,
status,
keys=["foo"],
conflict_resolution=lambda *a, **kw: None,
)
assert warnings == ["Custom commands don't work on metasync."] assert warnings == ["Custom commands don't work on metasync."]
@pytest.mark.asyncio def test_conflict_same_content():
async def test_conflict_same_content():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
await a.set_meta("foo", "bar") a.set_meta("foo", "bar")
await b.set_meta("foo", "bar") b.set_meta("foo", "bar")
await metasync(a, b, status, keys=["foo"]) metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") == await b.get_meta("foo") == status["foo"] == "bar" assert a.get_meta("foo") == b.get_meta("foo") == status["foo"] == "bar"
@pytest.mark.parametrize("wins", "ab") @pytest.mark.parametrize("wins", "ab")
@pytest.mark.asyncio def test_conflict_x_wins(wins):
async def test_conflict_x_wins(wins):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
await a.set_meta("foo", "bar") a.set_meta("foo", "bar")
await b.set_meta("foo", "baz") b.set_meta("foo", "baz")
await metasync( metasync(
a, a,
b, b,
status, status,
@ -143,8 +119,8 @@ async def test_conflict_x_wins(wins):
) )
assert ( assert (
await a.get_meta("foo") a.get_meta("foo")
== await b.get_meta("foo") == b.get_meta("foo")
== status["foo"] == status["foo"]
== ("bar" if wins == "a" else "baz") == ("bar" if wins == "a" else "baz")
) )
@ -172,8 +148,7 @@ metadata = st.dictionaries(keys, values)
keys={"0"}, keys={"0"},
conflict_resolution="a wins", conflict_resolution="a wins",
) )
@pytest.mark.asyncio def test_fuzzing(a, b, status, keys, conflict_resolution):
async def test_fuzzing(a, b, status, keys, conflict_resolution):
def _get_storage(m, instance_name): def _get_storage(m, instance_name):
s = MemoryStorage(instance_name=instance_name) s = MemoryStorage(instance_name=instance_name)
s.metadata = m s.metadata = m
@ -184,13 +159,13 @@ async def test_fuzzing(a, b, status, keys, conflict_resolution):
winning_storage = a if conflict_resolution == "a wins" else b winning_storage = a if conflict_resolution == "a wins" else b
expected_values = { expected_values = {
key: await winning_storage.get_meta(key) for key in keys if key not in status key: winning_storage.get_meta(key) for key in keys if key not in status
} }
await metasync(a, b, status, keys=keys, conflict_resolution=conflict_resolution) metasync(a, b, status, keys=keys, conflict_resolution=conflict_resolution)
for key in keys: for key in keys:
s = status.get(key) s = status.get(key, "")
assert await a.get_meta(key) == await b.get_meta(key) == s assert a.get_meta(key) == b.get_meta(key) == s
if expected_values.get(key) and s: if expected_values.get(key, "") and s:
assert s == expected_values[key] assert s == expected_values[key]

View file

@ -1,9 +1,6 @@
from __future__ import annotations
import aiostream
import pytest import pytest
from hypothesis import HealthCheck
from hypothesis import given from hypothesis import given
from hypothesis import HealthCheck
from hypothesis import settings from hypothesis import settings
from tests import uid_strategy from tests import uid_strategy
@ -17,48 +14,43 @@ from vdirsyncer.vobject import Item
@given(uid=uid_strategy) @given(uid=uid_strategy)
# Using the random module for UIDs: # Using the random module for UIDs:
@settings(suppress_health_check=list(HealthCheck)) @settings(suppress_health_check=HealthCheck.all())
@pytest.mark.asyncio def test_repair_uids(uid):
async def test_repair_uids(uid):
s = MemoryStorage() s = MemoryStorage()
s.items = { s.items = {
"one": ("asdf", Item(f"BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD")), "one": ("asdf", Item(f"BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD")),
"two": ("asdf", Item(f"BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD")), "two": ("asdf", Item(f"BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD")),
} }
uid1, uid2 = [(await s.get(href))[0].uid async for href, etag in s.list()] uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2 assert uid1 == uid2
await repair_storage(s, repair_unsafe_uid=False) repair_storage(s, repair_unsafe_uid=False)
uid1, uid2 = [ uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
(await s.get(href))[0].uid
for href, etag in await aiostream.stream.list(s.list())
]
assert uid1 != uid2 assert uid1 != uid2
@given(uid=uid_strategy.filter(lambda x: not href_safe(x))) @given(uid=uid_strategy.filter(lambda x: not href_safe(x)))
# Using the random module for UIDs: # Using the random module for UIDs:
@settings(suppress_health_check=list(HealthCheck)) @settings(suppress_health_check=HealthCheck.all())
@pytest.mark.asyncio def test_repair_unsafe_uids(uid):
async def test_repair_unsafe_uids(uid):
s = MemoryStorage() s = MemoryStorage()
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
href, _etag = await s.upload(item) href, etag = s.upload(item)
assert (await s.get(href))[0].uid == uid assert s.get(href)[0].uid == uid
assert not href_safe(uid) assert not href_safe(uid)
await repair_storage(s, repair_unsafe_uid=True) repair_storage(s, repair_unsafe_uid=True)
new_href = (await aiostream.stream.list(s.list()))[0][0] new_href = list(s.list())[0][0]
assert href_safe(new_href) assert href_safe(new_href)
newuid = (await s.get(new_href))[0].uid newuid = s.get(new_href)[0].uid
assert href_safe(newuid) assert href_safe(newuid)
@pytest.mark.parametrize( @pytest.mark.parametrize(
("uid", "href"), [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")] "uid,href", [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
) )
def test_repair_unsafe_href(uid, href): def test_repair_unsafe_href(uid, href):
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")

View file

@ -1,136 +0,0 @@
from __future__ import annotations
import json
from unittest.mock import AsyncMock
from unittest.mock import Mock
import aiohttp
import pytest
from vdirsyncer.http import UsageLimitReached
from vdirsyncer.http import request
async def _create_mock_response(status: int, body: str | dict):
raw_body = body
text_body = json.dumps(body) if isinstance(body, dict) else body
mock_response = AsyncMock()
mock_response.status = status
mock_response.ok = 200 <= status < 300
mock_response.reason = "OK" if mock_response.ok else "Forbidden"
mock_response.headers = (
{"Content-Type": "application/json"}
if isinstance(raw_body, dict)
else {"Content-Type": "text/plain"}
)
mock_response.text.return_value = text_body
if isinstance(raw_body, dict):
mock_response.json.return_value = raw_body
else:
mock_response.json.side_effect = ValueError("Not JSON")
mock_response.raise_for_status = Mock(
side_effect=(
aiohttp.ClientResponseError(
request_info=AsyncMock(),
history=(),
status=status,
message=mock_response.reason,
headers=mock_response.headers,
)
if not mock_response.ok
else None
)
)
return mock_response
@pytest.mark.asyncio
async def test_request_retry_on_usage_limit():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate (max_retries - 1) 403 errors and then a 200 OK
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries - 1)
] + [await _create_mock_response(200, "OK")]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
response = await request("GET", url, mock_session)
assert response.status == 200
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_retry_exceeds_max_attempts():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate max_retries 403 errors and then a 200 OK
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries)
]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
with pytest.raises(UsageLimitReached):
await request("GET", url, mock_session)
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_json():
url = "http://example.com/api"
mock_session = AsyncMock()
# Generic non-Google 403 error payload (e.g., GitHub-style)
mock_session.request.side_effect = [
await _create_mock_response(403, {"message": "API rate limit exceeded"})
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because it's not the Google quotaExceeded shape
assert mock_session.request.call_count == 1
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_text():
url = "http://example.com/api"
mock_session = AsyncMock()
# Plain-text 403 body mentioning rate limits, but not structured as Google error
mock_session.request.side_effect = [
await _create_mock_response(403, "Rate limit exceeded")
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because the JSON shape is not Google quotaExceeded
assert mock_session.request.call_count == 1

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
import hypothesis.strategies as st import hypothesis.strategies as st
@ -7,16 +5,17 @@ import pytest
from hypothesis import assume from hypothesis import assume
from hypothesis import given from hypothesis import given
from hypothesis.stateful import Bundle from hypothesis.stateful import Bundle
from hypothesis.stateful import RuleBasedStateMachine
from hypothesis.stateful import rule from hypothesis.stateful import rule
from hypothesis.stateful import RuleBasedStateMachine
import vdirsyncer.vobject as vobject import vdirsyncer.vobject as vobject
from tests import BARE_EVENT_TEMPLATE from tests import BARE_EVENT_TEMPLATE
from tests import EVENT_TEMPLATE from tests import EVENT_TEMPLATE
from tests import EVENT_WITH_TIMEZONE_TEMPLATE from tests import EVENT_WITH_TIMEZONE_TEMPLATE
from tests import VCARD_TEMPLATE
from tests import normalize_item from tests import normalize_item
from tests import uid_strategy from tests import uid_strategy
from tests import VCARD_TEMPLATE
_simple_split = [ _simple_split = [
VCARD_TEMPLATE.format(r=123, uid=123), VCARD_TEMPLATE.format(r=123, uid=123),
@ -25,7 +24,7 @@ _simple_split = [
] ]
_simple_joined = "\r\n".join( _simple_joined = "\r\n".join(
["BEGIN:VADDRESSBOOK", *_simple_split, "END:VADDRESSBOOK\r\n"] ["BEGIN:VADDRESSBOOK"] + _simple_split + ["END:VADDRESSBOOK\r\n"]
) )
@ -124,7 +123,7 @@ def test_split_collection_timezones():
"END:VTIMEZONE" "END:VTIMEZONE"
) )
full = "\r\n".join(["BEGIN:VCALENDAR", *items, timezone, "END:VCALENDAR"]) full = "\r\n".join(["BEGIN:VCALENDAR"] + items + [timezone, "END:VCALENDAR"])
given = {normalize_item(item) for item in vobject.split_collection(full)} given = {normalize_item(item) for item in vobject.split_collection(full)}
expected = { expected = {
@ -154,7 +153,7 @@ def test_hash_item():
def test_multiline_uid(benchmark): def test_multiline_uid(benchmark):
a = "BEGIN:FOO\r\nUID:123456789abcd\r\n efgh\r\nEND:FOO\r\n" a = "BEGIN:FOO\r\n" "UID:123456789abcd\r\n" " efgh\r\n" "END:FOO\r\n"
assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh" assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh"
@ -237,31 +236,6 @@ def test_broken_item():
assert item.parsed is None assert item.parsed is None
def test_mismatched_end():
with pytest.raises(ValueError) as excinfo:
vobject._Component.parse(
[
"BEGIN:FOO",
"END:BAR",
]
)
assert "Got END:BAR, expected END:FOO at line 2" in str(excinfo.value)
def test_missing_end():
with pytest.raises(ValueError) as excinfo:
vobject._Component.parse(
[
"BEGIN:FOO",
"BEGIN:BAR",
"END:BAR",
]
)
assert "Missing END for component(s): FOO" in str(excinfo.value)
def test_multiple_items(): def test_multiple_items():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
vobject._Component.parse( vobject._Component.parse(
@ -299,7 +273,7 @@ def test_input_types():
value_strategy = st.text( value_strategy = st.text(
st.characters( st.characters(
exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), exclude_characters=":=" blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), blacklist_characters=":="
), ),
min_size=1, min_size=1,
).filter(lambda x: x.strip() == x) ).filter(lambda x: x.strip() == x)
@ -335,8 +309,7 @@ class VobjectMachine(RuleBasedStateMachine):
assert key in c assert key in c
assert c.get(key) == value assert c.get(key) == value
dump = "\r\n".join(c.dump_lines()) dump = "\r\n".join(c.dump_lines())
assert key in dump assert key in dump and value in dump
assert value in dump
@rule( @rule(
c=Parsed, c=Parsed,
@ -366,16 +339,6 @@ class VobjectMachine(RuleBasedStateMachine):
TestVobjectMachine = VobjectMachine.TestCase TestVobjectMachine = VobjectMachine.TestCase
def test_dupe_consecutive_keys():
state = VobjectMachine()
unparsed_0 = state.get_unparsed_lines(encoded=False, joined=False)
parsed_0 = state.parse(unparsed=unparsed_0)
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop(c=parsed_0, key="0", value="1")
state.teardown()
def test_component_contains(): def test_component_contains():
item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"]) item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"])
@ -383,4 +346,4 @@ def test_component_contains():
assert "BAZ" not in item assert "BAZ" not in item
with pytest.raises(ValueError): with pytest.raises(ValueError):
42 in item # noqa: B015, this check raises. 42 in item # noqa: B015

View file

@ -2,14 +2,13 @@
Vdirsyncer synchronizes calendars and contacts. Vdirsyncer synchronizes calendars and contacts.
""" """
from __future__ import annotations
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer" PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
BUGTRACKER_HOME = PROJECT_HOME + "/issues" BUGTRACKER_HOME = PROJECT_HOME + "/issues"
DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable" DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable"
try: try:
from .version import version as __version__ from .version import version as __version__ # noqa
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
raise ImportError( raise ImportError(
"Failed to find (autogenerated) version.py. " "Failed to find (autogenerated) version.py. "
@ -17,14 +16,12 @@ except ImportError: # pragma: no cover
"use the PyPI ones." "use the PyPI ones."
) )
__all__ = ["__version__"]
def _check_python_version(): # pragma: no cover
def _check_python_version():
import sys import sys
if sys.version_info < (3, 9, 0): # noqa: UP036 if sys.version_info < (3, 7, 0):
print("vdirsyncer requires at least Python 3.9.") print("vdirsyncer requires at least Python 3.7.")
sys.exit(1) sys.exit(1)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
if __name__ == "__main__": if __name__ == "__main__":
from vdirsyncer.cli import app from vdirsyncer.cli import app

View file

@ -1,24 +1,17 @@
from __future__ import annotations
import asyncio
import functools import functools
import json
import logging import logging
import sys import sys
import aiohttp
import click import click
import click_log import click_log
from vdirsyncer import BUGTRACKER_HOME from .. import __version__
from vdirsyncer import __version__ from .. import BUGTRACKER_HOME
cli_logger = logging.getLogger(__name__) cli_logger = logging.getLogger(__name__)
click_log.basic_config("vdirsyncer") click_log.basic_config("vdirsyncer")
# add short option for the help option
click_context_settings = {"help_option_names": ["-h", "--help"]}
class AppContext: class AppContext:
def __init__(self): def __init__(self):
@ -44,13 +37,13 @@ def catch_errors(f):
return inner return inner
@click.group(context_settings=click_context_settings) @click.group()
@click_log.simple_verbosity_option("vdirsyncer") @click_log.simple_verbosity_option("vdirsyncer")
@click.version_option(version=__version__) @click.version_option(version=__version__)
@click.option("--config", "-c", metavar="FILE", help="Config file to use.") @click.option("--config", "-c", metavar="FILE", help="Config file to use.")
@pass_context @pass_context
@catch_errors @catch_errors
def app(ctx, config: str): def app(ctx, config):
""" """
Synchronize calendars and contacts Synchronize calendars and contacts
""" """
@ -59,7 +52,7 @@ def app(ctx, config: str):
cli_logger.warning( cli_logger.warning(
"Vdirsyncer currently does not support Windows. " "Vdirsyncer currently does not support Windows. "
"You will likely encounter bugs. " "You will likely encounter bugs. "
f"See {BUGTRACKER_HOME}/535 for more information." "See {}/535 for more information.".format(BUGTRACKER_HOME)
) )
if not ctx.config: if not ctx.config:
@ -68,6 +61,36 @@ def app(ctx, config: str):
ctx.config = load_config(config) ctx.config = load_config(config)
main = app
def max_workers_callback(ctx, param, value):
if value == 0 and logging.getLogger("vdirsyncer").level == logging.DEBUG:
value = 1
cli_logger.debug(f"Using {value} maximal workers.")
return value
def max_workers_option(default=0):
help = "Use at most this many connections. "
if default == 0:
help += (
'The default is 0, which means "as many as necessary". '
"With -vdebug enabled, the default is 1."
)
else:
help += f"The default is {default}."
return click.option(
"--max-workers",
default=default,
type=click.IntRange(min=0, max=None),
callback=max_workers_callback,
help=help,
)
def collections_arg_callback(ctx, param, value): def collections_arg_callback(ctx, param, value):
""" """
Expand the various CLI shortforms ("pair, pair/collection") to an iterable Expand the various CLI shortforms ("pair, pair/collection") to an iterable
@ -102,9 +125,10 @@ collections_arg = click.argument(
"to be deleted from both sides." "to be deleted from both sides."
), ),
) )
@max_workers_option()
@pass_context @pass_context
@catch_errors @catch_errors
def sync(ctx, collections, force_delete): def sync(ctx, collections, force_delete, max_workers):
""" """
Synchronize the given collections or pairs. If no arguments are given, all Synchronize the given collections or pairs. If no arguments are given, all
will be synchronized. will be synchronized.
@ -125,75 +149,54 @@ def sync(ctx, collections, force_delete):
# Sync only "first_collection" from the pair "bob" # Sync only "first_collection" from the pair "bob"
vdirsyncer sync bob/first_collection vdirsyncer sync bob/first_collection
""" """
from .tasks import prepare_pair from .tasks import prepare_pair, sync_collection
from .tasks import sync_collection from .utils import WorkerQueue
async def main(collection_names): wq = WorkerQueue(max_workers)
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
tasks = [] with wq.join():
for pair_name, collections in collection_names: for pair_name, collections in collections:
async for collection, config in prepare_pair( wq.put(
functools.partial(
prepare_pair,
pair_name=pair_name, pair_name=pair_name,
collections=collections, collections=collections,
config=ctx.config, config=ctx.config,
connector=conn, force_delete=force_delete,
): callback=sync_collection,
tasks.append( )
sync_collection( )
collection=collection, wq.spawn_worker()
general=config,
force_delete=force_delete,
connector=conn,
)
)
# `return_exceptions=True` ensures that the event loop lives long enough for
# backoffs to be able to finish
gathered = await asyncio.gather(*tasks, return_exceptions=True)
# but now we need to manually check for and propogate a single failure after
# allowing all tasks to finish in order to keep exit status non-zero
failures = [e for e in gathered if isinstance(e, BaseException)]
if failures:
raise failures[0]
asyncio.run(main(collections))
@app.command() @app.command()
@collections_arg @collections_arg
@max_workers_option()
@pass_context @pass_context
@catch_errors @catch_errors
def metasync(ctx, collections): def metasync(ctx, collections, max_workers):
""" """
Synchronize metadata of the given collections or pairs. Synchronize metadata of the given collections or pairs.
See the `sync` command for usage. See the `sync` command for usage.
""" """
from .tasks import metasync_collection from .tasks import prepare_pair, metasync_collection
from .tasks import prepare_pair from .utils import WorkerQueue
async def main(collection_names): wq = WorkerQueue(max_workers)
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
for pair_name, collections in collection_names: with wq.join():
collections = prepare_pair( for pair_name, collections in collections:
wq.put(
functools.partial(
prepare_pair,
pair_name=pair_name, pair_name=pair_name,
collections=collections, collections=collections,
config=ctx.config, config=ctx.config,
connector=conn, callback=metasync_collection,
) )
)
await asyncio.gather( wq.spawn_worker()
*[
metasync_collection(
collection=collection,
general=config,
connector=conn,
)
async for collection, config in collections
]
)
asyncio.run(main(collections))
@app.command() @app.command()
@ -206,28 +209,33 @@ def metasync(ctx, collections):
"for debugging. This is slow and may crash for broken servers." "for debugging. This is slow and may crash for broken servers."
), ),
) )
@max_workers_option(default=1)
@pass_context @pass_context
@catch_errors @catch_errors
def discover(ctx, pairs, list): def discover(ctx, pairs, max_workers, list):
""" """
Refresh collection cache for the given pairs. Refresh collection cache for the given pairs.
""" """
from .tasks import discover_collections from .tasks import discover_collections
from .utils import WorkerQueue
config = ctx.config config = ctx.config
wq = WorkerQueue(max_workers)
async def main(): with wq.join():
async with aiohttp.TCPConnector(limit_per_host=16) as conn: for pair_name in pairs or config.pairs:
for pair_name in pairs or config.pairs: pair = config.get_pair(pair_name)
await discover_collections(
wq.put(
functools.partial(
discover_collections,
status_path=config.general["status_path"], status_path=config.general["status_path"],
pair=config.get_pair(pair_name), pair=pair,
from_cache=False, from_cache=False,
list_collections=list, list_collections=list,
connector=conn,
) )
)
asyncio.run(main()) wq.spawn_worker()
@app.command() @app.command()
@ -266,27 +274,4 @@ def repair(ctx, collection, repair_unsafe_uid):
"turn off other client's synchronization features." "turn off other client's synchronization features."
) )
click.confirm("Do you want to continue?", abort=True) click.confirm("Do you want to continue?", abort=True)
repair_collection(ctx.config, collection, repair_unsafe_uid=repair_unsafe_uid)
async def main():
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
await repair_collection(
ctx.config,
collection,
repair_unsafe_uid=repair_unsafe_uid,
connector=conn,
)
asyncio.run(main())
@app.command()
@pass_context
@catch_errors
def showconfig(ctx: AppContext):
"""Show the current configuration.
This is mostly intended to be used by scripts or other integrations.
If you need additional information in this dump, please reach out.
"""
config = {"storages": list(ctx.config.storages.values())}
click.echo(json.dumps(config, indent=2))

View file

@ -1,23 +1,19 @@
from __future__ import annotations
import json import json
import os import os
import string import string
from collections.abc import Generator
from configparser import RawConfigParser from configparser import RawConfigParser
from functools import cached_property
from itertools import chain from itertools import chain
from typing import IO
from typing import Any
from vdirsyncer import PROJECT_HOME from click_threading import get_ui_worker
from vdirsyncer import exceptions
from vdirsyncer.utils import expand_path
from vdirsyncer.vobject import Item
from .. import exceptions
from .. import PROJECT_HOME
from ..utils import cached_property
from ..utils import expand_path
from .fetchparams import expand_fetch_params from .fetchparams import expand_fetch_params
from .utils import storage_class_from_config from .utils import storage_class_from_config
GENERAL_ALL = frozenset(["status_path"]) GENERAL_ALL = frozenset(["status_path"])
GENERAL_REQUIRED = frozenset(["status_path"]) GENERAL_REQUIRED = frozenset(["status_path"])
SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, "_")) SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, "_"))
@ -28,16 +24,16 @@ def validate_section_name(name, section_type):
if invalid: if invalid:
chars_display = "".join(sorted(SECTION_NAME_CHARS)) chars_display = "".join(sorted(SECTION_NAME_CHARS))
raise exceptions.UserError( raise exceptions.UserError(
f'The {section_type}-section "{name}" contains invalid characters. Only ' 'The {}-section "{}" contains invalid characters. Only '
"the following characters are allowed for storage and " "the following characters are allowed for storage and "
f"pair names:\n{chars_display}" "pair names:\n{}".format(section_type, name, chars_display)
) )
def _validate_general_section(general_config: dict[str, str]): def _validate_general_section(general_config):
invalid = set(general_config) - GENERAL_ALL invalid = set(general_config) - GENERAL_ALL
missing = GENERAL_REQUIRED - set(general_config) missing = GENERAL_REQUIRED - set(general_config)
problems: list[str] = [] problems = []
if invalid: if invalid:
problems.append( problems.append(
@ -52,7 +48,7 @@ def _validate_general_section(general_config: dict[str, str]):
if problems: if problems:
raise exceptions.UserError( raise exceptions.UserError(
"Invalid general section. Copy the example " "Invalid general section. Copy the example "
f"config from the repository and edit it: {PROJECT_HOME}", "config from the repository and edit it: {}".format(PROJECT_HOME),
problems=problems, problems=problems,
) )
@ -93,31 +89,23 @@ def _validate_collections_param(collections):
raise ValueError("Duplicate value.") raise ValueError("Duplicate value.")
collection_names.add(collection_name) collection_names.add(collection_name)
except ValueError as e: except ValueError as e:
raise ValueError(f"`collections` parameter, position {i}: {e!s}") raise ValueError(
"`collections` parameter, position {i}: {e}".format(i=i, e=str(e))
)
def _validate_implicit_param(implicit):
if implicit is None:
return
if implicit != "create":
raise ValueError("`implicit` parameter must be 'create' or absent.")
class _ConfigReader: class _ConfigReader:
def __init__(self, f: IO[Any]): def __init__(self, f):
self._file: IO[Any] = f self._file = f
self._parser = c = RawConfigParser() self._parser = c = RawConfigParser()
c.read_file(f) c.read_file(f)
self._seen_names: set = set() self._seen_names = set()
self._general: dict[str, str] = {} self._general = {}
self._pairs: dict[str, dict[str, str]] = {} self._pairs = {}
self._storages: dict[str, dict[str, str]] = {} self._storages = {}
def _parse_section( def _parse_section(self, section_type, name, options):
self, section_type: str, name: str, options: dict[str, Any]
) -> None:
validate_section_name(name, section_type) validate_section_name(name, section_type)
if name in self._seen_names: if name in self._seen_names:
raise ValueError(f'Name "{name}" already used.') raise ValueError(f'Name "{name}" already used.')
@ -134,9 +122,7 @@ class _ConfigReader:
else: else:
raise ValueError("Unknown section type.") raise ValueError("Unknown section type.")
def parse( def parse(self):
self,
) -> tuple[dict[str, str], dict[str, dict[str, str]], dict[str, dict[str, str]]]:
for section in self._parser.sections(): for section in self._parser.sections():
if " " in section: if " " in section:
section_type, name = section.split(" ", 1) section_type, name = section.split(" ", 1)
@ -150,7 +136,7 @@ class _ConfigReader:
dict(_parse_options(self._parser.items(section), section=section)), dict(_parse_options(self._parser.items(section), section=section)),
) )
except ValueError as e: except ValueError as e:
raise exceptions.UserError(f'Section "{section}": {e!s}') raise exceptions.UserError('Section "{}": {}'.format(section, str(e)))
_validate_general_section(self._general) _validate_general_section(self._general)
if getattr(self._file, "name", None): if getattr(self._file, "name", None):
@ -162,29 +148,22 @@ class _ConfigReader:
return self._general, self._pairs, self._storages return self._general, self._pairs, self._storages
def _parse_options( def _parse_options(items, section=None):
items: list[tuple[str, str]], section: str | None = None
) -> Generator[tuple[str, dict[str, str]], None, None]:
for key, value in items: for key, value in items:
try: try:
yield key, json.loads(value) yield key, json.loads(value)
except ValueError as e: except ValueError as e:
raise ValueError(f'Section "{section}", option "{key}": {e}') raise ValueError('Section "{}", option "{}": {}'.format(section, key, e))
class Config: class Config:
def __init__( def __init__(self, general, pairs, storages):
self,
general: dict[str, str],
pairs: dict[str, dict[str, str]],
storages: dict[str, dict[str, str]],
) -> None:
self.general = general self.general = general
self.storages = storages self.storages = storages
for name, options in storages.items(): for name, options in storages.items():
options["instance_name"] = name options["instance_name"] = name
self.pairs: dict[str, PairConfig] = {} self.pairs = {}
for name, options in pairs.items(): for name, options in pairs.items():
try: try:
self.pairs[name] = PairConfig(self, name, options) self.pairs[name] = PairConfig(self, name, options)
@ -192,12 +171,12 @@ class Config:
raise exceptions.UserError(f"Pair {name}: {e}") raise exceptions.UserError(f"Pair {name}: {e}")
@classmethod @classmethod
def from_fileobject(cls, f: IO[Any]): def from_fileobject(cls, f):
reader = _ConfigReader(f) reader = _ConfigReader(f)
return cls(*reader.parse()) return cls(*reader.parse())
@classmethod @classmethod
def from_filename_or_environment(cls, fname: str | None = None): def from_filename_or_environment(cls, fname=None):
if fname is None: if fname is None:
fname = os.environ.get("VDIRSYNCER_CONFIG", None) fname = os.environ.get("VDIRSYNCER_CONFIG", None)
if fname is None: if fname is None:
@ -212,20 +191,24 @@ class Config:
with open(fname) as f: with open(fname) as f:
return cls.from_fileobject(f) return cls.from_fileobject(f)
except Exception as e: except Exception as e:
raise exceptions.UserError(f"Error during reading config {fname}: {e}") raise exceptions.UserError(
"Error during reading config {}: {}".format(fname, e)
)
def get_storage_args(self, storage_name: str): def get_storage_args(self, storage_name):
try: try:
args = self.storages[storage_name] args = self.storages[storage_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
f"Storage {storage_name!r} not found. " "Storage {!r} not found. "
f"These are the configured storages: {list(self.storages)}" "These are the configured storages: {}".format(
storage_name, list(self.storages)
)
) )
else: else:
return expand_fetch_params(args) return expand_fetch_params(args)
def get_pair(self, pair_name: str) -> PairConfig: def get_pair(self, pair_name):
try: try:
return self.pairs[pair_name] return self.pairs[pair_name]
except KeyError as e: except KeyError as e:
@ -233,15 +216,14 @@ class Config:
class PairConfig: class PairConfig:
def __init__(self, full_config: Config, name: str, options: dict[str, str]): def __init__(self, full_config, name, options):
self._config: Config = full_config self._config = full_config
self.name: str = name self.name = name
self.name_a: str = options.pop("a") self.name_a = options.pop("a")
self.name_b: str = options.pop("b") self.name_b = options.pop("b")
self.implicit = options.pop("implicit", None)
self._partial_sync: str | None = options.pop("partial_sync", None) self._partial_sync = options.pop("partial_sync", None)
self.metadata: str | tuple[()] = options.pop("metadata", ()) self.metadata = options.pop("metadata", None) or ()
self.conflict_resolution = self._process_conflict_resolution_param( self.conflict_resolution = self._process_conflict_resolution_param(
options.pop("conflict_resolution", None) options.pop("conflict_resolution", None)
@ -257,17 +239,14 @@ class PairConfig:
) )
else: else:
_validate_collections_param(self.collections) _validate_collections_param(self.collections)
_validate_implicit_param(self.implicit)
if options: if options:
raise ValueError("Unknown options: {}".format(", ".join(options))) raise ValueError("Unknown options: {}".format(", ".join(options)))
def _process_conflict_resolution_param( def _process_conflict_resolution_param(self, conflict_resolution):
self, conflict_resolution: str | list[str] | None
):
if conflict_resolution in (None, "a wins", "b wins"): if conflict_resolution in (None, "a wins", "b wins"):
return conflict_resolution return conflict_resolution
if ( elif (
isinstance(conflict_resolution, list) isinstance(conflict_resolution, list)
and len(conflict_resolution) > 1 and len(conflict_resolution) > 1
and conflict_resolution[0] == "command" and conflict_resolution[0] == "command"
@ -278,10 +257,15 @@ class PairConfig:
b_name = self.config_b["instance_name"] b_name = self.config_b["instance_name"]
command = conflict_resolution[1:] command = conflict_resolution[1:]
return _resolve_conflict_via_command(a, b, command, a_name, b_name) def inner():
return _resolve_conflict_via_command(a, b, command, a_name, b_name)
ui_worker = get_ui_worker()
return ui_worker.put(inner)
return resolve return resolve
raise ValueError("Invalid value for `conflict_resolution`.") else:
raise ValueError("Invalid value for `conflict_resolution`.")
# The following parameters are lazily evaluated because evaluating # The following parameters are lazily evaluated because evaluating
# self.config_a would expand all `x.fetch` parameters. This is costly and # self.config_a would expand all `x.fetch` parameters. This is costly and
@ -327,10 +311,10 @@ class PairConfig:
class CollectionConfig: class CollectionConfig:
def __init__(self, pair, name: str, config_a, config_b): def __init__(self, pair, name, config_a, config_b):
self.pair = pair self.pair = pair
self._config = pair._config self._config = pair._config
self.name: str = name self.name = name
self.config_a = config_a self.config_a = config_a
self.config_b = config_b self.config_b = config_b
@ -339,16 +323,14 @@ class CollectionConfig:
load_config = Config.from_filename_or_environment load_config = Config.from_filename_or_environment
def _resolve_conflict_via_command( def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=None):
a, b, command, a_name, b_name, _check_call=None
) -> Item:
import shutil
import tempfile import tempfile
import shutil
if _check_call is None: if _check_call is None:
from subprocess import check_call as _check_call from subprocess import check_call as _check_call
from vdirsyncer.vobject import Item from ..vobject import Item
dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.") dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.")
try: try:
@ -361,7 +343,7 @@ def _resolve_conflict_via_command(
f.write(b.raw) f.write(b.raw)
command[0] = expand_path(command[0]) command[0] = expand_path(command[0])
_check_call([*command, a_tmp, b_tmp]) _check_call(command + [a_tmp, b_tmp])
with open(a_tmp) as f: with open(a_tmp) as f:
new_a = f.read() new_a = f.read()
@ -369,7 +351,7 @@ def _resolve_conflict_via_command(
new_b = f.read() new_b = f.read()
if new_a != new_b: if new_a != new_b:
raise exceptions.UserError("The two files are not completely equal.") raise exceptions.UserError("The two files are not completely " "equal.")
return Item(new_a) return Item(new_a)
finally: finally:
shutil.rmtree(dir) shutil.rmtree(dir)

View file

@ -1,16 +1,10 @@
from __future__ import annotations
import asyncio
import hashlib import hashlib
import json import json
import logging import logging
import sys import sys
import aiohttp from .. import exceptions
import aiostream from ..utils import cached_property
from vdirsyncer import exceptions
from .utils import handle_collection_not_found from .utils import handle_collection_not_found
from .utils import handle_storage_init_error from .utils import handle_storage_init_error
from .utils import load_status from .utils import load_status
@ -18,6 +12,7 @@ from .utils import save_status
from .utils import storage_class_from_config from .utils import storage_class_from_config
from .utils import storage_instance_from_config from .utils import storage_instance_from_config
# Increase whenever upgrade potentially breaks discovery cache and collections # Increase whenever upgrade potentially breaks discovery cache and collections
# should be re-discovered # should be re-discovered
DISCOVERY_CACHE_VERSION = 1 DISCOVERY_CACHE_VERSION = 1
@ -40,14 +35,7 @@ def _get_collections_cache_key(pair):
return m.hexdigest() return m.hexdigest()
async def collections_for_pair( def collections_for_pair(status_path, pair, from_cache=True, list_collections=False):
status_path,
pair,
from_cache=True,
list_collections=False,
*,
connector: aiohttp.TCPConnector,
):
"""Determine all configured collections for a given pair. Takes care of """Determine all configured collections for a given pair. Takes care of
shortcut expansion and result caching. shortcut expansion and result caching.
@ -66,58 +54,44 @@ async def collections_for_pair(
rv["collections"], pair.config_a, pair.config_b rv["collections"], pair.config_a, pair.config_b
) )
) )
if rv: elif rv:
raise exceptions.UserError( raise exceptions.UserError(
"Detected change in config file, " "Detected change in config file, "
f"please run `vdirsyncer discover {pair.name}`." "please run `vdirsyncer discover {}`.".format(pair.name)
)
else:
raise exceptions.UserError(
"Please run `vdirsyncer discover {}` "
" before synchronization.".format(pair.name)
) )
raise exceptions.UserError(
f"Please run `vdirsyncer discover {pair.name}` before synchronization."
)
logger.info(f"Discovering collections for pair {pair.name}") logger.info("Discovering collections for pair {}".format(pair.name))
a_discovered = DiscoverResult(pair.config_a, connector=connector) a_discovered = _DiscoverResult(pair.config_a)
b_discovered = DiscoverResult(pair.config_b, connector=connector) b_discovered = _DiscoverResult(pair.config_b)
if list_collections: if list_collections:
# TODO: We should gather data and THEN print, so it can be async. _print_collections(pair.config_a["instance_name"], a_discovered.get_self)
await _print_collections( _print_collections(pair.config_b["instance_name"], b_discovered.get_self)
pair.config_a["instance_name"],
a_discovered.get_self,
connector=connector,
)
await _print_collections(
pair.config_b["instance_name"],
b_discovered.get_self,
connector=connector,
)
async def _handle_collection_not_found(
config, collection, e=None, implicit_create=False
):
return await handle_collection_not_found(
config, collection, e=e, implicit_create=pair.implicit == "create"
)
# We have to use a list here because the special None/null value would get # We have to use a list here because the special None/null value would get
# mangled to string (because JSON objects always have string keys). # mangled to string (because JSON objects always have string keys).
rv = await aiostream.stream.list( # type: ignore[assignment] rv = list(
expand_collections( expand_collections(
shortcuts=pair.collections, shortcuts=pair.collections,
config_a=pair.config_a, config_a=pair.config_a,
config_b=pair.config_b, config_b=pair.config_b,
get_a_discovered=a_discovered.get_self, get_a_discovered=a_discovered.get_self,
get_b_discovered=b_discovered.get_self, get_b_discovered=b_discovered.get_self,
_handle_collection_not_found=_handle_collection_not_found, _handle_collection_not_found=handle_collection_not_found,
) )
) )
await _sanity_check_collections(rv, connector=connector) _sanity_check_collections(rv)
save_status( save_status(
base_path=status_path, status_path,
pair=pair.name, pair.name,
data_type="collections", data_type="collections",
data={ data={
"collections": list( "collections": list(
@ -129,14 +103,10 @@ async def collections_for_pair(
return rv return rv
async def _sanity_check_collections(collections, *, connector): def _sanity_check_collections(collections):
tasks = []
for _, (a_args, b_args) in collections: for _, (a_args, b_args) in collections:
tasks.append(storage_instance_from_config(a_args, connector=connector)) storage_instance_from_config(a_args)
tasks.append(storage_instance_from_config(b_args, connector=connector)) storage_instance_from_config(b_args)
await asyncio.gather(*tasks)
def _compress_collections_cache(collections, config_a, config_b): def _compress_collections_cache(collections, config_a, config_b):
@ -163,30 +133,18 @@ def _expand_collections_cache(collections, config_a, config_b):
yield name, (a, b) yield name, (a, b)
class DiscoverResult: class _DiscoverResult:
def __init__(self, config, *, connector): def __init__(self, config):
self._cls, _ = storage_class_from_config(config) self._cls, _ = storage_class_from_config(config)
if self._cls.__name__ in [
"CardDAVStorage",
"CalDAVStorage",
"GoogleCalendarStorage",
"GoogleContactsStorage",
]:
assert connector is not None
config["connector"] = connector
self._config = config self._config = config
self._discovered = None
async def get_self(self): def get_self(self):
if self._discovered is None:
self._discovered = await self._discover()
return self._discovered return self._discovered
async def _discover(self): @cached_property
def _discovered(self):
try: try:
discovered = await aiostream.stream.list(self._cls.discover(**self._config)) discovered = list(self._cls.discover(**self._config))
except NotImplementedError: except NotImplementedError:
return {} return {}
except Exception: except Exception:
@ -200,7 +158,7 @@ class DiscoverResult:
return rv return rv
async def expand_collections( def expand_collections(
shortcuts, shortcuts,
config_a, config_a,
config_b, config_b,
@ -215,9 +173,9 @@ async def expand_collections(
for shortcut in shortcuts: for shortcut in shortcuts:
if shortcut == "from a": if shortcut == "from a":
collections = await get_a_discovered() collections = get_a_discovered()
elif shortcut == "from b": elif shortcut == "from b":
collections = await get_b_discovered() collections = get_b_discovered()
else: else:
collections = [shortcut] collections = [shortcut]
@ -231,23 +189,17 @@ async def expand_collections(
continue continue
handled_collections.add(collection) handled_collections.add(collection)
a_args = await _collection_from_discovered( a_args = _collection_from_discovered(
get_a_discovered, get_a_discovered, collection_a, config_a, _handle_collection_not_found
collection_a,
config_a,
_handle_collection_not_found,
) )
b_args = await _collection_from_discovered( b_args = _collection_from_discovered(
get_b_discovered, get_b_discovered, collection_b, config_b, _handle_collection_not_found
collection_b,
config_b,
_handle_collection_not_found,
) )
yield collection, (a_args, b_args) yield collection, (a_args, b_args)
async def _collection_from_discovered( def _collection_from_discovered(
get_discovered, collection, config, _handle_collection_not_found get_discovered, collection, config, _handle_collection_not_found
): ):
if collection is None: if collection is None:
@ -256,19 +208,14 @@ async def _collection_from_discovered(
return args return args
try: try:
return (await get_discovered())[collection] return get_discovered()[collection]
except KeyError: except KeyError:
return await _handle_collection_not_found(config, collection) return _handle_collection_not_found(config, collection)
async def _print_collections( def _print_collections(instance_name, get_discovered):
instance_name: str,
get_discovered,
*,
connector: aiohttp.TCPConnector,
):
try: try:
discovered = await get_discovered() discovered = get_discovered()
except exceptions.UserError: except exceptions.UserError:
raise raise
except Exception: except Exception:
@ -279,37 +226,28 @@ async def _print_collections(
logger.debug("".join(traceback.format_tb(sys.exc_info()[2]))) logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
logger.warning( logger.warning(
f"Failed to discover collections for {instance_name}, use `-vdebug` " "Failed to discover collections for {}, use `-vdebug` "
"to see the full traceback." "to see the full traceback.".format(instance_name)
) )
return return
logger.info(f"{instance_name}:") logger.info(f"{instance_name}:")
tasks = []
for args in discovered.values(): for args in discovered.values():
tasks.append(_print_single_collection(args, instance_name, connector)) collection = args["collection"]
if collection is None:
continue
await asyncio.gather(*tasks) args["instance_name"] = instance_name
try:
storage = storage_instance_from_config(args, create=False)
displayname = storage.get_meta("displayname")
except Exception:
displayname = ""
logger.info(
async def _print_single_collection(args, instance_name, connector): " - {}{}".format(
collection = args["collection"] json.dumps(collection),
if collection is None: f' ("{displayname}")'
return if displayname and displayname != collection
else "",
args["instance_name"] = instance_name )
try:
storage = await storage_instance_from_config(
args,
create=False,
connector=connector,
) )
displayname = await storage.get_meta("displayname")
except Exception:
displayname = ""
logger.info(
" - {}{}".format(
json.dumps(collection),
f' ("{displayname}")' if displayname and displayname != collection else "",
)
)

View file

@ -1,14 +1,11 @@
from __future__ import annotations
import logging import logging
import click import click
from vdirsyncer import exceptions
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import synchronized
from . import AppContext from . import AppContext
from .. import exceptions
from ..utils import expand_path
from ..utils import synchronized
SUFFIX = ".fetch" SUFFIX = ".fetch"
@ -33,14 +30,16 @@ def expand_fetch_params(config):
@synchronized() @synchronized()
def _fetch_value(opts, key): def _fetch_value(opts, key):
if not isinstance(opts, list): if not isinstance(opts, list):
raise ValueError(f"Invalid value for {key}: Expected a list, found {opts!r}.") raise ValueError(
"Invalid value for {}: Expected a list, found {!r}.".format(key, opts)
)
if not opts: if not opts:
raise ValueError("Expected list of length > 0.") raise ValueError("Expected list of length > 0.")
try: try:
ctx = click.get_current_context().find_object(AppContext) ctx = click.get_current_context().find_object(AppContext)
if ctx is None: if ctx is None:
raise RuntimeError raise RuntimeError()
password_cache = ctx.fetched_params password_cache = ctx.fetched_params
except RuntimeError: except RuntimeError:
password_cache = {} password_cache = {}
@ -59,7 +58,7 @@ def _fetch_value(opts, key):
except KeyError: except KeyError:
raise exceptions.UserError(f"Unknown strategy: {strategy}") raise exceptions.UserError(f"Unknown strategy: {strategy}")
logger.debug(f"Fetching value for {key} with {strategy} strategy.") logger.debug("Fetching value for {} with {} strategy.".format(key, strategy))
try: try:
rv = strategy_fn(*opts[1:]) rv = strategy_fn(*opts[1:])
except (click.Abort, KeyboardInterrupt) as e: except (click.Abort, KeyboardInterrupt) as e:
@ -68,32 +67,24 @@ def _fetch_value(opts, key):
else: else:
if not rv: if not rv:
raise exceptions.UserError( raise exceptions.UserError(
f"Empty value for {key}, this most likely indicates an error." "Empty value for {}, this most likely "
"indicates an error.".format(key)
) )
password_cache[cache_key] = rv password_cache[cache_key] = rv
return rv return rv
def _strategy_command(*command: str, shell: bool = False): def _strategy_command(*command):
"""Execute a user-specified command and return its output."""
import subprocess import subprocess
# Normalize path of every path member. command = (expand_path(command[0]),) + command[1:]
# If there is no path specified then nothing will happen.
# Makes this a list to avoid it being exhausted on the first iteration.
expanded_command = list(map(expand_path, command))
try: try:
stdout = subprocess.check_output(expanded_command, text=True, shell=shell) stdout = subprocess.check_output(command, universal_newlines=True)
return stdout.strip("\n") return stdout.strip("\n")
except OSError as e: except OSError as e:
cmd = " ".join(expanded_command) raise exceptions.UserError(
raise exceptions.UserError(f"Failed to execute command: {cmd}\n{e!s}") "Failed to execute command: {}\n{}".format(" ".join(command), str(e))
)
def _strategy_shell(*command: str):
"""Execute a user-specified command string in a shell and return its output."""
return _strategy_command(*command, shell=True)
def _strategy_prompt(text): def _strategy_prompt(text):
@ -102,6 +93,5 @@ def _strategy_prompt(text):
STRATEGIES = { STRATEGIES = {
"command": _strategy_command, "command": _strategy_command,
"shell": _strategy_shell,
"prompt": _strategy_prompt, "prompt": _strategy_prompt,
} }

View file

@ -1,64 +1,62 @@
from __future__ import annotations import functools
import json import json
import aiohttp from .. import exceptions
from .. import sync
from vdirsyncer import exceptions
from vdirsyncer import sync
from .config import CollectionConfig from .config import CollectionConfig
from .discover import DiscoverResult
from .discover import collections_for_pair from .discover import collections_for_pair
from .discover import storage_class_from_config
from .discover import storage_instance_from_config from .discover import storage_instance_from_config
from .utils import JobFailed
from .utils import cli_logger from .utils import cli_logger
from .utils import get_status_name from .utils import get_status_name
from .utils import handle_cli_error from .utils import handle_cli_error
from .utils import JobFailed
from .utils import load_status from .utils import load_status
from .utils import manage_sync_status from .utils import manage_sync_status
from .utils import save_status from .utils import save_status
async def prepare_pair(pair_name, collections, config, *, connector): def prepare_pair(wq, pair_name, collections, config, callback, **kwargs):
pair = config.get_pair(pair_name) pair = config.get_pair(pair_name)
all_collections = dict( all_collections = dict(
await collections_for_pair( collections_for_pair(status_path=config.general["status_path"], pair=pair)
status_path=config.general["status_path"],
pair=pair,
connector=connector,
)
) )
# spawn one worker less because we can reuse the current one
new_workers = -1
for collection_name in collections or all_collections: for collection_name in collections or all_collections:
try: try:
config_a, config_b = all_collections[collection_name] config_a, config_b = all_collections[collection_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
f"Pair {pair_name}: Collection {json.dumps(collection_name)} not found." "Pair {}: Collection {} not found. These are the "
f"These are the configured collections:\n{list(all_collections)}" "configured collections:\n{}".format(
pair_name, json.dumps(collection_name), list(all_collections)
)
) )
new_workers += 1
collection = CollectionConfig(pair, collection_name, config_a, config_b) collection = CollectionConfig(pair, collection_name, config_a, config_b)
yield collection, config.general wq.put(
functools.partial(
callback, collection=collection, general=config.general, **kwargs
)
)
for _ in range(new_workers):
wq.spawn_worker()
async def sync_collection( def sync_collection(wq, collection, general, force_delete):
collection,
general,
force_delete,
*,
connector: aiohttp.TCPConnector,
):
pair = collection.pair pair = collection.pair
status_name = get_status_name(pair.name, collection.name) status_name = get_status_name(pair.name, collection.name)
try: try:
cli_logger.info(f"Syncing {status_name}") cli_logger.info(f"Syncing {status_name}")
a = await storage_instance_from_config(collection.config_a, connector=connector) a = storage_instance_from_config(collection.config_a)
b = await storage_instance_from_config(collection.config_b, connector=connector) b = storage_instance_from_config(collection.config_b)
sync_failed = False sync_failed = False
@ -70,7 +68,7 @@ async def sync_collection(
with manage_sync_status( with manage_sync_status(
general["status_path"], pair.name, collection.name general["status_path"], pair.name, collection.name
) as status: ) as status:
await sync.sync( sync.sync(
a, a,
b, b,
status, status,
@ -81,61 +79,57 @@ async def sync_collection(
) )
if sync_failed: if sync_failed:
raise JobFailed raise JobFailed()
except JobFailed: except JobFailed:
raise raise
except BaseException: except BaseException:
handle_cli_error(status_name) handle_cli_error(status_name)
raise JobFailed raise JobFailed()
async def discover_collections(pair, **kwargs): def discover_collections(wq, pair, **kwargs):
rv = await collections_for_pair(pair=pair, **kwargs) rv = collections_for_pair(pair=pair, **kwargs)
collections = [c for c, (a, b) in rv] collections = list(c for c, (a, b) in rv)
if collections == [None]: if collections == [None]:
collections = None collections = None
cli_logger.info(f"Saved for {pair.name}: collections = {json.dumps(collections)}") cli_logger.info(
"Saved for {}: collections = {}".format(pair.name, json.dumps(collections))
)
async def repair_collection( def repair_collection(config, collection, repair_unsafe_uid):
config, from ..repair import repair_storage
collection,
repair_unsafe_uid,
*,
connector: aiohttp.TCPConnector,
):
from vdirsyncer.repair import repair_storage
storage_name, collection = collection, None storage_name, collection = collection, None
if "/" in storage_name: if "/" in storage_name:
storage_name, collection = storage_name.split("/") storage_name, collection = storage_name.split("/")
config = config.get_storage_args(storage_name) config = config.get_storage_args(storage_name)
# If storage type has a slash, ignore it and anything after it. storage_type = config["type"]
storage_type = config["type"].split("/")[0]
if collection is not None: if collection is not None:
cli_logger.info("Discovering collections (skipping cache).") cli_logger.info("Discovering collections (skipping cache).")
get_discovered = DiscoverResult(config, connector=connector) cls, config = storage_class_from_config(config)
discovered = await get_discovered.get_self() for config in cls.discover(**config):
for config in discovered.values():
if config["collection"] == collection: if config["collection"] == collection:
break break
else: else:
raise exceptions.UserError( raise exceptions.UserError(
f"Couldn't find collection {collection} for storage {storage_name}." "Couldn't find collection {} for storage {}.".format(
collection, storage_name
)
) )
config["type"] = storage_type config["type"] = storage_type
storage = await storage_instance_from_config(config, connector=connector) storage = storage_instance_from_config(config)
cli_logger.info(f"Repairing {storage_name}/{collection}") cli_logger.info(f"Repairing {storage_name}/{collection}")
cli_logger.warning("Make sure no other program is talking to the server.") cli_logger.warning("Make sure no other program is talking to the server.")
await repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid) repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid)
async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector): def metasync_collection(wq, collection, general):
from vdirsyncer.metasync import metasync from ..metasync import metasync
pair = collection.pair pair = collection.pair
status_name = get_status_name(pair.name, collection.name) status_name = get_status_name(pair.name, collection.name)
@ -143,17 +137,17 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
try: try:
cli_logger.info(f"Metasyncing {status_name}") cli_logger.info(f"Metasyncing {status_name}")
status = load_status( status = (
general["status_path"], load_status(
pair.name, general["status_path"], pair.name, collection.name, data_type="metadata"
collection.name, )
data_type="metadata", or {}
) )
a = await storage_instance_from_config(collection.config_a, connector=connector) a = storage_instance_from_config(collection.config_a)
b = await storage_instance_from_config(collection.config_b, connector=connector) b = storage_instance_from_config(collection.config_b)
await metasync( metasync(
a, a,
b, b,
status, status,
@ -162,12 +156,12 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
) )
except BaseException: except BaseException:
handle_cli_error(status_name) handle_cli_error(status_name)
raise JobFailed raise JobFailed()
save_status( save_status(
base_path=general["status_path"], general["status_path"],
pair=pair.name, pair.name,
collection.name,
data_type="metadata", data_type="metadata",
data=status, data=status,
collection=collection.name,
) )

View file

@ -1,48 +1,48 @@
from __future__ import annotations
import contextlib import contextlib
import errno import errno
import importlib import importlib
import itertools
import json import json
import os import os
import queue
import sys import sys
from typing import Any
import aiohttp
import click import click
import click_threading
from vdirsyncer import BUGTRACKER_HOME from atomicwrites import atomic_write
from vdirsyncer import DOCS_HOME
from vdirsyncer import exceptions
from vdirsyncer.storage.base import Storage
from vdirsyncer.sync.exceptions import IdentConflict
from vdirsyncer.sync.exceptions import PartialSync
from vdirsyncer.sync.exceptions import StorageEmpty
from vdirsyncer.sync.exceptions import SyncConflict
from vdirsyncer.sync.status import SqliteStatus
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import get_storage_init_args
from . import cli_logger from . import cli_logger
from .. import BUGTRACKER_HOME
from .. import DOCS_HOME
from .. import exceptions
from ..sync.exceptions import IdentConflict
from ..sync.exceptions import PartialSync
from ..sync.exceptions import StorageEmpty
from ..sync.exceptions import SyncConflict
from ..sync.status import SqliteStatus
from ..utils import expand_path
from ..utils import get_storage_init_args
STATUS_PERMISSIONS = 0o600 STATUS_PERMISSIONS = 0o600
STATUS_DIR_PERMISSIONS = 0o700 STATUS_DIR_PERMISSIONS = 0o700
class _StorageIndex: class _StorageIndex:
def __init__(self) -> None: def __init__(self):
self._storages: dict[str, str] = { self._storages = dict(
"caldav": "vdirsyncer.storage.dav.CalDAVStorage", caldav="vdirsyncer.storage.dav.CalDAVStorage",
"carddav": "vdirsyncer.storage.dav.CardDAVStorage", carddav="vdirsyncer.storage.dav.CardDAVStorage",
"filesystem": "vdirsyncer.storage.filesystem.FilesystemStorage", filesystem="vdirsyncer.storage.filesystem.FilesystemStorage",
"http": "vdirsyncer.storage.http.HttpStorage", http="vdirsyncer.storage.http.HttpStorage",
"singlefile": "vdirsyncer.storage.singlefile.SingleFileStorage", singlefile="vdirsyncer.storage.singlefile.SingleFileStorage",
"google_calendar": "vdirsyncer.storage.google.GoogleCalendarStorage", google_calendar="vdirsyncer.storage.google.GoogleCalendarStorage",
"google_contacts": "vdirsyncer.storage.google.GoogleContactsStorage", google_contacts="vdirsyncer.storage.google.GoogleContactsStorage",
} etesync_calendars="vdirsyncer.storage.etesync.EtesyncCalendars",
etesync_contacts="vdirsyncer.storage.etesync.EtesyncContacts",
)
def __getitem__(self, name: str) -> Storage: def __getitem__(self, name):
item = self._storages[name] item = self._storages[name]
if not isinstance(item, str): if not isinstance(item, str):
return item return item
@ -79,27 +79,33 @@ def handle_cli_error(status_name=None, e=None):
cli_logger.critical(e) cli_logger.critical(e)
except StorageEmpty as e: except StorageEmpty as e:
cli_logger.error( cli_logger.error(
f'{status_name}: Storage "{e.empty_storage.instance_name}" was ' '{status_name}: Storage "{name}" was completely emptied. If you '
"completely emptied. If you want to delete ALL entries on BOTH sides," "want to delete ALL entries on BOTH sides, then use "
f"then use `vdirsyncer sync --force-delete {status_name}`. " "`vdirsyncer sync --force-delete {status_name}`. "
f"Otherwise delete the files for {status_name} in your status " "Otherwise delete the files for {status_name} in your status "
"directory." "directory.".format(
name=e.empty_storage.instance_name, status_name=status_name
)
) )
except PartialSync as e: except PartialSync as e:
cli_logger.error( cli_logger.error(
f"{status_name}: Attempted change on {e.storage}, which is read-only" "{status_name}: Attempted change on {storage}, which is read-only"
". Set `partial_sync` in your pair section to `ignore` to ignore " ". Set `partial_sync` in your pair section to `ignore` to ignore "
"those changes, or `revert` to revert them on the other side." "those changes, or `revert` to revert them on the other side.".format(
status_name=status_name, storage=e.storage
)
) )
except SyncConflict as e: except SyncConflict as e:
cli_logger.error( cli_logger.error(
f"{status_name}: One item changed on both sides. Resolve this " "{status_name}: One item changed on both sides. Resolve this "
"conflict manually, or by setting the `conflict_resolution` " "conflict manually, or by setting the `conflict_resolution` "
"parameter in your config file.\n" "parameter in your config file.\n"
f"See also {DOCS_HOME}/config.html#pair-section\n" "See also {docs}/config.html#pair-section\n"
f"Item ID: {e.ident}\n" "Item ID: {e.ident}\n"
f"Item href on side A: {e.href_a}\n" "Item href on side A: {e.href_a}\n"
f"Item href on side B: {e.href_b}\n" "Item href on side B: {e.href_b}\n".format(
status_name=status_name, e=e, docs=DOCS_HOME
)
) )
except IdentConflict as e: except IdentConflict as e:
cli_logger.error( cli_logger.error(
@ -120,17 +126,17 @@ def handle_cli_error(status_name=None, e=None):
pass pass
except exceptions.PairNotFound as e: except exceptions.PairNotFound as e:
cli_logger.error( cli_logger.error(
f"Pair {e.pair_name} does not exist. Please check your " "Pair {pair_name} does not exist. Please check your "
"configuration file and make sure you've typed the pair name " "configuration file and make sure you've typed the pair name "
"correctly" "correctly".format(pair_name=e.pair_name)
) )
except exceptions.InvalidResponse as e: except exceptions.InvalidResponse as e:
cli_logger.error( cli_logger.error(
"The server returned something vdirsyncer doesn't understand. " "The server returned something vdirsyncer doesn't understand. "
f"Error message: {e!r}\n" "Error message: {!r}\n"
"While this is most likely a serverside problem, the vdirsyncer " "While this is most likely a serverside problem, the vdirsyncer "
"devs are generally interested in such bugs. Please report it in " "devs are generally interested in such bugs. Please report it in "
f"the issue tracker at {BUGTRACKER_HOME}" "the issue tracker at {}".format(e, BUGTRACKER_HOME)
) )
except exceptions.CollectionRequired: except exceptions.CollectionRequired:
cli_logger.error( cli_logger.error(
@ -153,40 +159,32 @@ def handle_cli_error(status_name=None, e=None):
cli_logger.debug("".join(tb)) cli_logger.debug("".join(tb))
def get_status_name(pair: str, collection: str | None) -> str: def get_status_name(pair, collection):
if collection is None: if collection is None:
return pair return pair
return pair + "/" + collection return pair + "/" + collection
def get_status_path( def get_status_path(base_path, pair, collection=None, data_type=None):
base_path: str,
pair: str,
collection: str | None = None,
data_type: str | None = None,
) -> str:
assert data_type is not None assert data_type is not None
status_name = get_status_name(pair, collection) status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name)) path = expand_path(os.path.join(base_path, status_name))
if os.path.isfile(path) and data_type == "items": if os.path.isfile(path) and data_type == "items":
new_path = path + ".items" new_path = path + ".items"
# XXX: Legacy migration # XXX: Legacy migration
cli_logger.warning(f"Migrating statuses: Renaming {path} to {new_path}") cli_logger.warning(
"Migrating statuses: Renaming {} to {}".format(path, new_path)
)
os.rename(path, new_path) os.rename(path, new_path)
path += "." + data_type path += "." + data_type
return path return path
def load_status( def load_status(base_path, pair, collection=None, data_type=None):
base_path: str,
pair: str,
collection: str | None = None,
data_type: str | None = None,
) -> dict[str, Any]:
path = get_status_path(base_path, pair, collection, data_type) path = get_status_path(base_path, pair, collection, data_type)
if not os.path.exists(path): if not os.path.exists(path):
return {} return None
assert_permissions(path, STATUS_PERMISSIONS) assert_permissions(path, STATUS_PERMISSIONS)
with open(path) as f: with open(path) as f:
@ -198,7 +196,7 @@ def load_status(
return {} return {}
def prepare_status_path(path: str) -> None: def prepare_status_path(path):
dirname = os.path.dirname(path) dirname = os.path.dirname(path)
try: try:
@ -209,7 +207,7 @@ def prepare_status_path(path: str) -> None:
@contextlib.contextmanager @contextlib.contextmanager
def manage_sync_status(base_path: str, pair_name: str, collection_name: str): def manage_sync_status(base_path, pair_name, collection_name):
path = get_status_path(base_path, pair_name, collection_name, "items") path = get_status_path(base_path, pair_name, collection_name, "items")
status = None status = None
legacy_status = None legacy_status = None
@ -231,17 +229,12 @@ def manage_sync_status(base_path: str, pair_name: str, collection_name: str):
prepare_status_path(path) prepare_status_path(path)
status = SqliteStatus(path) status = SqliteStatus(path)
with contextlib.closing(status): yield status
yield status
def save_status( def save_status(base_path, pair, collection=None, data_type=None, data=None):
base_path: str, assert data_type is not None
pair: str, assert data is not None
data_type: str,
data: dict[str, Any],
collection: str | None = None,
) -> None:
status_name = get_status_name(pair, collection) status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name)) + "." + data_type path = expand_path(os.path.join(base_path, status_name)) + "." + data_type
prepare_status_path(path) prepare_status_path(path)
@ -262,38 +255,24 @@ def storage_class_from_config(config):
return cls, config return cls, config
async def storage_instance_from_config( def storage_instance_from_config(config, create=True):
config,
create=True,
*,
connector: aiohttp.TCPConnector,
):
""" """
:param config: A configuration dictionary to pass as kwargs to the class :param config: A configuration dictionary to pass as kwargs to the class
corresponding to config['type'] corresponding to config['type']
""" """
from vdirsyncer.storage.dav import DAVStorage
from vdirsyncer.storage.http import HttpStorage
cls, new_config = storage_class_from_config(config) cls, new_config = storage_class_from_config(config)
if issubclass(cls, DAVStorage) or issubclass(cls, HttpStorage):
assert connector is not None # FIXME: hack?
new_config["connector"] = connector
try: try:
return cls(**new_config) return cls(**new_config)
except exceptions.CollectionNotFound as e: except exceptions.CollectionNotFound as e:
if create: if create:
config = await handle_collection_not_found( config = handle_collection_not_found(
config, config.get("collection", None), e=str(e), implicit_create=True config, config.get("collection", None), e=str(e)
) )
return await storage_instance_from_config( return storage_instance_from_config(config, create=False)
config, else:
create=False, raise
connector=connector,
)
raise
except Exception: except Exception:
return handle_storage_init_error(cls, new_config) return handle_storage_init_error(cls, new_config)
@ -332,18 +311,104 @@ def handle_storage_init_error(cls, config):
) )
def assert_permissions(path: str, wanted: int) -> None: class WorkerQueue:
"""
A simple worker-queue setup.
Note that workers quit if queue is empty. That means you have to first put
things into the queue before spawning the worker!
"""
def __init__(self, max_workers):
self._queue = queue.Queue()
self._workers = []
self._max_workers = max_workers
self._shutdown_handlers = []
# According to http://stackoverflow.com/a/27062830, those are
# threadsafe compared to increasing a simple integer variable.
self.num_done_tasks = itertools.count()
self.num_failed_tasks = itertools.count()
def shutdown(self):
while self._shutdown_handlers:
try:
self._shutdown_handlers.pop()()
except Exception:
pass
def _worker(self):
while True:
try:
func = self._queue.get(False)
except queue.Empty:
break
try:
func(wq=self)
except Exception:
handle_cli_error()
next(self.num_failed_tasks)
finally:
self._queue.task_done()
next(self.num_done_tasks)
if not self._queue.unfinished_tasks:
self.shutdown()
def spawn_worker(self):
if self._max_workers and len(self._workers) >= self._max_workers:
return
t = click_threading.Thread(target=self._worker)
t.start()
self._workers.append(t)
@contextlib.contextmanager
def join(self):
assert self._workers or not self._queue.unfinished_tasks
ui_worker = click_threading.UiWorker()
self._shutdown_handlers.append(ui_worker.shutdown)
_echo = click.echo
with ui_worker.patch_click():
yield
if not self._workers:
# Ugly hack, needed because ui_worker is not running.
click.echo = _echo
cli_logger.critical("Nothing to do.")
sys.exit(5)
ui_worker.run()
self._queue.join()
for worker in self._workers:
worker.join()
tasks_failed = next(self.num_failed_tasks)
tasks_done = next(self.num_done_tasks)
if tasks_failed > 0:
cli_logger.error(
"{} out of {} tasks failed.".format(tasks_failed, tasks_done)
)
sys.exit(1)
def put(self, f):
return self._queue.put(f)
def assert_permissions(path, wanted):
permissions = os.stat(path).st_mode & 0o777 permissions = os.stat(path).st_mode & 0o777
if permissions > wanted: if permissions > wanted:
cli_logger.warning( cli_logger.warning(
f"Correcting permissions of {path} from {permissions:o} to {wanted:o}" "Correcting permissions of {} from {:o} to {:o}".format(
path, permissions, wanted
)
) )
os.chmod(path, wanted) os.chmod(path, wanted)
async def handle_collection_not_found( def handle_collection_not_found(config, collection, e=None):
config, collection, e=None, implicit_create=False
):
storage_name = config.get("instance_name", None) storage_name = config.get("instance_name", None)
cli_logger.warning( cli_logger.warning(
@ -352,19 +417,19 @@ async def handle_collection_not_found(
) )
) )
if implicit_create or click.confirm("Should vdirsyncer attempt to create it?"): if click.confirm("Should vdirsyncer attempt to create it?"):
storage_type = config["type"] storage_type = config["type"]
cls, config = storage_class_from_config(config) cls, config = storage_class_from_config(config)
config["collection"] = collection config["collection"] = collection
try: try:
args = await cls.create_collection(**config) args = cls.create_collection(**config)
args["type"] = storage_type args["type"] = storage_type
return args return args
except NotImplementedError as e: except NotImplementedError as e:
cli_logger.error(e) cli_logger.error(e)
raise exceptions.UserError( raise exceptions.UserError(
f'Unable to find or create collection "{collection}" for ' 'Unable to find or create collection "{collection}" for '
f'storage "{storage_name}". Please create the collection ' 'storage "{storage}". Please create the collection '
"yourself." "yourself.".format(collection=collection, storage=storage_name)
) )

View file

@ -3,8 +3,6 @@ Contains exception classes used by vdirsyncer. Not all exceptions are here,
only the most commonly used ones. only the most commonly used ones.
""" """
from __future__ import annotations
class Error(Exception): class Error(Exception):
"""Baseclass for all errors.""" """Baseclass for all errors."""

View file

@ -1,150 +1,94 @@
from __future__ import annotations
import asyncio
import logging import logging
import os
import platform
import re
from abc import ABC
from abc import abstractmethod
from base64 import b64encode
from ssl import create_default_context
import aiohttp import requests
import requests.auth
from aiohttp import ServerDisconnectedError
from aiohttp import ServerTimeoutError
from requests.utils import parse_dict_header
from tenacity import retry
from tenacity import retry_if_exception_type
from tenacity import stop_after_attempt
from tenacity import wait_exponential
from . import __version__ from . import __version__
from . import DOCS_HOME
from . import exceptions from . import exceptions
from .utils import expand_path from .utils import expand_path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
USERAGENT = f"vdirsyncer/{__version__}" USERAGENT = f"vdirsyncer/{__version__}"
# 'hack' to prevent aiohttp from loading the netrc config,
# but still allow it to read PROXY_* env vars. def _detect_faulty_requests(): # pragma: no cover
# Otherwise, if our host is defined in the netrc config, text = (
# aiohttp will overwrite our Authorization header. "Error during import: {e}\n\n"
# https://github.com/pimutils/vdirsyncer/issues/1138 "If you have installed vdirsyncer from a distro package, please file "
os.environ["NETRC"] = "NUL" if platform.system() == "Windows" else "/dev/null" "a bug against that package, not vdirsyncer.\n\n"
"Consult {d}/problems.html#requests-related-importerrors"
"-based-distributions on how to work around this."
)
try:
from requests_toolbelt.auth.guess import GuessAuth # noqa
except ImportError as e:
import sys
print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr)
sys.exit(1)
class AuthMethod(ABC): _detect_faulty_requests()
def __init__(self, username, password): del _detect_faulty_requests
self.username = username
self.password = password
@abstractmethod
def handle_401(self, response):
raise NotImplementedError
@abstractmethod
def get_auth_header(self, method, url):
raise NotImplementedError
def __eq__(self, other):
if not isinstance(other, AuthMethod):
return False
return (
self.__class__ == other.__class__
and self.username == other.username
and self.password == other.password
)
class BasicAuthMethod(AuthMethod):
def handle_401(self, _response):
pass
def get_auth_header(self, _method, _url):
auth_str = f"{self.username}:{self.password}"
return "Basic " + b64encode(auth_str.encode("utf-8")).decode("utf-8")
class DigestAuthMethod(AuthMethod):
# make class var to 'cache' the state, which is more efficient because otherwise
# each request would first require another 'initialization' request.
_auth_helpers: dict[tuple[str, str], requests.auth.HTTPDigestAuth] = {}
def __init__(self, username: str, password: str):
super().__init__(username, password)
self._auth_helper = self._auth_helpers.get(
(username, password), requests.auth.HTTPDigestAuth(username, password)
)
self._auth_helpers[(username, password)] = self._auth_helper
@property
def auth_helper_vars(self):
return self._auth_helper._thread_local
def handle_401(self, response):
s_auth = response.headers.get("www-authenticate", "")
if "digest" in s_auth.lower():
# Original source:
# https://github.com/psf/requests/blob/f12ccbef6d6b95564da8d22e280d28c39d53f0e9/src/requests/auth.py#L262-L263
pat = re.compile(r"digest ", flags=re.IGNORECASE)
self.auth_helper_vars.chal = parse_dict_header(pat.sub("", s_auth, count=1))
def get_auth_header(self, method, url):
self._auth_helper.init_per_thread_state()
if not self.auth_helper_vars.chal:
# Need to do init request first
return ""
return self._auth_helper.build_digest_header(method, url)
def prepare_auth(auth, username, password): def prepare_auth(auth, username, password):
if username and password: if username and password:
if auth == "basic" or auth is None: if auth == "basic" or auth is None:
return BasicAuthMethod(username, password) return (username, password)
if auth == "digest": elif auth == "digest":
return DigestAuthMethod(username, password) from requests.auth import HTTPDigestAuth
if auth == "guess":
raise exceptions.UserError( return HTTPDigestAuth(username, password)
"'Guess' authentication is not supported in this version of " elif auth == "guess":
"vdirsyncer.\n" try:
"Please explicitly specify either 'basic' or 'digest' auth instead. \n" from requests_toolbelt.auth.guess import GuessAuth
"See the following issue for more information: " except ImportError:
"https://github.com/pimutils/vdirsyncer/issues/1015" raise exceptions.UserError(
) "Your version of requests_toolbelt is too "
"old for `guess` authentication. At least "
"version 0.4.0 is required."
)
else:
return GuessAuth(username, password)
else: else:
raise exceptions.UserError(f"Unknown authentication method: {auth}") raise exceptions.UserError("Unknown authentication method: {}".format(auth))
elif auth: elif auth:
raise exceptions.UserError( raise exceptions.UserError(
f"You need to specify username and password for {auth} authentication." "You need to specify username and password "
"for {} authentication.".format(auth)
) )
else:
return None return None
def prepare_verify(verify, verify_fingerprint): def prepare_verify(verify, verify_fingerprint):
if isinstance(verify, str): if isinstance(verify, (str, bytes)):
return create_default_context(cafile=expand_path(verify)) verify = expand_path(verify)
elif verify is not None: elif not isinstance(verify, bool):
raise exceptions.UserError( raise exceptions.UserError(
f"Invalid value for verify ({verify}), must be a path to a PEM-file." "Invalid value for verify ({}), "
"must be a path to a PEM-file or boolean.".format(verify)
) )
if verify_fingerprint is not None: if verify_fingerprint is not None:
if not isinstance(verify_fingerprint, str): if not isinstance(verify_fingerprint, (bytes, str)):
raise exceptions.UserError( raise exceptions.UserError(
"Invalid value for verify_fingerprint " "Invalid value for verify_fingerprint "
f"({verify_fingerprint}), must be a string." "({}), must be a string or null.".format(verify_fingerprint)
) )
elif not verify:
raise exceptions.UserError(
"Disabling all SSL validation is forbidden. Consider setting "
"verify_fingerprint if you have a broken or self-signed cert."
)
return aiohttp.Fingerprint(bytes.fromhex(verify_fingerprint.replace(":", ""))) return {
"verify": verify,
return None "verify_fingerprint": verify_fingerprint,
}
def prepare_client_cert(cert): def prepare_client_cert(cert):
@ -155,80 +99,31 @@ def prepare_client_cert(cert):
return cert return cert
class TransientNetworkError(exceptions.Error): def _install_fingerprint_adapter(session, fingerprint):
"""Transient network condition that should be retried.""" prefix = "https://"
def _is_safe_to_retry_method(method: str) -> bool:
"""Returns True if the HTTP method is safe/idempotent to retry.
We consider these safe for our WebDAV usage:
- GET, HEAD, OPTIONS: standard safe methods
- PROPFIND, REPORT: read-only DAV queries used for listing/fetching
"""
return method.upper() in {"GET", "HEAD", "OPTIONS", "PROPFIND", "REPORT"}
class UsageLimitReached(exceptions.Error):
pass
async def _is_quota_exceeded_google(response: aiohttp.ClientResponse) -> bool:
"""Return True if the response JSON indicates Google-style `usageLimits` exceeded.
Expected shape:
{"error": {"errors": [{"domain": "usageLimits", ...}], ...}}
See https://developers.google.com/workspace/calendar/api/guides/errors#403_usage_limits_exceeded
"""
try: try:
data = await response.json(content_type=None) from requests_toolbelt.adapters.fingerprint import FingerprintAdapter
except Exception: except ImportError:
return False raise RuntimeError(
"`verify_fingerprint` can only be used with "
"requests-toolbelt versions >= 0.4.0"
)
if not isinstance(data, dict): if not isinstance(session.adapters[prefix], FingerprintAdapter):
return False fingerprint_adapter = FingerprintAdapter(fingerprint)
session.mount(prefix, fingerprint_adapter)
error = data.get("error")
if not isinstance(error, dict):
return False
errors = error.get("errors")
if not isinstance(errors, list):
return False
for entry in errors:
if isinstance(entry, dict) and entry.get("domain") == "usageLimits":
return True
return False
@retry( def request(
stop=stop_after_attempt(5), method, url, session=None, latin1_fallback=True, verify_fingerprint=None, **kwargs
wait=wait_exponential(multiplier=1, min=4, max=10),
retry=(
retry_if_exception_type(UsageLimitReached)
| retry_if_exception_type(TransientNetworkError)
),
reraise=True,
)
async def request(
method,
url,
session,
auth=None,
latin1_fallback=True,
**kwargs,
): ):
"""Wrapper method for requests, to ease logging and mocking as well as to """
support auth methods currently unsupported by aiohttp. Wrapper method for requests, to ease logging and mocking. Parameters should
be the same as for ``requests.request``, except:
Parameters should be the same as for ``aiohttp.request``, except:
:param session: A requests session object to use. :param session: A requests session object to use.
:param auth: The HTTP ``AuthMethod`` to use for authentication. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
:param verify_fingerprint: Optional. SHA256 of the expected server certificate. expected server certificate.
:param latin1_fallback: RFC-2616 specifies the default Content-Type of :param latin1_fallback: RFC-2616 specifies the default Content-Type of
text/* to be latin1, which is not always correct, but exactly what text/* to be latin1, which is not always correct, but exactly what
requests is doing. Setting this parameter to False will use charset requests is doing. Setting this parameter to False will use charset
@ -237,93 +132,47 @@ async def request(
https://github.com/kennethreitz/requests/issues/2042 https://github.com/kennethreitz/requests/issues/2042
""" """
# TODO: Support for client-side certifications. if session is None:
session = requests.Session()
if verify_fingerprint is not None:
_install_fingerprint_adapter(session, verify_fingerprint)
session.hooks = {"response": _fix_redirects} session.hooks = {"response": _fix_redirects}
# TODO: rewrite using func = session.request
# https://docs.aiohttp.org/en/stable/client_advanced.html#client-tracing
logger.debug("=" * 20) logger.debug("=" * 20)
logger.debug(f"{method} {url}") logger.debug(f"{method} {url}")
logger.debug(kwargs.get("headers", {})) logger.debug(kwargs.get("headers", {}))
logger.debug(kwargs.get("data")) logger.debug(kwargs.get("data", None))
logger.debug("Sending request...") logger.debug("Sending request...")
assert isinstance(kwargs.get("data", b""), bytes) assert isinstance(kwargs.get("data", b""), bytes)
cert = kwargs.pop("cert", None) r = func(method, url, **kwargs)
if cert is not None:
ssl_context = kwargs.pop("ssl", create_default_context())
ssl_context.load_cert_chain(*cert)
kwargs["ssl"] = ssl_context
headers = kwargs.pop("headers", {})
response: aiohttp.ClientResponse | None = None
for _attempt in range(2):
if auth:
headers["Authorization"] = auth.get_auth_header(method, url)
try:
response = await session.request(method, url, headers=headers, **kwargs)
except (
ServerDisconnectedError,
ServerTimeoutError,
asyncio.TimeoutError,
) as e:
# Retry only if the method is safe/idempotent for our DAV use
if _is_safe_to_retry_method(method):
logger.debug(
f"Transient network error on {method} {url}: {e}. Will retry."
)
raise TransientNetworkError(str(e)) from e
raise e from None
if response is None:
raise RuntimeError("No HTTP response obtained")
if response.ok or not auth:
# we don't need to do the 401-loop if we don't do auth in the first place
break
if response.status == 401:
auth.handle_401(response)
# retry once more after handling the 401 challenge
continue
else:
# some other error, will be handled later on
break
if response is None:
raise RuntimeError("No HTTP response obtained")
# See https://github.com/kennethreitz/requests/issues/2042 # See https://github.com/kennethreitz/requests/issues/2042
content_type = response.headers.get("Content-Type", "") content_type = r.headers.get("Content-Type", "")
if ( if (
not latin1_fallback not latin1_fallback
and "charset" not in content_type and "charset" not in content_type
and content_type.startswith("text/") and content_type.startswith("text/")
): ):
logger.debug("Removing latin1 fallback") logger.debug("Removing latin1 fallback")
response.encoding = None r.encoding = None
logger.debug(response.status) logger.debug(r.status_code)
logger.debug(response.headers) logger.debug(r.headers)
logger.debug(response.content) logger.debug(r.content)
if logger.getEffectiveLevel() <= logging.DEBUG and response.status >= 400: if r.status_code == 412:
# https://github.com/pimutils/vdirsyncer/issues/1186 raise exceptions.PreconditionFailed(r.reason)
logger.debug(await response.text()) if r.status_code in (404, 410):
raise exceptions.NotFoundError(r.reason)
if response.status == 403 and await _is_quota_exceeded_google(response): r.raise_for_status()
raise UsageLimitReached(response.reason) return r
if response.status == 412:
raise exceptions.PreconditionFailed(response.reason)
if response.status in (404, 410):
raise exceptions.NotFoundError(response.reason)
if response.status == 429:
raise UsageLimitReached(response.reason)
response.raise_for_status()
return response
def _fix_redirects(r, *args, **kwargs): def _fix_redirects(r, *args, **kwargs):

Some files were not shown because too many files have changed in this diff Show more