Compare commits


86 commits

Author SHA1 Message Date
Hugo Osvaldo Barrera
c3262d88cc Mark unused variables as such 2025-11-06 01:05:48 +01:00
cbb4e314f6 cli/discover: add implicit config to pair for collection creation
Adds support for auto-creating collections when they exist only on
one side and `implicit = 'create'` is set in the pair config.
2025-11-06 00:04:22 +00:00
Hugo Osvaldo Barrera
ac9919d865 Add changelog entry for latest commits 2025-10-09 11:05:33 +02:00
samm81
b124ce835b fix: remove unused import 2025-10-09 09:02:01 +00:00
samm81
6708dbbbdc fix: fix ssl behavior in request
- `ClientConnectionError` in `aiohttp` can wrap SSL handshake and
  certificate verification errors
- Retrying those hides the real cause and produced
  `TransientNetworkError` instead of the expected certificate error
- Removing `ClientConnectionError` from the transient list lets SSL
  errors surface correctly
2025-10-09 09:02:01 +00:00
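
A minimal sketch of the distinction this commit draws, using a hypothetical `is_transient` helper rather than vdirsyncer's actual code: the transient set keeps disconnect/timeout errors but deliberately leaves out `ClientConnectionError`, so wrapped SSL failures propagate instead of being retried.

```python
# Hypothetical helper, not vdirsyncer's real implementation.
import aiohttp

TRANSIENT_EXCEPTIONS = (
    aiohttp.ServerDisconnectedError,
    aiohttp.ServerTimeoutError,
    # aiohttp.ClientConnectionError is deliberately excluded: it can wrap SSL
    # handshake/certificate errors, which must surface instead of being retried.
)


def is_transient(exc: BaseException) -> bool:
    """Return True only for errors that are safe to retry."""
    return isinstance(exc, TRANSIENT_EXCEPTIONS)
```
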
samm81
81d8444810 http: refactor auth loop 2025-10-09 09:02:01 +00:00
samm81
4990cdf229 http: retry safe DAV methods on transient aiohttp disconnects; cli: gather with return_exceptions to allow in-flight backoffs to finish
- Retry ServerDisconnectedError/ServerTimeoutError/ClientConnectionError/asyncio.TimeoutError for GET/HEAD/OPTIONS/PROPFIND/REPORT
- Keep original rate-limit handling (429, Google 403 usageLimits)
- In CLI, avoid cancelling sibling tasks so per-request backoff can complete; re-raise first failure after all tasks finish
2025-10-09 09:02:01 +00:00
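
A rough sketch of the CLI-side idea described above, with illustrative names only (not the actual vdirsyncer code): gather all tasks with `return_exceptions=True` so sibling tasks are not cancelled and in-flight backoffs can finish, then re-raise the first failure once everything has completed.

```python
import asyncio

# Nullipotent DAV methods that are safe to retry, per the commit message.
SAFE_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "PROPFIND", "REPORT"})


async def run_all_then_raise(coros):
    # return_exceptions=True keeps sibling tasks running, so per-request
    # backoff/retries can complete even if one task fails early.
    results = await asyncio.gather(*coros, return_exceptions=True)
    for result in results:
        if isinstance(result, BaseException):
            raise result  # re-raise the first failure after all tasks finish
    return results
```
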
Hugo Osvaldo Barrera
4c2c60402e ci: run ruff and mypy
Fixes: https://github.com/pimutils/vdirsyncer/issues/1194
2025-09-20 13:53:39 +02:00
Hugo Osvaldo Barrera
2f4f4ac72b Fix some mypy type failures 2025-09-20 13:51:21 +02:00
Hugo Osvaldo Barrera
6354db82c4 make: install check requirements via install-dev 2025-09-20 13:19:41 +02:00
Hugo Osvaldo Barrera
a9b6488dac Merge docs-requirements.txt into pyproject.toml
Keep requirements definitions all in one place.
2025-09-20 13:18:17 +02:00
Hugo Osvaldo Barrera
a4ceabf80b Organise imports
And update imports from deprecated locations.
2025-09-20 13:05:14 +02:00
Hugo Osvaldo Barrera
3488f77cd6 Remove unused variables 2025-09-20 13:05:14 +02:00
Hugo Osvaldo Barrera
19120422a7 Use ternary operator for trivial assignment 2025-09-20 13:05:14 +02:00
Hugo Osvaldo Barrera
2e619806a0 Drop support for Python 3.8
Note that recent commits introduced syntax unsupported by Python 3.8
already.
2025-09-20 13:05:03 +02:00
Hugo Osvaldo Barrera
4669bede07 Organise imports 2025-09-20 12:56:22 +02:00
Hugo Osvaldo Barrera
59c1c55407 Document wrapper 2025-09-20 12:50:00 +02:00
Hugo Osvaldo Barrera
1502f5b5f4 Execute one assertion per line 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
a4d4bf8fd1 Normalise pytest syntax 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
aab70e9fb0 Use cached_property from the stdlib
Our local implementation preceded the one in the stdlib, but we no
longer support versions of Python which do not ship it.
2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
ed88406aec Avoid using mutable class attributes
A tuple works fine here.
2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
ffe883a2f1 Avoid warning due to unused import 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
e5f2869580 ruff: ignore block for legacy Python 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
95bb7bd7f9 Declare functions instead of assigning to lambdas 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
e3b2473383 Use list expansion instead of concatenation 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
424cfc5799 ruff: ignore false positive 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
29312e87c5 Close status even if assertions fail 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
c77b22334a Add changelog entry for latest change 2025-09-20 12:42:10 +02:00
samm81
02350c924b http(request): collates status checks 2025-09-13 16:23:46 +07:00
Samuel Maynard
605f878f9b test_retry: remove unneeded decorator
Co-authored-by: Hugo <hugo@whynothugo.nl>
2025-09-13 12:22:25 +03:00
samm81
bb2b71da81 builds(archlinux-py313): adds python-tenacity package 2025-09-12 17:02:28 +07:00
samm81
065ebe4752 AUTHORS: add samm81 2025-09-12 16:25:00 +07:00
samm81
0d741022a9 http: add rate limiting (mainly for google)
- google calendar uses the `403` and `429` codes to perform rate limiting [1][2]. this pr adds `tenacity` to perform exponential back off as suggested in google calendar's docs [3].

[1]: https://developers.google.com/workspace/calendar/api/guides/errors#403_rate_limit_exceeded
[2]: https://developers.google.com/workspace/calendar/api/guides/errors#429_too_many_requests
[3]: https://developers.google.com/workspace/calendar/api/guides/quota#backoff
2025-09-12 16:20:44 +07:00
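
A hedged sketch of that back-off approach using tenacity; the helper name and the wait/stop parameters here are illustrative, not vdirsyncer's actual ones. `UsageLimitReached` stands in for whatever exception marks a 429 or a Google 403 `usageLimits` response.

```python
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential


class UsageLimitReached(Exception):
    """Signals a 429, or a Google 403 response with the usageLimits domain."""


@retry(
    retry=retry_if_exception_type(UsageLimitReached),
    wait=wait_exponential(multiplier=1, max=64),  # 1s, 2s, 4s, ... capped at 64s
    stop=stop_after_attempt(8),
    reraise=True,
)
async def fetch_with_backoff(do_request):
    # do_request() is expected to raise UsageLimitReached when rate-limited.
    return await do_request()
```
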
Hugo Osvaldo Barrera
b5d3b7e578 Apply auto-fixes for RUF rule 2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
9677cf9812 Simplify some statements 2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
6da84c7881 ruff: sort rules 2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
dceb113334 ruff: fix mix-up in configuration
Ruff hasn't been finding errors in a while. Most of them are linting
checks anyway, but there was quite a bit of deprecated usages.
2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
01fa614b6b Fix line which are too long 2025-08-29 10:17:41 +02:00
Hugo Osvaldo Barrera
20cc1247ed ruff: apply auto-fixes 2025-08-29 10:03:24 +02:00
Дилян Палаузов
2f548e048d Some code simplifications with the return statement 2025-08-29 09:48:27 +02:00
Jakub Klinkovský
5d343264f3 Remove python-requests-toolbelt from Arch Linux build
The dependency was dropped in 89a01631fa
2025-08-29 09:28:50 +02:00
Hugo Osvaldo Barrera
bc3fa8bd39 Remove stale references to setup.py 2025-08-28 22:57:55 +02:00
Hugo Osvaldo Barrera
8803d5a086 ruff: use extend-select
Ensure that we don't disable any default rules.
2025-08-28 11:40:46 +02:00
Hugo Osvaldo Barrera
96754a3d0a ruff: enable TID rules 2025-08-28 11:39:06 +02:00
Hugo Osvaldo Barrera
d42707c108 Bump constraint for aiostream
There's a newer version available, and it also doesn't have any breaking
changes which could affect us.
2025-08-28 11:37:14 +02:00
Hugo Osvaldo Barrera
ddfe3cc749 Bump constraint for aiostream
Fixes: https://github.com/pimutils/vdirsyncer/issues/1111
2025-08-28 11:32:28 +02:00
Radon Rosborough
84ff0ac943 Log error response body in debug 2025-08-27 09:11:32 +02:00
Hugo Osvaldo Barrera
388c16f188 Document sqlite fix in changelog 2025-08-25 17:37:26 +02:00
Hugo Osvaldo Barrera
78f41d32ce Explicitly close status database
Using `__del__` often closes the database on a different thread, which
is not supported by the sqlite module and produces a different warning.

Explicitly close the status database everywhere it is used.
2025-08-25 17:33:20 +02:00
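
A minimal sketch of the resulting usage pattern; the class and import path are taken from the diffs further down, the snippet itself is illustrative.

```python
import contextlib

from vdirsyncer.sync.status import SqliteStatus

# Close the connection explicitly, on the same thread that opened it,
# instead of relying on __del__ (which sqlite3 may run on another thread).
with contextlib.closing(SqliteStatus(":memory:")) as status:
    status.load_legacy_status({})
    # ... use the status here; it is closed when the block exits ...
```
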
Hugo Osvaldo Barrera
164559ad7a Remove references to obsolete event_loop fixture
It's gone from the latest pytest-asyncio.
2025-08-25 17:12:21 +02:00
samm81
2c6dc4cddf updates SqliteStatus to properly close connections
otherwise, when trying to run `pytest` in a `python3.13` environment
results in a bunch of

```
tests/unit/sync/test_sync.py::test_partial_sync_ignore
  /home/user/.asdf/installs/python/3.13.1/lib/python3.13/asyncio/base_events.py:650: ResourceWarning: unclosed database in <sqlite3.Connection object at 0x7fda8f6b6c50>
    sys.set_asyncgen_hooks(
  Enable tracemalloc to get traceback where the object was allocated.
  See https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings for more info.
  ```
2025-08-25 16:53:00 +02:00
samm81
9bbb7fa91a fix: fix mypy typing error 2025-08-25 16:51:29 +02:00
Hugo Osvaldo Barrera
f8bcafa9d7 ci: use Alpine 3.19 for Python 3.11 2025-08-25 16:49:16 +02:00
Hugo Osvaldo Barrera
162879df21 ci: include python version in job name 2025-07-23 23:24:35 +02:00
Hugo Osvaldo Barrera
3b9db0e4db Add support for Python 3.13
Fixes: https://github.com/pimutils/vdirsyncer/issues/1180
2025-07-23 23:23:59 +02:00
Hugo Osvaldo Barrera
63d2e6c795 pyproject: squelch warning 2025-04-11 01:59:29 +02:00
Hugo Osvaldo Barrera
03d1c4666d pyproject: update syntax for licence 2025-04-11 01:59:17 +02:00
Hugo Osvaldo Barrera
ecdd565be4 Document checkfile() 2025-04-09 14:00:40 +02:00
Hugo Osvaldo Barrera
17e43fd633 Move test dependencies into pyproject.toml 2025-04-07 18:47:44 +02:00
Hugo Osvaldo Barrera
2b4496fea4 Update linting tools 2025-04-07 18:42:07 +02:00
Hugo Osvaldo Barrera
fc4a02c0c9 Add some missing type hints 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
c19802e4d8 Configure ruff as an auto-formatter 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
cce8fef8de Auto-format using ruff 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
9a0dbc8cd0 Update ruff configuration syntax 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
32453cccfc Drop support for Python 3.7
Installing on Python 3.7 no longer works due to lack of support in the
minimal version of setuptools_scm. This commit makes the change
official, but it happened a while ago.
2025-04-07 18:39:52 +02:00
Hugo Osvaldo Barrera
057f3af293 Remove stale GitLab CI config 2025-04-07 18:35:12 +02:00
Hugo Osvaldo Barrera
e76d8a5b03 Add two more trove classifiers 2025-04-07 18:09:36 +02:00
Hugo Osvaldo Barrera
d8961232c4 Remove setup.py in favour of pyproject.toml
Implements: https://github.com/pimutils/vdirsyncer/issues/1164
2025-04-07 18:06:45 +02:00
Hugo Osvaldo Barrera
646e0b48a5 Delete stale comment 2025-04-07 18:01:16 +02:00
Hugo Osvaldo Barrera
fb6a859b88 Add changelog entry for recent change 2025-04-07 17:39:18 +02:00
Petr Moucha
ff999b5b74 Use proxy configuration from environment for Google storage 2025-04-04 13:17:32 +02:00
Hugo Osvaldo Barrera
41b48857eb Remove reference to dead domain 2025-03-06 11:57:05 +01:00
Hugo Osvaldo Barrera
70d09e6d5d Remove stale comment 2025-02-13 13:42:06 +01:00
Ben Boeckel
8b063c39cb atomicwrites: remove dependency on abandoned library 2025-02-13 13:37:06 +01:00
Hugo Osvaldo Barrera
12a06917db Add explicit configuration for readthedocs
See: https://about.readthedocs.com/blog/2024/12/deprecate-config-files-without-sphinx-or-mkdocs-config/
2025-02-13 13:34:13 +01:00
Hugo Osvaldo Barrera
2fee1d67f2 Update CI job with "oldest supported dependencies"
Alpine 3.17 has faded away, bump to Alpine 3.18.
2025-02-13 13:32:59 +01:00
Hugo Osvaldo Barrera
a934d5ec66 Keep test for duplicate consecutive keys
See: https://github.com/pimutils/vdirsyncer/pull/1153
2024-12-21 16:49:50 +01:00
Colin Watson
c79d3680cd Fix _Component.__delitem__ with adjacent identical keys
Hypothesis found the following example:

```
tests/unit/utils/test_vobject.py:335: in add_prop
    assert c[key] == value
E   AssertionError: assert '0' == '1'
E
E     - 1
E     + 0
E   Falsifying example:
E   state = VobjectMachine()
E   unparsed_0 = state.get_unparsed_lines(encoded=False, joined=False)
E   parsed_0 = state.parse(unparsed=unparsed_0)
E   state.add_prop_raw(c=parsed_0, key='0', params=[], value='0')
E   state.add_prop_raw(c=parsed_0, key='0', params=[], value='0')
E   state.add_prop(c=parsed_0, key='0', value='1')
E   state.teardown()
```

After the two `add_prop_raw` calls, `c.props` is `["0;:0", "0;:0",
"FOO:YES"]`.  `_Component.__delitem__` then fails to effectively delete
the previous key: it deletes the first `"0;:0"` item, but then checks
for continuation lines following it and incorrectly keeps the second
`"0;:0"` item even though it begins with one of the prefixes it's trying
to delete.  Checking for the prefix in the check for continuation lines
fixes this.

Fixes: #1149
2024-12-20 01:43:15 +00:00
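
A hedged reconstruction of the idea behind the fix, written as a standalone helper rather than the real `_Component.__delitem__`: continuation handling must not swallow an adjacent line that itself starts with one of the prefixes being deleted.

```python
def delete_prop(lines, key):
    """Drop every "KEY:"/"KEY;" line plus its folded continuation lines,
    even when two identical keys are adjacent."""
    prefixes = (f"{key}:", f"{key};")
    kept, deleting = [], False
    for line in lines:
        if line.startswith(prefixes):
            deleting = True  # an adjacent identical key is deleted as well
            continue
        if deleting and line.startswith((" ", "\t")):
            continue  # continuation line of a deleted property
        deleting = False
        kept.append(line)
    return kept


# delete_prop(["0;:0", "0;:0", "FOO:YES"], "0") == ["FOO:YES"]
```
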
Hugo Osvaldo Barrera
cd050d57b9 Use direnv to set up a virtualenv for development 2024-12-09 14:18:24 +01:00
Hugo Osvaldo Barrera
8c98992f74 Move setuptools-scm config into pyproject.toml 2024-12-09 14:18:06 +01:00
Hugo Osvaldo Barrera
c2eed9fb59 Add a readthedocs configuration file
Used for building docs in CI pipelines.
2024-12-09 01:36:22 +01:00
Mike A.
a490544405 Do not load netrc config files 2024-12-09 01:32:29 +01:00
Hugo Osvaldo Barrera
688d6f907f Update deprecated usages of hypothesis 2024-12-09 01:30:44 +01:00
euxane
2e7e31fdbf storage/http: add support for filter_hook
This allows users to process fetched items through a filter command,
to fix malformed webcal items as they are imported.

In my case, my provider adds the export time to the description and
random sequence numbers to all events. This caused the whole collection
to be invalidated and propagated at each sync. I use the filter to
remove those, canonicalising the items.
2024-12-08 19:31:32 +01:00
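
A minimal sketch of what such a filter step can look like (an illustrative helper, not the storage's real implementation): the raw item is written to the command's stdin, and whatever the command prints becomes the new item, with empty output meaning "skip".

```python
from __future__ import annotations

import subprocess


def run_filter_hook(raw_item: str, command: str) -> str | None:
    result = subprocess.run(
        command,
        shell=True,
        input=raw_item,
        capture_output=True,
        text=True,
        check=True,
    )
    filtered = result.stdout
    return filtered if filtered.strip() else None  # empty output: skip the item
```
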
Arran Ubels
616d7aacb0 OfflineIMAP url Update 2024-10-31 22:43:45 +01:00
Hugo Osvaldo Barrera
89129e37b6 Typo
Fixes: https://github.com/pimutils/vdirsyncer/issues/1139
2024-09-13 18:36:17 +02:00
71 changed files with 958 additions and 495 deletions

View file

@@ -10,13 +10,12 @@ packages:
 - python-installer
 - python-setuptools-scm
 # Runtime dependencies:
-- python-atomicwrites
 - python-click
 - python-click-log
 - python-click-threading
 - python-requests
-- python-requests-toolbelt
 - python-aiohttp-oauthlib
+- python-tenacity
 # Test dependencies:
 - python-hypothesis
 - python-pytest-cov
@@ -36,6 +35,8 @@ environment:
   REQUIREMENTS: release
   # TODO: ETESYNC_TESTS
 tasks:
+- check-python:
+    python --version | grep 'Python 3.13'
 - docker: |
     sudo systemctl start docker
 - setup: |

View file

@@ -3,7 +3,7 @@
 # TODO: It might make more sense to test with an older Ubuntu or Fedora version
 # here, and consider that our "oldest suppported environment".
-image: alpine/3.17 # python 3.10
+image: alpine/3.19 # python 3.11
 packages:
 - docker
 - docker-cli
@@ -18,7 +18,6 @@ environment:
   CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
   DAV_SERVER: radicale xandikos
   REQUIREMENTS: minimal
-  # TODO: ETESYNC_TESTS
 tasks:
 - venv: |
     python3 -m venv $HOME/venv
@@ -28,6 +27,8 @@ tasks:
     sudo service docker start
 - setup: |
     cd vdirsyncer
+    # Hack, no idea why it's needed
+    sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
     make -e install-dev
 - test: |
     cd vdirsyncer

View file

@@ -29,6 +29,9 @@ tasks:
     cd vdirsyncer
     make -e ci-test
     make -e ci-test-storage
+- check: |
+    cd vdirsyncer
+    make check
 - check-secrets: |
     # Stop here if this is a PR. PRs can't run with the below secrets.
     [ -f ~/fastmail-secrets ] || complete-build

.envrc (new file, +1 line)
View file

@@ -0,0 +1 @@
+layout python3

View file

@@ -1,6 +0,0 @@
-python37:
-  image: python:3.7
-  before_script:
-    - make -e install-dev
-  script:
-    - make -e ci-test

View file

@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.5.0
+  rev: v5.0.0
   hooks:
   - id: trailing-whitespace
     args: [--markdown-linebreak-ext=md]
@@ -8,12 +8,8 @@ repos:
   - id: check-toml
   - id: check-added-large-files
   - id: debug-statements
-- repo: https://github.com/psf/black
-  rev: "24.2.0"
-  hooks:
-  - id: black
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: "v1.8.0"
+  rev: "v1.15.0"
   hooks:
   - id: mypy
     files: vdirsyncer/.*
@@ -21,12 +17,12 @@ repos:
     - types-setuptools
     - types-docutils
     - types-requests
-    - types-atomicwrites
 - repo: https://github.com/charliermarsh/ruff-pre-commit
-  rev: 'v0.2.2'
+  rev: 'v0.11.4'
   hooks:
   - id: ruff
     args: [--fix, --exit-non-zero-on-fix]
+  - id: ruff-format
 - repo: local
   hooks:
   - id: typos-syncroniz

.readthedocs.yaml (new file, +16 lines)
View file

@@ -0,0 +1,16 @@
+version: 2
+
+sphinx:
+  configuration: docs/conf.py
+
+build:
+  os: "ubuntu-22.04"
+  tools:
+    python: "3.9"
+
+python:
+  install:
+    - method: pip
+      path: .
+      extra_requirements:
+        - docs

View file

@@ -19,6 +19,7 @@ In alphabetical order:
 - rEnr3n
 - Thomas Weißschuh
 - Witcher01
+- samm81
 
 Special thanks goes to:

View file

@@ -9,6 +9,25 @@ Package maintainers and users who have to manually update their installation
 may want to subscribe to `GitHub's tag feed
 <https://github.com/pimutils/vdirsyncer/tags.atom>`_.

+Version 0.21.0
+==============
+
+- Implement retrying for ``google`` storage type when a rate limit is reached.
+- ``tenacity`` is now a required dependency.
+- Drop support for Python 3.8.
+- Retry transient network errors for nullipotent requests.
+
+Version 0.20.0
+==============
+
+- Remove dependency on abandoned ``atomicwrites`` library.
+- Implement ``filter_hook`` for the HTTP storage.
+- Drop support for Python 3.7.
+- Add support for Python 3.12 and Python 3.13.
+- Properly close the status database after using. This especially affects tests,
+  where we were leaking a large amount of file descriptors.
+- Extend supported versions of ``aiostream`` to include 0.7.x.
+
 Version 0.19.3
 ==============
@@ -18,6 +37,7 @@ Version 0.19.3
 - Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
 - A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
 - Implement digest auth. :gh:`1137`
+- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`

 Version 0.19.2
 ==============
@@ -53,6 +73,10 @@ Version 0.19.0
 - Add a new ``showconfig`` status. This prints *some* configuration values as
   JSON. This is intended to be used by external tools and helpers that interact
   with ``vdirsyncer``, and considered experimental.
+- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
+  "create", it implicitly creates missing collections during sync without user
+  prompts. This simplifies workflows where collections should be automatically
+  created on both sides.
 - Update TLS-related tests that were failing due to weak MDs. :gh:`903`
 - ``pytest-httpserver`` and ``trustme`` are now required for tests.
 - ``pytest-localserver`` is no longer required for tests.

View file

@@ -40,6 +40,11 @@ ci-test-storage:
 	done
 	bash $(CODECOV_PATH) -c

+check:
+	ruff check
+	ruff format --diff
+	#mypy vdirsyncer
+
 release-deb:
 	sh scripts/release-deb.sh debian jessie
 	sh scripts/release-deb.sh debian stretch
@@ -49,10 +54,10 @@ release-deb:

 install-dev:
 	pip install -U pip setuptools wheel
-	pip install -e .
-	pip install -Ur test-requirements.txt -r docs-requirements.txt pre-commit
+	pip install -e '.[test,check,docs]'
 	set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
-		pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
+		pip install pyproject-dependencies && \
+		pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
 	fi

 .PHONY: docs

View file

@@ -40,7 +40,7 @@ servers. It can also be used to synchronize calendars and/or addressbooks
 between two servers directly.

 It aims to be for calendars and contacts what `OfflineIMAP
-<http://offlineimap.org/>`_ is for emails.
+<https://www.offlineimap.org/>`_ is for emails.

 .. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/

View file

@@ -16,6 +16,7 @@ SPDX-License-Identifier: BSD-3-Clause
 SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
 Author: <bernhard.reiter@intevation.de>
 """
+
 from __future__ import annotations

 import re
@@ -53,8 +54,8 @@ def main(ical1_filename, ical2_filename):
             f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
             "or the second entry:\n"
             f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
+            *additional_args,
         ]
-        + additional_args
     )

     if r.returncode == 2:

View file

@@ -1,4 +0,0 @@
-# This file is used by readthedocs.org
-sphinx != 1.4.7
-sphinx_rtd_theme
-setuptools_scm

View file

@@ -20,7 +20,7 @@ copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
 release = get_distribution("vdirsyncer").version
 version = ".".join(release.split(".")[:2])  # The short X.Y version.

-rst_epilog = ".. |vdirsyncer_version| replace:: %s" % release
+rst_epilog = f".. |vdirsyncer_version| replace:: {release}"

 exclude_patterns = ["_build"]
@@ -37,9 +37,7 @@ except ImportError:
     html_theme = "default"
     if not on_rtd:
         print("-" * 74)
-        print(
-            "Warning: sphinx-rtd-theme not installed, building with default " "theme."
-        )
+        print("Warning: sphinx-rtd-theme not installed, building with default theme.")
         print("-" * 74)

 html_static_path = ["_static"]

View file

@@ -128,6 +128,16 @@ Pair Section
   The ``conflict_resolution`` parameter applies for these properties too.

+.. _implicit_def:
+
+- ``implicit``: Opt into implicitly creating collections. Example::
+
+      implicit = "create"
+
+  When set to "create", missing collections are automatically created on both
+  sides during sync without prompting the user. This simplifies workflows where
+  all collections should be synchronized bidirectionally.
+
 .. _storage_config:

 Storage Section
@@ -484,6 +494,7 @@ leads to an error.
         [storage holidays_remote]
         type = "http"
         url = https://example.com/holidays_from_hicksville.ics
+        #filter_hook = null

     Too many WebCAL providers generate UIDs of all ``VEVENT``-components
     on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
@@ -508,3 +519,8 @@ leads to an error.
     :param auth_cert: Optional. Either a path to a certificate with a client
         certificate and the key or a list of paths to the files with them.
     :param useragent: Default ``vdirsyncer``.
+    :param filter_hook: Optional. A filter command to call for each fetched
+        item, passed in raw form to stdin and returned via stdout.
+        If nothing is returned by the filter command, the item is skipped.
+        This can be used to alter fields as needed when dealing with providers
+        generating malformed events.

View file

@@ -81,7 +81,7 @@ virtualenv_ and run this inside of it::
     # Install development dependencies, including:
     # - vdirsyncer from the repo into the virtualenv
-    # - stylecheckers (ruff) and code formatters (black)
+    # - style checks and formatting (ruff)
     make install-dev

     # Install git commit hook for some extra linting and checking

View file

@@ -42,7 +42,7 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
 use Python's package manager "pip". First, you'll have to check that the
 following things are installed:

-- Python 3.7 to 3.11 and pip.
+- Python 3.9 to 3.13 and pip.
 - ``libxml`` and ``libxslt``
 - ``zlib``
 - Linux or macOS. **Windows is not supported**, see :gh:`535`.
@@ -84,7 +84,7 @@ the above location.
 The dirty, easy way
 ~~~~~~~~~~~~~~~~~~~

-If pipx is not available on your distirbution, the easiest way to install
+If pipx is not available on your distribution, the easiest way to install
 vdirsyncer at this point would be to run::

     pip install --ignore-installed vdirsyncer

View file

@@ -46,8 +46,9 @@ You can install the all development dependencies with::
     make install-dev

 You probably don't want this since it will use pip to download the
-dependencies. Alternatively you can find the testing dependencies in
-``test-requirements.txt``, again with lower-bound version requirements.
+dependencies. Alternatively test dependencies are listed as ``test`` optional
+dependencies in ``pyproject.toml``, again with lower-bound version
+requirements.

 You also have to have vdirsyncer fully installed at this point. Merely
 ``cd``-ing into the tarball will not be sufficient.
@@ -73,10 +74,11 @@ Using Sphinx_ you can generate the documentation you're reading right now in a
 variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
 take care of the HTML docs' formatting.

-You can find a list of dependencies in ``docs-requirements.txt``. Again, you
-can install those using pip with::
+You can find a list of dependencies in ``pyproject.toml``, in the
+``project.optional-dependencies`` section as ``docs``. Again, you can install
+those using pip with::

-    pip install -r docs-requirements.txt
+    pip install '.[docs]'

 Then change into the ``docs/`` directory and build whatever format you want
 using the ``Makefile`` in there (run ``make`` for the formats you can build).

View file

@@ -50,7 +50,6 @@ program chosen:
 * Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
   hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
-  e.g. DAVx⁵_ or the apps by dmfs_.
+  e.g. DAVx⁵_ or other apps bundled with smartphones.

 .. _DAVx⁵: https://www.davx5.com/
-.. _dmfs: https://dmfs.org/

View file

@@ -4,8 +4,10 @@
 image: alpine/edge
 packages:
+- py3-build
 - py3-pip
 - py3-setuptools
+- py3-setuptools_scm
 - py3-wheel
 - twine
 sources:
@@ -23,5 +25,5 @@ tasks:
     git describe --exact-match --tags || complete-build
 - publish: |
     cd vdirsyncer
-    python setup.py sdist bdist_wheel
+    python -m build --no-isolation
     twine upload --non-interactive dist/*

View file

@@ -1,18 +1,88 @@
-[tool.ruff]
-select = [
-    "E",
-    "F",
-    "W",
-    "B0",
-    "I",
-    "UP",
-    "C4",
-    # "TID",
-    "RSE"
-]
-target-version = "py37"
-
-[tool.ruff.isort]
+# Vdirsyncer synchronizes calendars and contacts.
+#
+# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
+# how to package vdirsyncer.
+
+[build-system]
+requires = ["setuptools>=64", "setuptools_scm>=8"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "vdirsyncer"
+authors = [
+    {name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
+]
+description = "Synchronize calendars and contacts"
+readme = "README.rst"
+requires-python = ">=3.9"
+keywords = ["todo", "task", "icalendar", "cli"]
+license = "BSD-3-Clause"
+license-files = ["LICENSE"]
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Environment :: Console",
+    "Operating System :: POSIX",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.9",
+    "Topic :: Internet",
+    "Topic :: Office/Business :: Scheduling",
+    "Topic :: Utilities",
+]
+dependencies = [
+    "click>=5.0,<9.0",
+    "click-log>=0.3.0,<0.5.0",
+    "requests>=2.20.0",
+    "aiohttp>=3.8.2,<4.0.0",
+    "aiostream>=0.4.3,<0.8.0",
+    "tenacity>=9.0.0",
+]
+dynamic = ["version"]
+
+[project.optional-dependencies]
+google = ["aiohttp-oauthlib"]
+test = [
+    "hypothesis>=6.72.0,<7.0.0",
+    "pytest",
+    "pytest-cov",
+    "pytest-httpserver",
+    "trustme",
+    "pytest-asyncio",
+    "aioresponses",
+]
+docs = [
+    "sphinx!=1.4.7",
+    "sphinx_rtd_theme",
+    "setuptools_scm",
+]
+check = [
+    "mypy",
+    "ruff",
+    "types-docutils",
+    "types-requests",
+    "types-setuptools",
+]
+
+[project.scripts]
+vdirsyncer = "vdirsyncer.cli:app"
+
+[tool.ruff.lint]
+extend-select = [
+    "B0",
+    "C4",
+    "E",
+    "I",
+    "RSE",
+    "SIM",
+    "TID",
+    "UP",
+    "W",
+]
+
+[tool.ruff.lint.isort]
 force-single-line = true
 required-imports = ["from __future__ import annotations"]
@@ -26,6 +96,7 @@ addopts = """
 --color=yes
 """
 # filterwarnings=error
+asyncio_default_fixture_loop_scope = "function"

 [tool.mypy]
 ignore_missing_imports = true
@@ -34,3 +105,10 @@ ignore_missing_imports = true
 exclude_lines = [
     "if TYPE_CHECKING:",
 ]
+
+[tool.setuptools.packages.find]
+include = ["vdirsyncer*"]
+
+[tool.setuptools_scm]
+write_to = "vdirsyncer/version.py"
+version_scheme = "no-guess-dev"

View file

@@ -1,82 +0,0 @@
-"""
-Vdirsyncer synchronizes calendars and contacts.
-
-Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
-how to package vdirsyncer.
-"""
-
-from __future__ import annotations
-
-from setuptools import Command
-from setuptools import find_packages
-from setuptools import setup
-
-requirements = [
-    # https://github.com/mitsuhiko/click/issues/200
-    "click>=5.0,<9.0",
-    "click-log>=0.3.0, <0.5.0",
-    "requests >=2.20.0",
-    # https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
-    "atomicwrites>=0.1.7",
-    "aiohttp>=3.8.0,<4.0.0",
-    "aiostream>=0.4.3,<0.5.0",
-]
-
-
-class PrintRequirements(Command):
-    description = "Prints minimal requirements"
-
-    user_options: list = []
-
-    def initialize_options(self):
-        pass
-
-    def finalize_options(self):
-        pass
-
-    def run(self):
-        for requirement in requirements:
-            print(requirement.replace(">", "=").replace(" ", ""))
-
-
-with open("README.rst") as f:
-    long_description = f.read()
-
-setup(
-    # General metadata
-    name="vdirsyncer",
-    author="Markus Unterwaditzer",
-    author_email="markus@unterwaditzer.net",
-    url="https://github.com/pimutils/vdirsyncer",
-    description="Synchronize calendars and contacts",
-    license="BSD",
-    long_description=long_description,
-    # Runtime dependencies
-    install_requires=requirements,
-    # Optional dependencies
-    extras_require={
-        "google": ["aiohttp-oauthlib"],
-    },
-    # Build dependencies
-    setup_requires=["setuptools_scm != 1.12.0"],
-    # Other
-    packages=find_packages(exclude=["tests.*", "tests"]),
-    include_package_data=True,
-    cmdclass={"minimal_requirements": PrintRequirements},
-    use_scm_version={"write_to": "vdirsyncer/version.py"},
-    entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:app"]},
-    classifiers=[
-        "Development Status :: 4 - Beta",
-        "Environment :: Console",
-        "License :: OSI Approved :: BSD License",
-        "Operating System :: POSIX",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Topic :: Internet",
-        "Topic :: Utilities",
-    ],
-)

View file

@@ -1,7 +0,0 @@
-hypothesis>=5.0.0,<7.0.0
-pytest
-pytest-cov
-pytest-httpserver
-trustme
-pytest-asyncio
-aioresponses

View file

@@ -103,10 +103,8 @@ X-SOMETHING:{r}
 HAHA:YES
 END:FOO"""

-printable_characters_strategy = st.text(
-    st.characters(blacklist_categories=("Cc", "Cs"))
-)
+printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))

 uid_strategy = st.text(
-    st.characters(blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
+    st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
 ).filter(lambda x: x.strip() == x)

View file

@@ -45,7 +45,7 @@ settings.register_profile(
     "deterministic",
     settings(
         derandomize=True,
-        suppress_health_check=HealthCheck.all(),
+        suppress_health_check=list(HealthCheck),
     ),
 )
 settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
@@ -59,12 +59,12 @@ else:

 @pytest_asyncio.fixture
-async def aio_session(event_loop):
+async def aio_session():
     async with aiohttp.ClientSession() as session:
         yield session


 @pytest_asyncio.fixture
-async def aio_connector(event_loop):
+async def aio_connector():
     async with aiohttp.TCPConnector(limit_per_host=16) as conn:
         yield conn

View file

@@ -10,16 +10,15 @@ import aiostream
 import pytest
 import pytest_asyncio

+from tests import EVENT_TEMPLATE
+from tests import TASK_TEMPLATE
+from tests import VCARD_TEMPLATE
+from tests import assert_item_equals
+from tests import normalize_item
 from vdirsyncer import exceptions
 from vdirsyncer.storage.base import normalize_meta_value
 from vdirsyncer.vobject import Item

-from .. import EVENT_TEMPLATE
-from .. import TASK_TEMPLATE
-from .. import VCARD_TEMPLATE
-from .. import assert_item_equals
-from .. import normalize_item
-

 def get_server_mixin(server_name):
     from . import __name__ as base
@@ -105,7 +104,7 @@ class StorageTests:
         href, etag = await s.upload(get_item())
         if etag is None:
             _, etag = await s.get(href)
-        ((href2, item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
+        ((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
         assert href2 == href
         assert etag2 == etag
@@ -119,7 +118,7 @@ class StorageTests:
     @pytest.mark.asyncio
     async def test_upload(self, s, get_item):
         item = get_item()
-        href, etag = await s.upload(item)
+        href, _etag = await s.upload(item)
         assert_item_equals((await s.get(href))[0], item)

     @pytest.mark.asyncio
@@ -147,7 +146,7 @@ class StorageTests:
     @pytest.mark.asyncio
     async def test_wrong_etag(self, s, get_item):
         item = get_item()
-        href, etag = await s.upload(item)
+        href, _etag = await s.upload(item)
         with pytest.raises(exceptions.PreconditionFailed):
             await s.update(href, item, '"lolnope"')
         with pytest.raises(exceptions.PreconditionFailed):
@@ -423,7 +422,7 @@ class StorageTests:
             ).strip()
         )

-        href, etag = await s.upload(item)
-        item2, etag2 = await s.get(href)
+        href, _etag = await s.upload(item)
+        item2, _etag2 = await s.get(href)
         assert normalize_item(item) == normalize_item(item2)

View file

@@ -8,12 +8,11 @@ import aiostream
 import pytest

 from tests import assert_item_equals
+from tests.storage import StorageTests
+from tests.storage import get_server_mixin
 from vdirsyncer import exceptions
 from vdirsyncer.vobject import Item

-from .. import StorageTests
-from .. import get_server_mixin
-
 dav_server = os.environ.get("DAV_SERVER", "skip")
 ServerMixin = get_server_mixin(dav_server)
@@ -49,6 +48,6 @@ class DAVStorageTests(ServerMixin, StorageTests):
         monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
         item = get_item(uid="град сатану" + str(uuid.uuid4()))

-        href, etag = await s.upload(item)
-        item2, etag2 = await s.get(href)
+        href, _etag = await s.upload(item)
+        item2, _etag2 = await s.get(href)
         assert_item_equals(item, item2)

View file

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import contextlib
 import datetime
 from textwrap import dedent
@@ -11,10 +12,10 @@ from aioresponses import aioresponses
 from tests import EVENT_TEMPLATE
 from tests import TASK_TEMPLATE
 from tests import VCARD_TEMPLATE
+from tests.storage import format_item
 from vdirsyncer import exceptions
 from vdirsyncer.storage.dav import CalDAVStorage

-from .. import format_item
 from . import DAVStorageTests
 from . import dav_server
@@ -30,18 +31,16 @@ class TestCalDAVStorage(DAVStorageTests):
     async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
         s = self.storage_class(item_types=(item_type,), **await get_storage_args())

-        try:
+        # Most storages hard-fail, but xandikos doesn't.
+        with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
             await s.upload(format_item(VCARD_TEMPLATE))
-        except (exceptions.Error, aiohttp.ClientResponseError):
-            # Most storages hard-fail, but xandikos doesn't.
-            pass

         assert not await aiostream.stream.list(s.list())

     # The `arg` param is not named `item_types` because that would hit
     # https://bitbucket.org/pytest-dev/pytest/issue/745/
     @pytest.mark.parametrize(
-        "arg,calls_num",
+        ("arg", "calls_num"),
         [
             (("VTODO",), 1),
             (("VEVENT",), 1),

View file

@@ -52,7 +52,7 @@ def test_xml_specialchars(char):
 @pytest.mark.parametrize(
     "href",
     [
-        "/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",  # noqa: E501
+        "/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
     ],
 )
 def test_normalize_href(href):

View file

@@ -13,7 +13,7 @@
         "url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
     }
 except KeyError as e:
-    pytestmark = pytest.mark.skip(f"Missing envkey: {str(e)}")
+    pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")


 @pytest.mark.flaky(reruns=5)

View file

@@ -8,11 +8,13 @@ import pytest
 class ServerMixin:
     @pytest.fixture
     def get_storage_args(self, slow_create_collection, aio_connector, request):
-        if "item_type" in request.fixturenames:
-            if request.getfixturevalue("item_type") == "VTODO":
-                # Fastmail has non-standard support for TODOs
-                # See https://github.com/pimutils/vdirsyncer/issues/824
-                pytest.skip("Fastmail has non-standard VTODO support.")
+        if (
+            "item_type" in request.fixturenames
+            and request.getfixturevalue("item_type") == "VTODO"
+        ):
+            # Fastmail has non-standard support for TODOs
+            # See https://github.com/pimutils/vdirsyncer/issues/824
+            pytest.skip("Fastmail has non-standard VTODO support.")

         async def inner(collection="test"):
             args = {

View file

@@ -10,7 +10,7 @@ class ServerMixin:
     def get_storage_args(self, item_type, slow_create_collection):
         if item_type != "VEVENT":
             # iCloud collections can either be calendars or task lists.
-            # See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa
+            # See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
             pytest.skip("iCloud doesn't support anything else than VEVENT")

         async def inner(collection="test"):

View file

@@ -48,7 +48,8 @@ class TestFilesystemStorage(StorageTests):
         s = self.storage_class(str(tmpdir), ".txt")
         await s.upload(Item("UID:a/b/c"))
         (item_file,) = tmpdir.listdir()
-        assert "/" not in item_file.basename and item_file.isfile()
+        assert "/" not in item_file.basename
+        assert item_file.isfile()

     @pytest.mark.asyncio
     async def test_ignore_tmp_files(self, tmpdir):
@@ -89,7 +90,7 @@ class TestFilesystemStorage(StorageTests):
         storage = self.storage_class(str(tmpdir), ".txt")

         item = Item("UID:" + "hue" * 600)
-        href, etag = await storage.upload(item)
+        href, _etag = await storage.upload(item)
         assert item.uid not in href

     @pytest.mark.asyncio

View file

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import aiohttp
 import pytest
 from aioresponses import CallbackResult
 from aioresponses import aioresponses
@@ -8,6 +9,8 @@ from tests import normalize_item
 from vdirsyncer.exceptions import UserError
 from vdirsyncer.http import BasicAuthMethod
 from vdirsyncer.http import DigestAuthMethod
+from vdirsyncer.http import UsageLimitReached
+from vdirsyncer.http import request
 from vdirsyncer.storage.http import HttpStorage
 from vdirsyncer.storage.http import prepare_auth
@@ -38,7 +41,7 @@ async def test_list(aio_connector):
         ),
     ]

-    responses = ["\n".join(["BEGIN:VCALENDAR"] + items + ["END:VCALENDAR"])] * 2
+    responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2

     def callback(url, headers, **kwargs):
         assert headers["User-Agent"].startswith("vdirsyncer/")
@@ -120,3 +123,41 @@ def test_verify_false_disallowed(aio_connector):
         HttpStorage(url="http://example.com", verify=False, connector=aio_connector)
     assert "must be a path to a pem-file." in str(excinfo.value).lower()
+
+
+@pytest.mark.asyncio
+async def test_403_usage_limit_exceeded(aio_connector):
+    url = "http://127.0.0.1/test_403"
+    error_body = {
+        "error": {
+            "errors": [
+                {
+                    "domain": "usageLimits",
+                    "message": "Calendar usage limits exceeded.",
+                    "reason": "quotaExceeded",
+                }
+            ],
+            "code": 403,
+            "message": "Calendar usage limits exceeded.",
+        }
+    }
+
+    async with aiohttp.ClientSession(connector=aio_connector) as session:
+        with aioresponses() as m:
+            m.get(url, status=403, payload=error_body, repeat=True)
+            with pytest.raises(UsageLimitReached):
+                await request("GET", url, session)
+
+
+@pytest.mark.asyncio
+async def test_403_without_usage_limits_domain(aio_connector):
+    """A 403 JSON error without the Google 'usageLimits' domain should not be
+    treated as UsageLimitReached and should surface as ClientResponseError.
+    """
+    url = "http://127.0.0.1/test_403_no_usage_limits"
+
+    async with aiohttp.ClientSession(connector=aio_connector) as session:
+        with aioresponses() as m:
+            m.get(url, status=403, repeat=True)
+            with pytest.raises(aiohttp.ClientResponseError):
+                await request("GET", url, session)

View file

@@ -20,7 +20,7 @@ class CombinedStorage(Storage):
     storage_name = "http_and_singlefile"

     def __init__(self, url, path, *, connector, **kwargs):
-        if kwargs.get("collection", None) is not None:
+        if kwargs.get("collection") is not None:
             raise ValueError
         super().__init__(**kwargs)

View file

@@ -26,7 +26,7 @@ def read_config(tmpdir, monkeypatch):

 def test_read_config(read_config):
-    errors, c = read_config(
+    _errors, c = read_config(
         """
         [general]
         status_path = "/tmp/status/"
@@ -222,3 +222,62 @@ def test_validate_collections_param():
     x([["c", None, "b"]])
     x([["c", "a", None]])
     x([["c", None, None]])
+
+
+def test_invalid_implicit_value(read_config):
+    expected_message = "`implicit` parameter must be 'create' or absent"
+    with pytest.raises(exceptions.UserError) as excinfo:
+        read_config(
+            """
+            [general]
+            status_path = "/tmp/status/"
+
+            [pair my_pair]
+            a = "my_a"
+            b = "my_b"
+            collections = null
+            implicit = "invalid"
+
+            [storage my_a]
+            type = "filesystem"
+            path = "{base}/path_a/"
+            fileext = ".txt"
+
+            [storage my_b]
+            type = "filesystem"
+            path = "{base}/path_b/"
+            fileext = ".txt"
+            """
+        )
+    assert expected_message in str(excinfo.value)
+
+
+def test_implicit_create_only(read_config):
+    """Test that implicit create works."""
+    errors, c = read_config(
+        """
+        [general]
+        status_path = "/tmp/status/"
+
+        [pair my_pair]
+        a = "my_a"
+        b = "my_b"
+        collections = ["from a", "from b"]
+        implicit = "create"
+
+        [storage my_a]
+        type = "filesystem"
+        path = "{base}/path_a/"
+        fileext = ".txt"
+
+        [storage my_b]
+        type = "filesystem"
+        path = "{base}/path_b/"
+        fileext = ".txt"
+        """
+    )
+    assert not errors
+
+    pair = c.pairs["my_pair"]
+    assert pair.implicit == "create"

View file

@@ -161,12 +161,12 @@ def test_null_collection_with_named_collection(tmpdir, runner):

             [storage foo]
             type = "filesystem"
-            path = "{str(tmpdir)}/foo/"
+            path = "{tmpdir!s}/foo/"
             fileext = ".txt"

             [storage bar]
             type = "singlefile"
-            path = "{str(tmpdir)}/bar.txt"
+            path = "{tmpdir!s}/bar.txt"
             """
         )
     )
@@ -191,7 +191,7 @@ def test_null_collection_with_named_collection(tmpdir, runner):

 @pytest.mark.parametrize(
-    "a_requires,b_requires",
+    ("a_requires", "b_requires"),
     [
         (True, True),
         (True, False),

View file

@@ -14,12 +14,12 @@ def test_get_password_from_command(tmpdir, runner):

             [storage foo]
             type.fetch = ["shell", "echo filesystem"]
-            path = "{str(tmpdir)}/foo/"
+            path = "{tmpdir!s}/foo/"
             fileext.fetch = ["command", "echo", ".txt"]

             [storage bar]
             type = "filesystem"
-            path = "{str(tmpdir)}/bar/"
+            path = "{tmpdir!s}/bar/"
             fileext.fetch = ["prompt", "Fileext for bar"]
             """
         )

View file

@@ -58,7 +58,7 @@ def test_repair_uids(storage, runner, repair_uids):
     else:
         opt = ["--no-repair-unsafe-uid"]

-    result = runner.invoke(["repair"] + opt + ["foo"], input="y")
+    result = runner.invoke(["repair", *opt, "foo"], input="y")
     assert not result.exception

     if repair_uids:

View file

@@ -90,9 +90,7 @@ def test_empty_storage(tmpdir, runner):
     result = runner.invoke(["sync"])
     lines = result.output.splitlines()
     assert lines[0] == "Syncing my_pair"
-    assert lines[1].startswith(
-        "error: my_pair: " 'Storage "my_b" was completely emptied.'
-    )
+    assert lines[1].startswith('error: my_pair: Storage "my_b" was completely emptied.')
     assert result.exception
@@ -290,12 +288,12 @@ def test_create_collections(collections, tmpdir, runner):

             [storage foo]
             type = "filesystem"
-            path = "{str(tmpdir)}/foo/"
+            path = "{tmpdir!s}/foo/"
             fileext = ".txt"

             [storage bar]
             type = "filesystem"
-            path = "{str(tmpdir)}/bar/"
+            path = "{tmpdir!s}/bar/"
             fileext = ".txt"
             """
         )
@@ -323,12 +321,12 @@ def test_ident_conflict(tmpdir, runner):

             [storage foo]
             type = "filesystem"
-            path = "{str(tmpdir)}/foo/"
+            path = "{tmpdir!s}/foo/"
             fileext = ".txt"

             [storage bar]
             type = "filesystem"
-            path = "{str(tmpdir)}/bar/"
+            path = "{tmpdir!s}/bar/"
             fileext = ".txt"
             """
         )
@@ -360,7 +358,7 @@ def test_ident_conflict(tmpdir, runner):

 @pytest.mark.parametrize(
-    "existing,missing",
+    ("existing", "missing"),
     [
         ("foo", "bar"),
         ("bar", "foo"),
@@ -377,7 +375,7 @@ def test_unknown_storage(tmpdir, runner, existing, missing):

             [storage {existing}]
             type = "filesystem"
-            path = "{str(tmpdir)}/{existing}/"
+            path = "{tmpdir!s}/{existing}/"
             fileext = ".txt"
             """
         )
@@ -404,7 +402,7 @@ def test_no_configured_pairs(tmpdir, runner, cmd):

 @pytest.mark.parametrize(
-    "resolution,expect_foo,expect_bar",
+    ("resolution", "expect_foo", "expect_bar"),
     [(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
 )
 def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
@@ -420,12 +418,12 @@ def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):

             [storage foo]
             type = "filesystem"
             fileext = ".txt"
-            path = "{str(tmpdir)}/foo"
+            path = "{tmpdir!s}/foo"

             [storage bar]
             type = "filesystem"
             fileext = ".txt"
-            path = "{str(tmpdir)}/bar"
+            path = "{tmpdir!s}/bar"
             """
         )
     )
@@ -520,7 +518,7 @@ def test_fetch_only_necessary_params(tmpdir, runner):
         dedent(
             f"""
             set -e
-            touch "{str(fetched_file)}"
+            touch "{fetched_file!s}"
             echo ".txt"
             """
         )
@@ -553,9 +551,7 @@ def test_fetch_only_necessary_params(tmpdir, runner):
             type = "filesystem"
             path = "{path}"
             fileext.fetch = ["command", "sh", "{script}"]
-            """.format(
-                path=str(tmpdir.mkdir("bogus")), script=str(fetch_script)
-            )
+            """.format(path=str(tmpdir.mkdir("bogus")), script=str(fetch_script))
         )
     )

View file

@@ -14,7 +14,7 @@ def test_handle_cli_error(capsys):
     except BaseException:
         handle_cli_error()

-    out, err = capsys.readouterr()
+    _out, err = capsys.readouterr()
     assert "returned something vdirsyncer doesn't understand" in err
     assert "ayy lmao" in err

View file

@@ -9,7 +9,7 @@ missing = object()

 @pytest.mark.parametrize(
-    "shortcuts,expected",
+    ("shortcuts", "expected"),
     [
         (
            ["from a"],

View file

@@ -1,5 +1,7 @@
 from __future__ import annotations

+import contextlib
+
 import hypothesis.strategies as st
 from hypothesis import assume
 from hypothesis import given
@@ -24,13 +26,13 @@ def test_legacy_status(status_dict):
     hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()}
     hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()}
     assume(len(hrefs_a) == len(status_dict) == len(hrefs_b))
-    status = SqliteStatus()
-    status.load_legacy_status(status_dict)
-    assert dict(status.to_legacy_status()) == status_dict
-    for ident, (meta_a, meta_b) in status_dict.items():
-        ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
-        ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
-        assert meta2_a.to_status() == meta_a
-        assert meta2_b.to_status() == meta_b
-        assert ident_a == ident_b == ident
+    with contextlib.closing(SqliteStatus()) as status:
+        status.load_legacy_status(status_dict)
+        assert dict(status.to_legacy_status()) == status_dict
+        for ident, (meta_a, meta_b) in status_dict.items():
+            ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
+            ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
+            assert meta2_a.to_status() == meta_a
+            assert meta2_b.to_status() == meta_b
+            assert ident_a == ident_b == ident

View file

@@ -1,6 +1,7 @@
 from __future__ import annotations

 import asyncio
+import contextlib
 from copy import deepcopy

 import aiostream
@@ -25,13 +26,12 @@ from vdirsyncer.sync.status import SqliteStatus
 from vdirsyncer.vobject import Item


-async def sync(a, b, status, *args, **kwargs):
-    new_status = SqliteStatus(":memory:")
-    new_status.load_legacy_status(status)
-    rv = await _sync(a, b, new_status, *args, **kwargs)
-    status.clear()
-    status.update(new_status.to_legacy_status())
-    return rv
+async def sync(a, b, status, *args, **kwargs) -> None:
+    with contextlib.closing(SqliteStatus(":memory:")) as new_status:
+        new_status.load_legacy_status(status)
+        await _sync(a, b, new_status, *args, **kwargs)
+        status.clear()
+        status.update(new_status.to_legacy_status())


 def empty_storage(x):
@@ -98,7 +98,8 @@ async def test_read_only_and_prefetch():
     await sync(a, b, status, force_delete=True)
     await sync(a, b, status, force_delete=True)
-    assert not items(a) and not items(b)
+    assert not items(a)
+    assert not items(b)


 @pytest.mark.asyncio
@@ -226,7 +227,8 @@ async def test_insert_hash():
     await a.update(href, Item("UID:1\nHAHA:YES"), etag)
     await sync(a, b, status)
-    assert "hash" in status["1"][0] and "hash" in status["1"][1]
+    assert "hash" in status["1"][0]
+    assert "hash" in status["1"][1]


 @pytest.mark.asyncio
@@ -346,7 +348,7 @@ async def test_uses_get_multi(monkeypatch):
     a = MemoryStorage()
     b = MemoryStorage()
     item = Item("UID:1")
-    expected_href, etag = await a.upload(item)
+    expected_href, _etag = await a.upload(item)
     await sync(a, b, {})
     assert get_multi_calls == [[expected_href]]
@@ -383,7 +385,7 @@ async def test_changed_uids():
     a = MemoryStorage()
     b = MemoryStorage()
     href_a, etag_a = await a.upload(Item("UID:A-ONE"))
-    href_b, etag_b = await b.upload(Item("UID:B-ONE"))
+    _href_b, _etag_b = await b.upload(Item("UID:B-ONE"))
     status = {}
     await sync(a, b, status)
@@ -437,7 +439,7 @@ async def test_partial_sync_revert():
     assert items(a) == {"UID:2"}


-@pytest.mark.parametrize("sync_inbetween", (True, False))
+@pytest.mark.parametrize("sync_inbetween", [True, False])
 @pytest.mark.asyncio
 async def test_ident_conflict(sync_inbetween):
     a = MemoryStorage()
@@ -467,7 +469,7 @@ async def test_moved_href():
     a = MemoryStorage()
     b = MemoryStorage()
     status = {}
-    href, etag = await a.upload(Item("UID:haha"))
+    _href, _etag = await a.upload(Item("UID:haha"))
     await sync(a, b, status)

     b.items["lol"] = b.items.pop("haha")
@@ -528,7 +530,7 @@ async def test_unicode_hrefs():
     a = MemoryStorage()
     b = MemoryStorage()
     status = {}
-    href, etag = await a.upload(Item("UID:äää"))
+    _href, _etag = await a.upload(Item("UID:äää"))
     await sync(a, b, status)
@@ -551,7 +553,7 @@ class SyncMachine(RuleBasedStateMachine):
         if flaky_etags:

             async def get(href):
-                old_etag, item = s.items[href]
+                _old_etag, item = s.items[href]
                 etag = _random_string()
                 s.items[href] = etag, item
                 return item, etag
@@ -642,10 +644,7 @@
         errors = []
-        if with_error_callback:
-            error_callback = errors.append
-        else:
-            error_callback = None
+        error_callback = errors.append if with_error_callback else None

         try:
             # If one storage is read-only, double-sync because changes don't
@@ -668,7 +667,8 @@
         except ActionIntentionallyFailed:
pass pass
except BothReadOnly: except BothReadOnly:
assert a.read_only and b.read_only assert a.read_only
assert b.read_only
assume(False) assume(False)
except StorageEmpty: except StorageEmpty:
if force_delete: if force_delete:

View file

@ -1,5 +1,7 @@
from __future__ import annotations from __future__ import annotations
import asyncio
import hypothesis.strategies as st import hypothesis.strategies as st
import pytest import pytest
import pytest_asyncio import pytest_asyncio
@ -33,7 +35,8 @@ async def test_basic(monkeypatch):
await a.set_meta("foo", None) await a.set_meta("foo", None)
await metasync(a, b, status, keys=["foo"]) await metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") is None and await b.get_meta("foo") is None assert await a.get_meta("foo") is None
assert await b.get_meta("foo") is None
await a.set_meta("foo", "bar") await a.set_meta("foo", "bar")
await metasync(a, b, status, keys=["foo"]) await metasync(a, b, status, keys=["foo"])
@ -52,27 +55,24 @@ async def test_basic(monkeypatch):
await b.set_meta("foo", None) await b.set_meta("foo", None)
await metasync(a, b, status, keys=["foo"]) await metasync(a, b, status, keys=["foo"])
assert not await a.get_meta("foo") and not await b.get_meta("foo") assert not await a.get_meta("foo")
assert not await b.get_meta("foo")
@pytest_asyncio.fixture @pytest_asyncio.fixture
@pytest.mark.asyncio async def conflict_state(request):
async def conflict_state(request, event_loop):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
await a.set_meta("foo", "bar") await a.set_meta("foo", "bar")
await b.set_meta("foo", "baz") await b.set_meta("foo", "baz")
def cleanup(): async def do_cleanup():
async def do_cleanup(): assert await a.get_meta("foo") == "bar"
assert await a.get_meta("foo") == "bar" assert await b.get_meta("foo") == "baz"
assert await b.get_meta("foo") == "baz" assert not status
assert not status
event_loop.run_until_complete(do_cleanup()) request.addfinalizer(lambda: asyncio.run(do_cleanup()))
request.addfinalizer(cleanup)
return a, b, status return a, b, status
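The fixture above moves its async teardown out of the removed `event_loop` fixture and into a plain finalizer that calls `asyncio.run`. A minimal sketch of that pattern with a dummy resource (plain pytest, no real storages):

import asyncio
import pytest

@pytest.fixture
def resource(request):
    state = {"open": True}

    async def do_cleanup():
        # Pretend this awaits some asynchronous teardown.
        state["open"] = False

    # asyncio.run gives the synchronous finalizer its own event loop.
    request.addfinalizer(lambda: asyncio.run(do_cleanup()))
    return state

def test_resource(resource):
    assert resource["open"]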

View file

@ -17,7 +17,7 @@ from vdirsyncer.vobject import Item
@given(uid=uid_strategy) @given(uid=uid_strategy)
# Using the random module for UIDs: # Using the random module for UIDs:
@settings(suppress_health_check=HealthCheck.all()) @settings(suppress_health_check=list(HealthCheck))
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_repair_uids(uid): async def test_repair_uids(uid):
s = MemoryStorage() s = MemoryStorage()
@ -40,12 +40,12 @@ async def test_repair_uids(uid):
@given(uid=uid_strategy.filter(lambda x: not href_safe(x))) @given(uid=uid_strategy.filter(lambda x: not href_safe(x)))
# Using the random module for UIDs: # Using the random module for UIDs:
@settings(suppress_health_check=HealthCheck.all()) @settings(suppress_health_check=list(HealthCheck))
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_repair_unsafe_uids(uid): async def test_repair_unsafe_uids(uid):
s = MemoryStorage() s = MemoryStorage()
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
href, etag = await s.upload(item) href, _etag = await s.upload(item)
assert (await s.get(href))[0].uid == uid assert (await s.get(href))[0].uid == uid
assert not href_safe(uid) assert not href_safe(uid)
@ -58,7 +58,7 @@ async def test_repair_unsafe_uids(uid):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"uid,href", [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")] ("uid", "href"), [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
) )
def test_repair_unsafe_href(uid, href): def test_repair_unsafe_href(uid, href):
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")

tests/unit/test_retry.py (new file, 136 lines)
View file

@ -0,0 +1,136 @@
from __future__ import annotations
import json
from unittest.mock import AsyncMock
from unittest.mock import Mock
import aiohttp
import pytest
from vdirsyncer.http import UsageLimitReached
from vdirsyncer.http import request
async def _create_mock_response(status: int, body: str | dict):
raw_body = body
text_body = json.dumps(body) if isinstance(body, dict) else body
mock_response = AsyncMock()
mock_response.status = status
mock_response.ok = 200 <= status < 300
mock_response.reason = "OK" if mock_response.ok else "Forbidden"
mock_response.headers = (
{"Content-Type": "application/json"}
if isinstance(raw_body, dict)
else {"Content-Type": "text/plain"}
)
mock_response.text.return_value = text_body
if isinstance(raw_body, dict):
mock_response.json.return_value = raw_body
else:
mock_response.json.side_effect = ValueError("Not JSON")
mock_response.raise_for_status = Mock(
side_effect=(
aiohttp.ClientResponseError(
request_info=AsyncMock(),
history=(),
status=status,
message=mock_response.reason,
headers=mock_response.headers,
)
if not mock_response.ok
else None
)
)
return mock_response
@pytest.mark.asyncio
async def test_request_retry_on_usage_limit():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate (max_retries - 1) 403 errors and then a 200 OK
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries - 1)
] + [await _create_mock_response(200, "OK")]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
response = await request("GET", url, mock_session)
assert response.status == 200
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_retry_exceeds_max_attempts():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate max_retries consecutive 403 errors, with no successful response
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries)
]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
with pytest.raises(UsageLimitReached):
await request("GET", url, mock_session)
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_json():
url = "http://example.com/api"
mock_session = AsyncMock()
# Generic non-Google 403 error payload (e.g., GitHub-style)
mock_session.request.side_effect = [
await _create_mock_response(403, {"message": "API rate limit exceeded"})
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because it's not the Google quotaExceeded shape
assert mock_session.request.call_count == 1
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_text():
url = "http://example.com/api"
mock_session = AsyncMock()
# Plain-text 403 body mentioning rate limits, but not structured as a Google error
mock_session.request.side_effect = [
await _create_mock_response(403, "Rate limit exceeded")
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because the JSON shape is not Google quotaExceeded
assert mock_session.request.call_count == 1

View file

@ -25,7 +25,7 @@ _simple_split = [
] ]
_simple_joined = "\r\n".join( _simple_joined = "\r\n".join(
["BEGIN:VADDRESSBOOK"] + _simple_split + ["END:VADDRESSBOOK\r\n"] ["BEGIN:VADDRESSBOOK", *_simple_split, "END:VADDRESSBOOK\r\n"]
) )
@ -124,7 +124,7 @@ def test_split_collection_timezones():
"END:VTIMEZONE" "END:VTIMEZONE"
) )
full = "\r\n".join(["BEGIN:VCALENDAR"] + items + [timezone, "END:VCALENDAR"]) full = "\r\n".join(["BEGIN:VCALENDAR", *items, timezone, "END:VCALENDAR"])
given = {normalize_item(item) for item in vobject.split_collection(full)} given = {normalize_item(item) for item in vobject.split_collection(full)}
expected = { expected = {
@ -154,7 +154,7 @@ def test_hash_item():
def test_multiline_uid(benchmark): def test_multiline_uid(benchmark):
a = "BEGIN:FOO\r\n" "UID:123456789abcd\r\n" " efgh\r\n" "END:FOO\r\n" a = "BEGIN:FOO\r\nUID:123456789abcd\r\n efgh\r\nEND:FOO\r\n"
assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh" assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh"
@ -299,7 +299,7 @@ def test_input_types():
value_strategy = st.text( value_strategy = st.text(
st.characters( st.characters(
blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), blacklist_characters=":=" exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), exclude_characters=":="
), ),
min_size=1, min_size=1,
).filter(lambda x: x.strip() == x) ).filter(lambda x: x.strip() == x)
@ -335,7 +335,8 @@ class VobjectMachine(RuleBasedStateMachine):
assert key in c assert key in c
assert c.get(key) == value assert c.get(key) == value
dump = "\r\n".join(c.dump_lines()) dump = "\r\n".join(c.dump_lines())
assert key in dump and value in dump assert key in dump
assert value in dump
@rule( @rule(
c=Parsed, c=Parsed,
@ -365,6 +366,16 @@ class VobjectMachine(RuleBasedStateMachine):
TestVobjectMachine = VobjectMachine.TestCase TestVobjectMachine = VobjectMachine.TestCase
def test_dupe_consecutive_keys():
state = VobjectMachine()
unparsed_0 = state.get_unparsed_lines(encoded=False, joined=False)
parsed_0 = state.parse(unparsed=unparsed_0)
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop(c=parsed_0, key="0", value="1")
state.teardown()
def test_component_contains(): def test_component_contains():
item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"]) item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"])
@ -372,4 +383,4 @@ def test_component_contains():
assert "BAZ" not in item assert "BAZ" not in item
with pytest.raises(ValueError): with pytest.raises(ValueError):
42 in item # noqa: B015 42 in item # noqa: B015, this check raises.

View file

@ -9,7 +9,7 @@ BUGTRACKER_HOME = PROJECT_HOME + "/issues"
DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable" DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable"
try: try:
from .version import version as __version__ # noqa from .version import version as __version__
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
raise ImportError( raise ImportError(
"Failed to find (autogenerated) version.py. " "Failed to find (autogenerated) version.py. "
@ -17,12 +17,14 @@ except ImportError: # pragma: no cover
"use the PyPI ones." "use the PyPI ones."
) )
__all__ = ["__version__"]
def _check_python_version(): def _check_python_version():
import sys import sys
if sys.version_info < (3, 7, 0): # noqa: UP036 if sys.version_info < (3, 9, 0): # noqa: UP036
print("vdirsyncer requires at least Python 3.7.") print("vdirsyncer requires at least Python 3.9.")
sys.exit(1) sys.exit(1)

View file

@ -10,8 +10,8 @@ import aiohttp
import click import click
import click_log import click_log
from .. import BUGTRACKER_HOME from vdirsyncer import BUGTRACKER_HOME
from .. import __version__ from vdirsyncer import __version__
cli_logger = logging.getLogger(__name__) cli_logger = logging.getLogger(__name__)
click_log.basic_config("vdirsyncer") click_log.basic_config("vdirsyncer")
@ -147,7 +147,14 @@ def sync(ctx, collections, force_delete):
) )
) )
await asyncio.gather(*tasks) # `return_exceptions=True` ensures that the event loop lives long enough for
# backoffs to be able to finish
gathered = await asyncio.gather(*tasks, return_exceptions=True)
# but now we need to manually check for and propagate a single failure after
# allowing all tasks to finish in order to keep exit status non-zero
failures = [e for e in gathered if isinstance(e, BaseException)]
if failures:
raise failures[0]
asyncio.run(main(collections)) asyncio.run(main(collections))
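The hunk above changes the sync CLI to gather all collection tasks with `return_exceptions=True` so in-flight backoffs can finish, then re-raises the first failure to keep a non-zero exit status. A minimal self-contained sketch of that pattern (plain asyncio, not the actual CLI code):

import asyncio

async def ok():
    return "done"

async def boom():
    raise RuntimeError("sync failed")

async def main():
    # Let every task run to completion; failures come back as values.
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    failures = [r for r in results if isinstance(r, BaseException)]
    if failures:
        # Re-raise the first failure so the caller still sees an error.
        raise failures[0]

try:
    asyncio.run(main())
except RuntimeError as e:
    print(f"propagated after all tasks finished: {e}")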

View file

@ -3,17 +3,18 @@ from __future__ import annotations
import json import json
import os import os
import string import string
from collections.abc import Generator
from configparser import RawConfigParser from configparser import RawConfigParser
from functools import cached_property
from itertools import chain from itertools import chain
from typing import IO from typing import IO
from typing import Any from typing import Any
from typing import Generator
from .. import PROJECT_HOME from vdirsyncer import PROJECT_HOME
from .. import exceptions from vdirsyncer import exceptions
from ..utils import cached_property from vdirsyncer.utils import expand_path
from ..utils import expand_path from vdirsyncer.vobject import Item
from ..vobject import Item
from .fetchparams import expand_fetch_params from .fetchparams import expand_fetch_params
from .utils import storage_class_from_config from .utils import storage_class_from_config
@ -92,7 +93,15 @@ def _validate_collections_param(collections):
raise ValueError("Duplicate value.") raise ValueError("Duplicate value.")
collection_names.add(collection_name) collection_names.add(collection_name)
except ValueError as e: except ValueError as e:
raise ValueError(f"`collections` parameter, position {i}: {str(e)}") raise ValueError(f"`collections` parameter, position {i}: {e!s}")
def _validate_implicit_param(implicit):
if implicit is None:
return
if implicit != "create":
raise ValueError("`implicit` parameter must be 'create' or absent.")
class _ConfigReader: class _ConfigReader:
@ -141,7 +150,7 @@ class _ConfigReader:
dict(_parse_options(self._parser.items(section), section=section)), dict(_parse_options(self._parser.items(section), section=section)),
) )
except ValueError as e: except ValueError as e:
raise exceptions.UserError(f'Section "{section}": {str(e)}') raise exceptions.UserError(f'Section "{section}": {e!s}')
_validate_general_section(self._general) _validate_general_section(self._general)
if getattr(self._file, "name", None): if getattr(self._file, "name", None):
@ -229,9 +238,10 @@ class PairConfig:
self.name: str = name self.name: str = name
self.name_a: str = options.pop("a") self.name_a: str = options.pop("a")
self.name_b: str = options.pop("b") self.name_b: str = options.pop("b")
self.implicit = options.pop("implicit", None)
self._partial_sync: str | None = options.pop("partial_sync", None) self._partial_sync: str | None = options.pop("partial_sync", None)
self.metadata = options.pop("metadata", None) or () self.metadata: str | tuple[()] = options.pop("metadata", ())
self.conflict_resolution = self._process_conflict_resolution_param( self.conflict_resolution = self._process_conflict_resolution_param(
options.pop("conflict_resolution", None) options.pop("conflict_resolution", None)
@ -247,6 +257,7 @@ class PairConfig:
) )
else: else:
_validate_collections_param(self.collections) _validate_collections_param(self.collections)
_validate_implicit_param(self.implicit)
if options: if options:
raise ValueError("Unknown options: {}".format(", ".join(options))) raise ValueError("Unknown options: {}".format(", ".join(options)))
@ -256,7 +267,7 @@ class PairConfig:
): ):
if conflict_resolution in (None, "a wins", "b wins"): if conflict_resolution in (None, "a wins", "b wins"):
return conflict_resolution return conflict_resolution
elif ( if (
isinstance(conflict_resolution, list) isinstance(conflict_resolution, list)
and len(conflict_resolution) > 1 and len(conflict_resolution) > 1
and conflict_resolution[0] == "command" and conflict_resolution[0] == "command"
@ -270,8 +281,7 @@ class PairConfig:
return _resolve_conflict_via_command(a, b, command, a_name, b_name) return _resolve_conflict_via_command(a, b, command, a_name, b_name)
return resolve return resolve
else: raise ValueError("Invalid value for `conflict_resolution`.")
raise ValueError("Invalid value for `conflict_resolution`.")
# The following parameters are lazily evaluated because evaluating # The following parameters are lazily evaluated because evaluating
# self.config_a would expand all `x.fetch` parameters. This is costly and # self.config_a would expand all `x.fetch` parameters. This is costly and
@ -338,7 +348,7 @@ def _resolve_conflict_via_command(
if _check_call is None: if _check_call is None:
from subprocess import check_call as _check_call from subprocess import check_call as _check_call
from ..vobject import Item from vdirsyncer.vobject import Item
dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.") dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.")
try: try:
@ -351,7 +361,7 @@ def _resolve_conflict_via_command(
f.write(b.raw) f.write(b.raw)
command[0] = expand_path(command[0]) command[0] = expand_path(command[0])
_check_call(command + [a_tmp, b_tmp]) _check_call([*command, a_tmp, b_tmp])
with open(a_tmp) as f: with open(a_tmp) as f:
new_a = f.read() new_a = f.read()
@ -359,7 +369,7 @@ def _resolve_conflict_via_command(
new_b = f.read() new_b = f.read()
if new_a != new_b: if new_a != new_b:
raise exceptions.UserError("The two files are not completely " "equal.") raise exceptions.UserError("The two files are not completely equal.")
return Item(new_a) return Item(new_a)
finally: finally:
shutil.rmtree(dir) shutil.rmtree(dir)
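The new `implicit` pair option added above is validated by `_validate_implicit_param`, which only accepts "create" (or the option being absent). An illustrative pair section showing where the option would live; the pair and storage names here are made up:

[pair my_contacts]
a = "my_contacts_local"
b = "my_contacts_remote"
collections = ["from a", "from b"]
# Auto-create collections that exist on only one side instead of prompting.
implicit = "create"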

View file

@ -9,7 +9,8 @@ import sys
import aiohttp import aiohttp
import aiostream import aiostream
from .. import exceptions from vdirsyncer import exceptions
from .utils import handle_collection_not_found from .utils import handle_collection_not_found
from .utils import handle_storage_init_error from .utils import handle_storage_init_error
from .utils import load_status from .utils import load_status
@ -59,22 +60,20 @@ async def collections_for_pair(
cache_key = _get_collections_cache_key(pair) cache_key = _get_collections_cache_key(pair)
if from_cache: if from_cache:
rv = load_status(status_path, pair.name, data_type="collections") rv = load_status(status_path, pair.name, data_type="collections")
if rv.get("cache_key", None) == cache_key: if rv and rv.get("cache_key", None) == cache_key:
return list( return list(
_expand_collections_cache( _expand_collections_cache(
rv["collections"], pair.config_a, pair.config_b rv["collections"], pair.config_a, pair.config_b
) )
) )
elif rv: if rv:
raise exceptions.UserError( raise exceptions.UserError(
"Detected change in config file, " "Detected change in config file, "
f"please run `vdirsyncer discover {pair.name}`." f"please run `vdirsyncer discover {pair.name}`."
) )
else: raise exceptions.UserError(
raise exceptions.UserError( f"Please run `vdirsyncer discover {pair.name}` before synchronization."
f"Please run `vdirsyncer discover {pair.name}` " )
" before synchronization."
)
logger.info(f"Discovering collections for pair {pair.name}") logger.info(f"Discovering collections for pair {pair.name}")
@ -94,16 +93,23 @@ async def collections_for_pair(
connector=connector, connector=connector,
) )
async def _handle_collection_not_found(
config, collection, e=None, implicit_create=False
):
return await handle_collection_not_found(
config, collection, e=e, implicit_create=pair.implicit == "create"
)
# We have to use a list here because the special None/null value would get # We have to use a list here because the special None/null value would get
# mangled to string (because JSON objects always have string keys). # mangled to string (because JSON objects always have string keys).
rv = await aiostream.stream.list( rv = await aiostream.stream.list( # type: ignore[assignment]
expand_collections( expand_collections(
shortcuts=pair.collections, shortcuts=pair.collections,
config_a=pair.config_a, config_a=pair.config_a,
config_b=pair.config_b, config_b=pair.config_b,
get_a_discovered=a_discovered.get_self, get_a_discovered=a_discovered.get_self,
get_b_discovered=b_discovered.get_self, get_b_discovered=b_discovered.get_self,
_handle_collection_not_found=handle_collection_not_found, _handle_collection_not_found=_handle_collection_not_found,
) )
) )

View file

@ -4,9 +4,10 @@ import logging
import click import click
from .. import exceptions from vdirsyncer import exceptions
from ..utils import expand_path from vdirsyncer.utils import expand_path
from ..utils import synchronized from vdirsyncer.utils import synchronized
from . import AppContext from . import AppContext
SUFFIX = ".fetch" SUFFIX = ".fetch"
@ -87,7 +88,7 @@ def _strategy_command(*command: str, shell: bool = False):
return stdout.strip("\n") return stdout.strip("\n")
except OSError as e: except OSError as e:
cmd = " ".join(expanded_command) cmd = " ".join(expanded_command)
raise exceptions.UserError(f"Failed to execute command: {cmd}\n{str(e)}") raise exceptions.UserError(f"Failed to execute command: {cmd}\n{e!s}")
def _strategy_shell(*command: str): def _strategy_shell(*command: str):

View file

@ -4,8 +4,9 @@ import json
import aiohttp import aiohttp
from .. import exceptions from vdirsyncer import exceptions
from .. import sync from vdirsyncer import sync
from .config import CollectionConfig from .config import CollectionConfig
from .discover import DiscoverResult from .discover import DiscoverResult
from .discover import collections_for_pair from .discover import collections_for_pair
@ -35,10 +36,8 @@ async def prepare_pair(pair_name, collections, config, *, connector):
config_a, config_b = all_collections[collection_name] config_a, config_b = all_collections[collection_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
"Pair {}: Collection {} not found. These are the " f"Pair {pair_name}: Collection {json.dumps(collection_name)} not found."
"configured collections:\n{}".format( f"These are the configured collections:\n{list(all_collections)}"
pair_name, json.dumps(collection_name), list(all_collections)
)
) )
collection = CollectionConfig(pair, collection_name, config_a, config_b) collection = CollectionConfig(pair, collection_name, config_a, config_b)
@ -105,7 +104,7 @@ async def repair_collection(
*, *,
connector: aiohttp.TCPConnector, connector: aiohttp.TCPConnector,
): ):
from ..repair import repair_storage from vdirsyncer.repair import repair_storage
storage_name, collection = collection, None storage_name, collection = collection, None
if "/" in storage_name: if "/" in storage_name:
@ -136,7 +135,7 @@ async def repair_collection(
async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector): async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector):
from ..metasync import metasync from vdirsyncer.metasync import metasync
pair = collection.pair pair = collection.pair
status_name = get_status_name(pair.name, collection.name) status_name = get_status_name(pair.name, collection.name)

View file

@ -10,19 +10,20 @@ from typing import Any
import aiohttp import aiohttp
import click import click
from atomicwrites import atomic_write
from .. import BUGTRACKER_HOME from vdirsyncer import BUGTRACKER_HOME
from .. import DOCS_HOME from vdirsyncer import DOCS_HOME
from .. import exceptions from vdirsyncer import exceptions
from ..storage.base import Storage from vdirsyncer.storage.base import Storage
from ..sync.exceptions import IdentConflict from vdirsyncer.sync.exceptions import IdentConflict
from ..sync.exceptions import PartialSync from vdirsyncer.sync.exceptions import PartialSync
from ..sync.exceptions import StorageEmpty from vdirsyncer.sync.exceptions import StorageEmpty
from ..sync.exceptions import SyncConflict from vdirsyncer.sync.exceptions import SyncConflict
from ..sync.status import SqliteStatus from vdirsyncer.sync.status import SqliteStatus
from ..utils import expand_path from vdirsyncer.utils import atomic_write
from ..utils import get_storage_init_args from vdirsyncer.utils import expand_path
from vdirsyncer.utils import get_storage_init_args
from . import cli_logger from . import cli_logger
STATUS_PERMISSIONS = 0o600 STATUS_PERMISSIONS = 0o600
@ -30,7 +31,7 @@ STATUS_DIR_PERMISSIONS = 0o700
class _StorageIndex: class _StorageIndex:
def __init__(self): def __init__(self) -> None:
self._storages: dict[str, str] = { self._storages: dict[str, str] = {
"caldav": "vdirsyncer.storage.dav.CalDAVStorage", "caldav": "vdirsyncer.storage.dav.CalDAVStorage",
"carddav": "vdirsyncer.storage.dav.CardDAVStorage", "carddav": "vdirsyncer.storage.dav.CardDAVStorage",
@ -78,13 +79,11 @@ def handle_cli_error(status_name=None, e=None):
cli_logger.critical(e) cli_logger.critical(e)
except StorageEmpty as e: except StorageEmpty as e:
cli_logger.error( cli_logger.error(
'{status_name}: Storage "{name}" was completely emptied. If you ' f'{status_name}: Storage "{e.empty_storage.instance_name}" was '
"want to delete ALL entries on BOTH sides, then use " "completely emptied. If you want to delete ALL entries on BOTH sides,"
"`vdirsyncer sync --force-delete {status_name}`. " f"then use `vdirsyncer sync --force-delete {status_name}`. "
"Otherwise delete the files for {status_name} in your status " f"Otherwise delete the files for {status_name} in your status "
"directory.".format( "directory."
name=e.empty_storage.instance_name, status_name=status_name
)
) )
except PartialSync as e: except PartialSync as e:
cli_logger.error( cli_logger.error(
@ -232,7 +231,8 @@ def manage_sync_status(base_path: str, pair_name: str, collection_name: str):
prepare_status_path(path) prepare_status_path(path)
status = SqliteStatus(path) status = SqliteStatus(path)
yield status with contextlib.closing(status):
yield status
def save_status( def save_status(
@ -286,15 +286,14 @@ async def storage_instance_from_config(
except exceptions.CollectionNotFound as e: except exceptions.CollectionNotFound as e:
if create: if create:
config = await handle_collection_not_found( config = await handle_collection_not_found(
config, config.get("collection", None), e=str(e) config, config.get("collection", None), e=str(e), implicit_create=True
) )
return await storage_instance_from_config( return await storage_instance_from_config(
config, config,
create=False, create=False,
connector=connector, connector=connector,
) )
else: raise
raise
except Exception: except Exception:
return handle_storage_init_error(cls, new_config) return handle_storage_init_error(cls, new_config)
@ -342,7 +341,9 @@ def assert_permissions(path: str, wanted: int) -> None:
os.chmod(path, wanted) os.chmod(path, wanted)
async def handle_collection_not_found(config, collection, e=None): async def handle_collection_not_found(
config, collection, e=None, implicit_create=False
):
storage_name = config.get("instance_name", None) storage_name = config.get("instance_name", None)
cli_logger.warning( cli_logger.warning(
@ -351,7 +352,7 @@ async def handle_collection_not_found(config, collection, e=None):
) )
) )
if click.confirm("Should vdirsyncer attempt to create it?"): if implicit_create or click.confirm("Should vdirsyncer attempt to create it?"):
storage_type = config["type"] storage_type = config["type"]
cls, config = storage_class_from_config(config) cls, config = storage_class_from_config(config)
config["collection"] = collection config["collection"] = collection

View file

@ -1,14 +1,24 @@
from __future__ import annotations from __future__ import annotations
import asyncio
import logging import logging
import os
import platform
import re import re
from abc import ABC, abstractmethod from abc import ABC
from abc import abstractmethod
from base64 import b64encode from base64 import b64encode
from ssl import create_default_context from ssl import create_default_context
import aiohttp import aiohttp
import requests.auth import requests.auth
from aiohttp import ServerDisconnectedError
from aiohttp import ServerTimeoutError
from requests.utils import parse_dict_header from requests.utils import parse_dict_header
from tenacity import retry
from tenacity import retry_if_exception_type
from tenacity import stop_after_attempt
from tenacity import wait_exponential
from . import __version__ from . import __version__
from . import exceptions from . import exceptions
@ -17,6 +27,13 @@ from .utils import expand_path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
USERAGENT = f"vdirsyncer/{__version__}" USERAGENT = f"vdirsyncer/{__version__}"
# 'hack' to prevent aiohttp from loading the netrc config,
# but still allow it to read PROXY_* env vars.
# Otherwise, if our host is defined in the netrc config,
# aiohttp will overwrite our Authorization header.
# https://github.com/pimutils/vdirsyncer/issues/1138
os.environ["NETRC"] = "NUL" if platform.system() == "Windows" else "/dev/null"
class AuthMethod(ABC): class AuthMethod(ABC):
def __init__(self, username, password): def __init__(self, username, password):
@ -34,7 +51,11 @@ class AuthMethod(ABC):
def __eq__(self, other): def __eq__(self, other):
if not isinstance(other, AuthMethod): if not isinstance(other, AuthMethod):
return False return False
return self.__class__ == other.__class__ and self.username == other.username and self.password == other.password return (
self.__class__ == other.__class__
and self.username == other.username
and self.password == other.password
)
class BasicAuthMethod(AuthMethod): class BasicAuthMethod(AuthMethod):
@ -43,20 +64,19 @@ class BasicAuthMethod(AuthMethod):
def get_auth_header(self, _method, _url): def get_auth_header(self, _method, _url):
auth_str = f"{self.username}:{self.password}" auth_str = f"{self.username}:{self.password}"
return "Basic " + b64encode(auth_str.encode('utf-8')).decode("utf-8") return "Basic " + b64encode(auth_str.encode("utf-8")).decode("utf-8")
class DigestAuthMethod(AuthMethod): class DigestAuthMethod(AuthMethod):
# make class var to 'cache' the state, which is more efficient because otherwise # make class var to 'cache' the state, which is more efficient because otherwise
# each request would first require another 'initialization' request. # each request would first require another 'initialization' request.
_auth_helpers = {} _auth_helpers: dict[tuple[str, str], requests.auth.HTTPDigestAuth] = {}
def __init__(self, username, password): def __init__(self, username: str, password: str):
super().__init__(username, password) super().__init__(username, password)
self._auth_helper = self._auth_helpers.get( self._auth_helper = self._auth_helpers.get(
(username, password), (username, password), requests.auth.HTTPDigestAuth(username, password)
requests.auth.HTTPDigestAuth(username, password)
) )
self._auth_helpers[(username, password)] = self._auth_helper self._auth_helpers[(username, password)] = self._auth_helper
@ -78,7 +98,7 @@ class DigestAuthMethod(AuthMethod):
if not self.auth_helper_vars.chal: if not self.auth_helper_vars.chal:
# Need to do init request first # Need to do init request first
return '' return ""
return self._auth_helper.build_digest_header(method, url) return self._auth_helper.build_digest_header(method, url)
@ -87,13 +107,16 @@ def prepare_auth(auth, username, password):
if username and password: if username and password:
if auth == "basic" or auth is None: if auth == "basic" or auth is None:
return BasicAuthMethod(username, password) return BasicAuthMethod(username, password)
elif auth == "digest": if auth == "digest":
return DigestAuthMethod(username, password) return DigestAuthMethod(username, password)
elif auth == "guess": if auth == "guess":
raise exceptions.UserError(f"'Guess' authentication is not supported in this version of vdirsyncer. \n" raise exceptions.UserError(
f"Please explicitly specify either 'basic' or 'digest' auth instead. \n" "'Guess' authentication is not supported in this version of "
f"See the following issue for more information: " "vdirsyncer.\n"
f"https://github.com/pimutils/vdirsyncer/issues/1015") "Please explicitly specify either 'basic' or 'digest' auth instead. \n"
"See the following issue for more information: "
"https://github.com/pimutils/vdirsyncer/issues/1015"
)
else: else:
raise exceptions.UserError(f"Unknown authentication method: {auth}") raise exceptions.UserError(f"Unknown authentication method: {auth}")
elif auth: elif auth:
@ -132,6 +155,64 @@ def prepare_client_cert(cert):
return cert return cert
class TransientNetworkError(exceptions.Error):
"""Transient network condition that should be retried."""
def _is_safe_to_retry_method(method: str) -> bool:
"""Returns True if the HTTP method is safe/idempotent to retry.
We consider these safe for our WebDAV usage:
- GET, HEAD, OPTIONS: standard safe methods
- PROPFIND, REPORT: read-only DAV queries used for listing/fetching
"""
return method.upper() in {"GET", "HEAD", "OPTIONS", "PROPFIND", "REPORT"}
class UsageLimitReached(exceptions.Error):
pass
async def _is_quota_exceeded_google(response: aiohttp.ClientResponse) -> bool:
"""Return True if the response JSON indicates Google-style `usageLimits` exceeded.
Expected shape:
{"error": {"errors": [{"domain": "usageLimits", ...}], ...}}
See https://developers.google.com/workspace/calendar/api/guides/errors#403_usage_limits_exceeded
"""
try:
data = await response.json(content_type=None)
except Exception:
return False
if not isinstance(data, dict):
return False
error = data.get("error")
if not isinstance(error, dict):
return False
errors = error.get("errors")
if not isinstance(errors, list):
return False
for entry in errors:
if isinstance(entry, dict) and entry.get("domain") == "usageLimits":
return True
return False
@retry(
stop=stop_after_attempt(5),
wait=wait_exponential(multiplier=1, min=4, max=10),
retry=(
retry_if_exception_type(UsageLimitReached)
| retry_if_exception_type(TransientNetworkError)
),
reraise=True,
)
async def request( async def request(
method, method,
url, url,
@ -165,7 +246,7 @@ async def request(
logger.debug("=" * 20) logger.debug("=" * 20)
logger.debug(f"{method} {url}") logger.debug(f"{method} {url}")
logger.debug(kwargs.get("headers", {})) logger.debug(kwargs.get("headers", {}))
logger.debug(kwargs.get("data", None)) logger.debug(kwargs.get("data"))
logger.debug("Sending request...") logger.debug("Sending request...")
assert isinstance(kwargs.get("data", b""), bytes) assert isinstance(kwargs.get("data", b""), bytes)
@ -177,23 +258,43 @@ async def request(
kwargs["ssl"] = ssl_context kwargs["ssl"] = ssl_context
headers = kwargs.pop("headers", {}) headers = kwargs.pop("headers", {})
num_401 = 0 response: aiohttp.ClientResponse | None = None
while num_401 < 2: for _attempt in range(2):
if auth: if auth:
headers["Authorization"] = auth.get_auth_header(method, url) headers["Authorization"] = auth.get_auth_header(method, url)
response = await session.request(method, url, headers=headers, **kwargs) try:
response = await session.request(method, url, headers=headers, **kwargs)
except (
ServerDisconnectedError,
ServerTimeoutError,
asyncio.TimeoutError,
) as e:
# Retry only if the method is safe/idempotent for our DAV use
if _is_safe_to_retry_method(method):
logger.debug(
f"Transient network error on {method} {url}: {e}. Will retry."
)
raise TransientNetworkError(str(e)) from e
raise e from None
if response is None:
raise RuntimeError("No HTTP response obtained")
if response.ok or not auth: if response.ok or not auth:
# we don't need to do the 401-loop if we don't do auth in the first place # we don't need to do the 401-loop if we don't do auth in the first place
break break
if response.status == 401: if response.status == 401:
num_401 += 1
auth.handle_401(response) auth.handle_401(response)
# retry once more after handling the 401 challenge
continue
else: else:
# some other error, will be handled later on # some other error, will be handled later on
break break
if response is None:
raise RuntimeError("No HTTP response obtained")
# See https://github.com/kennethreitz/requests/issues/2042 # See https://github.com/kennethreitz/requests/issues/2042
content_type = response.headers.get("Content-Type", "") content_type = response.headers.get("Content-Type", "")
if ( if (
@ -208,10 +309,18 @@ async def request(
logger.debug(response.headers) logger.debug(response.headers)
logger.debug(response.content) logger.debug(response.content)
if logger.getEffectiveLevel() <= logging.DEBUG and response.status >= 400:
# https://github.com/pimutils/vdirsyncer/issues/1186
logger.debug(await response.text())
if response.status == 403 and await _is_quota_exceeded_google(response):
raise UsageLimitReached(response.reason)
if response.status == 412: if response.status == 412:
raise exceptions.PreconditionFailed(response.reason) raise exceptions.PreconditionFailed(response.reason)
if response.status in (404, 410): if response.status in (404, 410):
raise exceptions.NotFoundError(response.reason) raise exceptions.NotFoundError(response.reason)
if response.status == 429:
raise UsageLimitReached(response.reason)
response.raise_for_status() response.raise_for_status()
return response return response
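The `@retry` decorator applied to `request` above comes from tenacity: up to five attempts, exponential backoff between them, and only `UsageLimitReached` or `TransientNetworkError` trigger another try (`reraise=True` surfaces the last error unchanged). A small self-contained sketch of how such a decorated coroutine behaves; the waits are shortened so the demo runs instantly:

import asyncio
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

class TransientNetworkError(Exception):
    """Stand-in for the exception defined in the hunk above."""

calls = {"count": 0}

@retry(
    stop=stop_after_attempt(5),
    wait=wait_exponential(multiplier=0.01, max=0.05),  # shortened for the demo
    retry=retry_if_exception_type(TransientNetworkError),
    reraise=True,
)
async def flaky_request():
    calls["count"] += 1
    if calls["count"] < 3:
        # First two attempts fail with a retryable error, then we "succeed".
        raise TransientNetworkError("server disconnected")
    return "200 OK"

assert asyncio.run(flaky_request()) == "200 OK"
assert calls["count"] == 3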

View file

@ -57,7 +57,7 @@ async def metasync(storage_a, storage_b, status, keys, conflict_resolution=None)
logger.debug(f"B: {b}") logger.debug(f"B: {b}")
logger.debug(f"S: {s}") logger.debug(f"S: {s}")
if a != s and b != s or storage_a.read_only or storage_b.read_only: if (a != s and b != s) or storage_a.read_only or storage_b.read_only:
await _resolve_conflict() await _resolve_conflict()
elif a != s and b == s: elif a != s and b == s:
await _a_to_b() await _a_to_b()

View file

@ -56,9 +56,7 @@ def repair_item(href, item, seen_uids, repair_unsafe_uid):
new_item = item.with_uid(generate_href()) new_item = item.with_uid(generate_href())
elif not href_safe(item.uid) or not href_safe(basename(href)): elif not href_safe(item.uid) or not href_safe(basename(href)):
if not repair_unsafe_uid: if not repair_unsafe_uid:
logger.warning( logger.warning("UID may cause problems, add --repair-unsafe-uid to repair.")
"UID may cause problems, add " "--repair-unsafe-uid to repair."
)
else: else:
logger.warning("UID or href is unsafe, assigning random UID.") logger.warning("UID or href is unsafe, assigning random UID.")
new_item = item.with_uid(generate_href()) new_item = item.with_uid(generate_href())

View file

@ -4,13 +4,12 @@ import contextlib
import functools import functools
from abc import ABCMeta from abc import ABCMeta
from abc import abstractmethod from abc import abstractmethod
from typing import Iterable from collections.abc import Iterable
from vdirsyncer import exceptions
from vdirsyncer.utils import uniq
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
from .. import exceptions
from ..utils import uniq
def mutating_storage_method(f): def mutating_storage_method(f):
"""Wrap a method and fail if the instance is readonly.""" """Wrap a method and fail if the instance is readonly."""
@ -76,7 +75,7 @@ class Storage(metaclass=StorageMeta):
read_only = False read_only = False
# The attribute values to show in the representation of the storage. # The attribute values to show in the representation of the storage.
_repr_attributes: list[str] = [] _repr_attributes: tuple[str, ...] = ()
def __init__( def __init__(
self, self,
@ -141,10 +140,8 @@ class Storage(metaclass=StorageMeta):
except ValueError: except ValueError:
pass pass
return "<{}(**{})>".format( attrs = {x: getattr(self, x) for x in self._repr_attributes}
self.__class__.__name__, return f"<{self.__class__.__name__}(**{attrs})>"
{x: getattr(self, x) for x in self._repr_attributes},
)
@abstractmethod @abstractmethod
async def list(self) -> list[tuple]: async def list(self) -> list[tuple]:
@ -153,7 +150,7 @@ class Storage(metaclass=StorageMeta):
""" """
@abstractmethod @abstractmethod
async def get(self, href: str): async def get(self, href: str) -> tuple[Item, str]:
"""Fetch a single item. """Fetch a single item.
:param href: href to fetch :param href: href to fetch
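The base-storage hunk above only shows the docstring of `mutating_storage_method`. As a rough, hedged illustration (not the library's actual implementation), such a read-only guard for async storage methods could look like:

import asyncio
import functools

class ReadOnlyError(Exception):
    """Stand-in for a "storage is read-only" exception."""

def mutating_storage_method(f):
    """Wrap an async method and fail if the instance is read-only."""
    @functools.wraps(f)
    async def wrapper(self, *args, **kwargs):
        if self.read_only:
            raise ReadOnlyError(f"{self.__class__.__name__} is read-only")
        return await f(self, *args, **kwargs)
    return wrapper

class DemoStorage:
    read_only = True

    @mutating_storage_method
    async def delete(self, href, etag):
        return href, etag

try:
    asyncio.run(DemoStorage().delete("x.vcf", "etag"))
except ReadOnlyError as e:
    print(e)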

View file

@ -1,26 +1,28 @@
from __future__ import annotations from __future__ import annotations
import contextlib
import datetime import datetime
import logging import logging
import urllib.parse as urlparse import urllib.parse as urlparse
import xml.etree.ElementTree as etree import xml.etree.ElementTree as etree
from abc import abstractmethod from abc import abstractmethod
from functools import cached_property
from inspect import getfullargspec from inspect import getfullargspec
from inspect import signature from inspect import signature
import aiohttp import aiohttp
import aiostream import aiostream
from vdirsyncer import exceptions
from vdirsyncer import http
from vdirsyncer import utils
from vdirsyncer.exceptions import Error from vdirsyncer.exceptions import Error
from vdirsyncer.http import USERAGENT
from vdirsyncer.http import prepare_auth
from vdirsyncer.http import prepare_client_cert
from vdirsyncer.http import prepare_verify
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
from .. import exceptions
from .. import http
from .. import utils
from ..http import USERAGENT
from ..http import prepare_auth
from ..http import prepare_client_cert
from ..http import prepare_verify
from .base import Storage from .base import Storage
from .base import normalize_meta_value from .base import normalize_meta_value
@ -113,10 +115,8 @@ def _fuzzy_matches_mimetype(strict, weak):
if strict is None or weak is None: if strict is None or weak is None:
return True return True
mediatype, subtype = strict.split("/") _mediatype, subtype = strict.split("/")
if subtype in weak: return subtype in weak
return True
return False
class Discover: class Discover:
@ -219,10 +219,8 @@ class Discover:
async def find_collections(self): async def find_collections(self):
rv = None rv = None
try: with contextlib.suppress(aiohttp.ClientResponseError, exceptions.Error):
rv = await aiostream.stream.list(self._find_collections_impl("")) rv = await aiostream.stream.list(self._find_collections_impl(""))
except (aiohttp.ClientResponseError, exceptions.Error):
pass
if rv: if rv:
return rv return rv
@ -237,7 +235,7 @@ class Discover:
return True return True
props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop")) props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
if props is None or not len(props): if props is None or not props:
dav_logger.debug("Skipping, missing <prop>: %s", response) dav_logger.debug("Skipping, missing <prop>: %s", response)
return False return False
if props.find("{DAV:}resourcetype/" + self._resourcetype) is None: if props.find("{DAV:}resourcetype/" + self._resourcetype) is None:
@ -261,7 +259,7 @@ class Discover:
href = response.find("{DAV:}href") href = response.find("{DAV:}href")
if href is None: if href is None:
raise InvalidXMLResponse("Missing href tag for collection " "props.") raise InvalidXMLResponse("Missing href tag for collection props.")
href = urlparse.urljoin(str(r.url), href.text) href = urlparse.urljoin(str(r.url), href.text)
if href not in done: if href not in done:
done.add(href) done.add(href)
@ -310,9 +308,7 @@ class Discover:
</mkcol> </mkcol>
""".format( """.format(
etree.tostring(etree.Element(self._resourcetype), encoding="unicode") etree.tostring(etree.Element(self._resourcetype), encoding="unicode")
).encode( ).encode("utf-8")
"utf-8"
)
response = await self.session.request( response = await self.session.request(
"MKCOL", "MKCOL",
@ -325,7 +321,7 @@ class Discover:
class CalDiscover(Discover): class CalDiscover(Discover):
_namespace = "urn:ietf:params:xml:ns:caldav" _namespace = "urn:ietf:params:xml:ns:caldav"
_resourcetype = "{%s}calendar" % _namespace _resourcetype = f"{{{_namespace}}}calendar"
_homeset_xml = b""" _homeset_xml = b"""
<propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav"> <propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav">
<prop> <prop>
@ -333,13 +329,13 @@ class CalDiscover(Discover):
</prop> </prop>
</propfind> </propfind>
""" """
_homeset_tag = "{%s}calendar-home-set" % _namespace _homeset_tag = f"{{{_namespace}}}calendar-home-set"
_well_known_uri = "/.well-known/caldav" _well_known_uri = "/.well-known/caldav"
class CardDiscover(Discover): class CardDiscover(Discover):
_namespace = "urn:ietf:params:xml:ns:carddav" _namespace = "urn:ietf:params:xml:ns:carddav"
_resourcetype: str | None = "{%s}addressbook" % _namespace _resourcetype: str | None = f"{{{_namespace}}}addressbook"
_homeset_xml = b""" _homeset_xml = b"""
<propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav"> <propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav">
<prop> <prop>
@ -347,7 +343,7 @@ class CardDiscover(Discover):
</prop> </prop>
</propfind> </propfind>
""" """
_homeset_tag = "{%s}addressbook-home-set" % _namespace _homeset_tag = f"{{{_namespace}}}addressbook-home-set"
_well_known_uri = "/.well-known/carddav" _well_known_uri = "/.well-known/carddav"
@ -395,7 +391,7 @@ class DAVSession:
self.url = url.rstrip("/") + "/" self.url = url.rstrip("/") + "/"
self.connector = connector self.connector = connector
@utils.cached_property @cached_property
def parsed_url(self): def parsed_url(self):
return urlparse.urlparse(self.url) return urlparse.urlparse(self.url)
@ -456,7 +452,7 @@ class DAVStorage(Storage):
connector: aiohttp.TCPConnector connector: aiohttp.TCPConnector
_repr_attributes = ["username", "url"] _repr_attributes = ("username", "url")
_property_table = { _property_table = {
"displayname": ("displayname", "DAV:"), "displayname": ("displayname", "DAV:"),
@ -501,8 +497,12 @@ class DAVStorage(Storage):
def _is_item_mimetype(self, mimetype): def _is_item_mimetype(self, mimetype):
return _fuzzy_matches_mimetype(self.item_mimetype, mimetype) return _fuzzy_matches_mimetype(self.item_mimetype, mimetype)
async def get(self, href: str): async def get(self, href: str) -> tuple[Item, str]:
((actual_href, item, etag),) = await aiostream.stream.list( actual_href: str
item: Item
etag: str
((actual_href, item, etag),) = await aiostream.stream.list( # type: ignore[misc]
self.get_multi([href]) self.get_multi([href])
) )
assert href == actual_href assert href == actual_href
@ -628,7 +628,7 @@ class DAVStorage(Storage):
continue continue
props = response.findall("{DAV:}propstat/{DAV:}prop") props = response.findall("{DAV:}propstat/{DAV:}prop")
if props is None or not len(props): if props is None or not props:
dav_logger.debug(f"Skipping {href!r}, properties are missing.") dav_logger.debug(f"Skipping {href!r}, properties are missing.")
continue continue
else: else:
@ -740,9 +740,7 @@ class DAVStorage(Storage):
""".format( """.format(
etree.tostring(element, encoding="unicode"), etree.tostring(element, encoding="unicode"),
action=action, action=action,
).encode( ).encode("utf-8")
"utf-8"
)
await self.session.request( await self.session.request(
"PROPPATCH", "PROPPATCH",
@ -796,7 +794,7 @@ class CalDAVStorage(DAVStorage):
self.item_types = tuple(item_types) self.item_types = tuple(item_types)
if (start_date is None) != (end_date is None): if (start_date is None) != (end_date is None):
raise exceptions.UserError( raise exceptions.UserError(
"If start_date is given, " "end_date has to be given too." "If start_date is given, end_date has to be given too."
) )
elif start_date is not None and end_date is not None: elif start_date is not None and end_date is not None:
namespace = dict(datetime.__dict__) namespace = dict(datetime.__dict__)

View file

@ -1,18 +1,19 @@
from __future__ import annotations from __future__ import annotations
import contextlib
import errno import errno
import logging import logging
import os import os
import subprocess import subprocess
from atomicwrites import atomic_write from vdirsyncer import exceptions
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import checkdir
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import generate_href
from vdirsyncer.utils import get_etag_from_file
from vdirsyncer.vobject import Item
from .. import exceptions
from ..utils import checkdir
from ..utils import expand_path
from ..utils import generate_href
from ..utils import get_etag_from_file
from ..vobject import Item
from .base import Storage from .base import Storage
from .base import normalize_meta_value from .base import normalize_meta_value
@ -21,7 +22,7 @@ logger = logging.getLogger(__name__)
class FilesystemStorage(Storage): class FilesystemStorage(Storage):
storage_name = "filesystem" storage_name = "filesystem"
_repr_attributes = ["path"] _repr_attributes = ("path",)
def __init__( def __init__(
self, self,
@ -65,9 +66,7 @@ class FilesystemStorage(Storage):
def _validate_collection(cls, path): def _validate_collection(cls, path):
if not os.path.isdir(path): if not os.path.isdir(path):
return False return False
if os.path.basename(path).startswith("."): return not os.path.basename(path).startswith(".")
return False
return True
@classmethod @classmethod
async def create_collection(cls, collection, **kwargs): async def create_collection(cls, collection, **kwargs):
@ -99,7 +98,7 @@ class FilesystemStorage(Storage):
): ):
yield fname, get_etag_from_file(fpath) yield fname, get_etag_from_file(fpath)
async def get(self, href): async def get(self, href) -> tuple[Item, str]:
fpath = self._get_filepath(href) fpath = self._get_filepath(href)
try: try:
with open(fpath, "rb") as f: with open(fpath, "rb") as f:
@ -178,7 +177,7 @@ class FilesystemStorage(Storage):
try: try:
subprocess.call([self.post_hook, fpath]) subprocess.call([self.post_hook, fpath])
except OSError as e: except OSError as e:
logger.warning(f"Error executing external hook: {str(e)}") logger.warning(f"Error executing external hook: {e!s}")
def _run_pre_deletion_hook(self, fpath): def _run_pre_deletion_hook(self, fpath):
logger.info( logger.info(
@ -187,7 +186,7 @@ class FilesystemStorage(Storage):
try: try:
subprocess.call([self.pre_deletion_hook, fpath]) subprocess.call([self.pre_deletion_hook, fpath])
except OSError as e: except OSError as e:
logger.warning(f"Error executing external hook: {str(e)}") logger.warning(f"Error executing external hook: {e!s}")
async def get_meta(self, key): async def get_meta(self, key):
fpath = os.path.join(self.path, key) fpath = os.path.join(self.path, key)
@ -205,10 +204,8 @@ class FilesystemStorage(Storage):
fpath = os.path.join(self.path, key) fpath = os.path.join(self.path, key)
if value is None: if value is None:
try: with contextlib.suppress(OSError):
os.remove(fpath) os.remove(fpath)
except OSError:
pass
else: else:
with atomic_write(fpath, mode="wb", overwrite=True) as f: with atomic_write(fpath, mode="wb", overwrite=True) as f:
f.write(value.encode(self.encoding)) f.write(value.encode(self.encoding))

View file

@ -11,12 +11,13 @@ from threading import Thread
import aiohttp import aiohttp
import click import click
from atomicwrites import atomic_write
from .. import exceptions from vdirsyncer import exceptions
from ..utils import checkdir from vdirsyncer.utils import atomic_write
from ..utils import expand_path from vdirsyncer.utils import checkdir
from ..utils import open_graphical_browser from vdirsyncer.utils import expand_path
from vdirsyncer.utils import open_graphical_browser
from . import base from . import base
from . import dav from . import dav
from .google_helpers import _RedirectWSGIApp from .google_helpers import _RedirectWSGIApp
@ -98,6 +99,7 @@ class GoogleSession(dav.DAVSession):
token_updater=self._save_token, token_updater=self._save_token,
connector=self.connector, connector=self.connector,
connector_owner=False, connector_owner=False,
trust_env=True,
) )
async def _init_token(self): async def _init_token(self):
@ -128,7 +130,7 @@ class GoogleSession(dav.DAVSession):
async with self._session as session: async with self._session as session:
# Fail fast if the address is occupied # Fail fast if the address is occupied
authorization_url, state = session.authorization_url( authorization_url, _state = session.authorization_url(
TOKEN_URL, TOKEN_URL,
# access_type and approval_prompt are Google specific # access_type and approval_prompt are Google specific
# extra parameters. # extra parameters.

View file

@ -7,9 +7,9 @@ from __future__ import annotations
import logging import logging
import wsgiref.simple_server import wsgiref.simple_server
import wsgiref.util import wsgiref.util
from collections.abc import Iterable
from typing import Any from typing import Any
from typing import Callable from typing import Callable
from typing import Iterable
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@@ -1,24 +1,29 @@
 from __future__ import annotations

+import logging
+import subprocess
 import urllib.parse as urlparse

 import aiohttp

-from .. import exceptions
-from ..http import USERAGENT
-from ..http import prepare_auth
-from ..http import prepare_client_cert
-from ..http import prepare_verify
-from ..http import request
-from ..vobject import Item
-from ..vobject import split_collection
+from vdirsyncer import exceptions
+from vdirsyncer.http import USERAGENT
+from vdirsyncer.http import prepare_auth
+from vdirsyncer.http import prepare_client_cert
+from vdirsyncer.http import prepare_verify
+from vdirsyncer.http import request
+from vdirsyncer.vobject import Item
+from vdirsyncer.vobject import split_collection
 from .base import Storage

+logger = logging.getLogger(__name__)
+

 class HttpStorage(Storage):
     storage_name = "http"
     read_only = True
-    _repr_attributes = ["username", "url"]
+    _repr_attributes = ("username", "url")
     _items = None

     # Required for tests.
@@ -34,6 +39,7 @@ class HttpStorage(Storage):
         useragent=USERAGENT,
         verify_fingerprint=None,
         auth_cert=None,
+        filter_hook=None,
         *,
         connector,
         **kwargs,
@@ -56,6 +62,7 @@ class HttpStorage(Storage):
         self.useragent = useragent
         assert connector is not None
         self.connector = connector
+        self._filter_hook = filter_hook

         collection = kwargs.get("collection")
         if collection is not None:
@@ -66,6 +73,19 @@ class HttpStorage(Storage):
     def _default_headers(self):
         return {"User-Agent": self.useragent}

+    def _run_filter_hook(self, raw_item):
+        try:
+            result = subprocess.run(
+                [self._filter_hook],
+                input=raw_item,
+                capture_output=True,
+                encoding="utf-8",
+            )
+            return result.stdout
+        except OSError as e:
+            logger.warning(f"Error executing external command: {e!s}")
+            return raw_item
+
     async def list(self):
         async with aiohttp.ClientSession(
             connector=self.connector,
@@ -82,8 +102,13 @@
         )

         self._items = {}

-        for item in split_collection((await r.read()).decode("utf-8")):
-            item = Item(item)
+        for raw_item in split_collection((await r.read()).decode("utf-8")):
+            if self._filter_hook:
+                raw_item = self._run_filter_hook(raw_item)
+                if not raw_item:
+                    continue
+
+            item = Item(raw_item)

             if self._ignore_uids:
                 item = item.with_uid(item.hash)
@@ -92,11 +117,12 @@
         for href, (_, etag) in self._items.items():
             yield href, etag

-    async def get(self, href):
+    async def get(self, href) -> tuple[Item, str]:
         if self._items is None:
             async for _ in self.list():
                 pass

+        assert self._items is not None  # type assertion
         try:
             return self._items[href]
         except KeyError:
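The new `filter_hook` option runs an external executable once per downloaded item: the raw item text is passed on stdin, whatever the hook writes to stdout replaces the item, and empty output drops the item entirely (on `OSError` the original item is kept and a warning is logged). A minimal sketch of such a hook, assuming it is an executable script referenced by path; the filename and the filtering rule are purely illustrative:

#!/usr/bin/env python3
# drop-cancelled.py -- hypothetical filter_hook script for the http storage.
# Receives one raw item on stdin; prints it back unless it is cancelled.
import sys

raw_item = sys.stdin.read()

# Printing nothing makes HttpStorage skip this item.
if "STATUS:CANCELLED" not in raw_item:
    sys.stdout.write(raw_item)

Since storage keyword arguments generally map onto configuration keys, this would presumably be wired up as filter_hook = "/path/to/drop-cancelled.py" in the [storage ...] section of the read-only http storage; that mapping is assumed here rather than taken from the diff.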

View file

@@ -2,7 +2,9 @@ from __future__ import annotations
 import random

-from .. import exceptions
+from vdirsyncer import exceptions
+from vdirsyncer.vobject import Item

 from .base import Storage
 from .base import normalize_meta_value
@@ -33,7 +35,7 @@ class MemoryStorage(Storage):
         for href, (etag, _item) in self.items.items():
             yield href, etag

-    async def get(self, href):
+    async def get(self, href) -> tuple[Item, str]:
         etag, item = self.items[href]
         return item, etag

View file

@@ -6,24 +6,31 @@ import functools
 import glob
 import logging
 import os
-from typing import Iterable
+from collections.abc import Iterable

-from atomicwrites import atomic_write
-
-from .. import exceptions
-from ..utils import checkfile
-from ..utils import expand_path
-from ..utils import get_etag_from_file
-from ..utils import uniq
-from ..vobject import Item
-from ..vobject import join_collection
-from ..vobject import split_collection
+from vdirsyncer import exceptions
+from vdirsyncer.utils import atomic_write
+from vdirsyncer.utils import checkfile
+from vdirsyncer.utils import expand_path
+from vdirsyncer.utils import get_etag_from_file
+from vdirsyncer.utils import uniq
+from vdirsyncer.vobject import Item
+from vdirsyncer.vobject import join_collection
+from vdirsyncer.vobject import split_collection
 from .base import Storage

 logger = logging.getLogger(__name__)


 def _writing_op(f):
+    """Implement at_once for write operations.
+
+    Wrap an operation which writes to the storage, implementing `at_once` if it has been
+    requested. Changes are stored in-memory until the at_once block finishes, at which
+    time they are all written at once.
+    """
+
     @functools.wraps(f)
     async def inner(self, *args, **kwargs):
         if self._items is None or not self._at_once:
@@ -40,7 +47,7 @@ def _writing_op(f):

 class SingleFileStorage(Storage):
     storage_name = "singlefile"
-    _repr_attributes = ["path"]
+    _repr_attributes = ("path",)

     _write_mode = "wb"
     _append_mode = "ab"
@@ -95,7 +102,7 @@ class SingleFileStorage(Storage):
                 path = path % (collection,)
             except TypeError:
                 raise ValueError(
-                    "Exactly one %s required in path " "if collection is not null."
+                    "Exactly one %s required in path if collection is not null."
                 )

         checkfile(path, create=True)
@@ -126,11 +133,12 @@ class SingleFileStorage(Storage):
             yield href, etag

-    async def get(self, href):
+    async def get(self, href) -> tuple[Item, str]:
         if self._items is None or not self._at_once:
             async for _ in self.list():
                 pass

+        assert self._items is not None  # type assertion
         try:
             return self._items[href]
         except KeyError:

View file

@@ -16,11 +16,11 @@ import contextlib
 import itertools
 import logging

+from vdirsyncer.exceptions import UserError
 from vdirsyncer.storage.base import Storage
+from vdirsyncer.utils import uniq
 from vdirsyncer.vobject import Item

-from ..exceptions import UserError
-from ..utils import uniq
 from .exceptions import BothReadOnly
 from .exceptions import IdentAlreadyExists
 from .exceptions import PartialSync
@@ -136,9 +136,13 @@ async def sync(
         raise BothReadOnly

     if conflict_resolution == "a wins":
-        conflict_resolution = lambda a, b: a  # noqa: E731
+        def conflict_resolution(a, b):
+            return a
     elif conflict_resolution == "b wins":
-        conflict_resolution = lambda a, b: b  # noqa: E731
+        def conflict_resolution(a, b):
+            return b

     status_nonempty = bool(next(status.iter_old(), None))

View file

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from .. import exceptions
+from vdirsyncer import exceptions


 class SyncError(exceptions.Error):

View file

@@ -169,6 +169,11 @@ class SqliteStatus(_StatusBase):
             ); """
         )

+    def close(self):
+        if self._c:
+            self._c.close()
+            self._c = None
+
     def _is_latest_version(self):
         try:
             return bool(

View file

@@ -1,8 +1,10 @@
 from __future__ import annotations

+import contextlib
 import functools
 import os
 import sys
+import tempfile
 import uuid
 from inspect import getfullargspec
 from typing import Callable
@@ -22,8 +24,7 @@ _missing = object()
 def expand_path(p: str) -> str:
     """Expand $HOME in a path and normalise slashes."""
     p = os.path.expanduser(p)
-    p = os.path.normpath(p)
-    return p
+    return os.path.normpath(p)


 def split_dict(d: dict, f: Callable):
@@ -76,7 +77,7 @@ def get_storage_init_specs(cls, stop_at=object):
     spec = getfullargspec(cls.__init__)
     traverse_superclass = getattr(cls.__init__, "_traverse_superclass", True)
     if traverse_superclass:
-        if traverse_superclass is True:  # noqa
+        if traverse_superclass is True:
             supercls = next(
                 getattr(x.__init__, "__objclass__", x) for x in cls.__mro__[1:]
             )
@@ -86,7 +87,7 @@ def get_storage_init_specs(cls, stop_at=object):
     else:
         superspecs = ()

-    return (spec,) + superspecs
+    return (spec, *superspecs)


 def get_storage_init_args(cls, stop_at=object):
@@ -125,12 +126,13 @@ def checkdir(path: str, create: bool = False, mode: int = 0o750) -> None:
         raise exceptions.CollectionNotFound(f"Directory {path} does not exist.")


-def checkfile(path, create=False):
-    """
-    Check whether ``path`` is a file.
+def checkfile(path, create=False) -> None:
+    """Check whether ``path`` is a file.

     :param create: Whether to create the file's parent directories if they do
         not exist.
+    :raises CollectionNotFound: if path does not exist.
+    :raises OSError: if path exists but is not a file.
     """
     checkdir(os.path.dirname(path), create=create)
     if not os.path.isfile(path):
@@ -143,24 +145,6 @@ def checkfile(path, create=False):
     raise exceptions.CollectionNotFound(f"File {path} does not exist.")


-class cached_property:
-    """A read-only @property that is only evaluated once. Only usable on class
-    instances' methods.
-    """
-
-    def __init__(self, fget, doc=None):
-        self.__name__ = fget.__name__
-        self.__module__ = fget.__module__
-        self.__doc__ = doc or fget.__doc__
-        self.fget = fget
-
-    def __get__(self, obj, cls):
-        if obj is None:  # pragma: no cover
-            return self
-        obj.__dict__[self.__name__] = result = self.fget(obj)
-        return result
-
-
 def href_safe(ident, safe=SAFE_UID_CHARS):
     return not bool(set(ident) - set(safe))
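With the local helper removed, the stdlib's `functools.cached_property` (imported in `vobject.py` below) provides the same evaluate-once behaviour: the result is stored in the instance's `__dict__` on first access and returned directly afterwards. A minimal sketch using only the standard library; the class and attribute names are illustrative:

from functools import cached_property


class Collection:
    def __init__(self, href: str) -> None:
        self.href = href

    @cached_property
    def etag(self) -> str:
        # Runs only on the first access; the result is then cached in the
        # instance's __dict__ under the attribute name.
        print("computing etag")
        return f"etag-of-{self.href}"


c = Collection("calendar.ics")
print(c.etag)  # prints "computing etag", then the value
print(c.etag)  # cached: prints only the value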
@@ -174,8 +158,7 @@ def generate_href(ident=None, safe=SAFE_UID_CHARS):
     """
     if not ident or not href_safe(ident, safe):
         return str(uuid.uuid4())
-    else:
-        return ident
+    return ident


 def synchronized(lock=None):
@@ -208,7 +191,7 @@ def open_graphical_browser(url, new=0, autoraise=True):
     cli_names = {"www-browser", "links", "links2", "elinks", "lynx", "w3m"}

-    if webbrowser._tryorder is None:  # Python 3.7
+    if webbrowser._tryorder is None:  # Python 3.8
         webbrowser.register_standard_browsers()

     for name in webbrowser._tryorder:
@@ -219,4 +202,28 @@ def open_graphical_browser(url, new=0, autoraise=True):
         if browser.open(url, new, autoraise):
             return

-    raise RuntimeError("No graphical browser found. Please open the URL " "manually.")
+    raise RuntimeError("No graphical browser found. Please open the URL manually.")
+
+
+@contextlib.contextmanager
+def atomic_write(dest, mode="wb", overwrite=False):
+    if "w" not in mode:
+        raise RuntimeError("`atomic_write` requires write access")
+    fd, src = tempfile.mkstemp(prefix=os.path.basename(dest), dir=os.path.dirname(dest))
+    file = os.fdopen(fd, mode=mode)
+    try:
+        yield file
+    except Exception:
+        os.unlink(src)
+        raise
+    else:
+        file.flush()
+        file.close()
+        if overwrite:
+            os.rename(src, dest)
+        else:
+            os.link(src, dest)
+            os.unlink(src)
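The new `atomic_write` helper replaces the `atomicwrites` dependency: data is written to a `tempfile.mkstemp` file in the destination directory and only linked (or renamed, with `overwrite=True`) into place after the block completes, so an interrupted write never leaves a truncated file behind. Its call sites appear in the storage diffs above; a minimal standalone sketch, assuming the helper is importable as shown (the target path is illustrative):

import os
import tempfile

from vdirsyncer.utils import atomic_write  # the helper defined above

with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, "item.ics")

    # overwrite=False hard-links the temp file into place and fails if
    # `target` already exists; overwrite=True renames over any existing file.
    with atomic_write(target, mode="wb", overwrite=False) as f:
        f.write(b"BEGIN:VCALENDAR\r\nEND:VCALENDAR\r\n")

    assert os.path.exists(target)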

View file

@@ -1,10 +1,10 @@
 from __future__ import annotations

 import hashlib
+from functools import cached_property
 from itertools import chain
 from itertools import tee

-from .utils import cached_property
 from .utils import uniq

 IGNORE_PROPS = (
@@ -188,7 +188,7 @@ def join_collection(items, wrappers=_default_join_wrappers):
     """

     items1, items2 = tee((_Component.parse(x) for x in items), 2)
-    item_type, wrapper_type = _get_item_type(items1, wrappers)
+    _item_type, wrapper_type = _get_item_type(items1, wrappers)
     wrapper_props = []

     def _get_item_components(x):
@@ -231,8 +231,7 @@ def _get_item_type(components, wrappers):
     if not i:
         return None, None
-    else:
-        raise ValueError("Not sure how to join components.")
+    raise ValueError("Not sure how to join components.")


 class _Component:
@@ -303,10 +302,9 @@ class _Component:
         if multiple:
             return rv
-        elif len(rv) != 1:
+        if len(rv) != 1:
             raise ValueError(f"Found {len(rv)} components, expected one.")
-        else:
-            return rv[0]
+        return rv[0]

     def dump_lines(self):
         yield f"BEGIN:{self.name}"
@@ -323,13 +321,12 @@ class _Component:
             for line in lineiter:
                 if line.startswith(prefix):
                     break
-                else:
-                    new_lines.append(line)
+                new_lines.append(line)
             else:
                 break

             for line in lineiter:
-                if not line.startswith((" ", "\t")):
+                if not line.startswith((" ", "\t", *prefix)):
                     new_lines.append(line)
                     break
@@ -347,10 +344,9 @@ class _Component:
             return obj not in self.subcomponents and not any(
                 obj in x for x in self.subcomponents
             )
-        elif isinstance(obj, str):
+        if isinstance(obj, str):
             return self.get(obj, None) is not None
-        else:
-            raise ValueError(obj)
+        raise ValueError(obj)

     def __getitem__(self, key):
         prefix_without_params = f"{key}:"
@@ -360,7 +356,7 @@ class _Component:
             if line.startswith(prefix_without_params):
                 rv = line[len(prefix_without_params) :]
                 break
-            elif line.startswith(prefix_with_params):
+            if line.startswith(prefix_with_params):
                 rv = line[len(prefix_with_params) :].split(":", 1)[-1]
                 break
             else: