Compare commits

..

192 commits
0.19.0 ... main

Author SHA1 Message Date
Hugo Osvaldo Barrera
c3262d88cc Mark unused variables as such 2025-11-06 01:05:48 +01:00
cbb4e314f6 cli/discover: add implicit config to pair for collection creation
Adds support for auto-creating collections when they exist only on
one side and `implicit = 'create'` is set in the pair config.
2025-11-06 00:04:22 +00:00
Hugo Osvaldo Barrera
ac9919d865 Add changelog entry for latest commits 2025-10-09 11:05:33 +02:00
samm81
b124ce835b fix: remove unused import 2025-10-09 09:02:01 +00:00
samm81
6708dbbbdc fix: fix ssl behavior in request
- `ClientConnectionError` in `aiohttp` can wrap SSL handshake and
  certificate verification errors
- Retrying those hides the real cause and produces
  `TransientNetworkError` instead of the expected certificate error
- Removing `ClientConnectionError` from the transient list lets SSL
  errors surface correctly
2025-10-09 09:02:01 +00:00
samm81
81d8444810 http: refactor auth loop 2025-10-09 09:02:01 +00:00
samm81
4990cdf229 http: retry safe DAV methods on transient aiohttp disconnects; cli: gather with return_exceptions to allow in-flight backoffs to finish
- Retry ServerDisconnectedError/ServerTimeoutError/ClientConnectionError/asyncio.TimeoutError for GET/HEAD/OPTIONS/PROPFIND/REPORT
- Keep original rate-limit handling (429, Google 403 usageLimits)
- In CLI, avoid cancelling sibling tasks so per-request backoff can complete; re-raise first failure after all tasks finish
2025-10-09 09:02:01 +00:00
Hugo Osvaldo Barrera
4c2c60402e ci: run ruff and mypy
Fixes: https://github.com/pimutils/vdirsyncer/issues/1194
2025-09-20 13:53:39 +02:00
Hugo Osvaldo Barrera
2f4f4ac72b Fix some mypy type failures 2025-09-20 13:51:21 +02:00
Hugo Osvaldo Barrera
6354db82c4 make: install check requirements via install-dev 2025-09-20 13:19:41 +02:00
Hugo Osvaldo Barrera
a9b6488dac Merge docs-requirements.txt into pyproject.toml
Keep requirements definitions all in one place.
2025-09-20 13:18:17 +02:00
Hugo Osvaldo Barrera
a4ceabf80b Organise imports
And update imports from deprecated locations.
2025-09-20 13:05:14 +02:00
Hugo Osvaldo Barrera
3488f77cd6 Remove unused variables 2025-09-20 13:05:14 +02:00
Hugo Osvaldo Barrera
19120422a7 Use ternary operator for trivial assignment 2025-09-20 13:05:14 +02:00
Hugo Osvaldo Barrera
2e619806a0 Drop support for Python 3.8
Note that recent commits introduced syntax unsupported by Python 3.8
already.
2025-09-20 13:05:03 +02:00
Hugo Osvaldo Barrera
4669bede07 Organise imports 2025-09-20 12:56:22 +02:00
Hugo Osvaldo Barrera
59c1c55407 Document wrapper 2025-09-20 12:50:00 +02:00
Hugo Osvaldo Barrera
1502f5b5f4 Execute one assertion per line 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
a4d4bf8fd1 Normalise pytest syntax 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
aab70e9fb0 Use cached_property from the stdlib
Our local implementation preceded the one in the stdlib, but we no
longer support versions of Python which do not ship it.
2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
ed88406aec Avoid using mutable class attributes
A tuple works fine here.
2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
ffe883a2f1 Avoid warning due to unused import 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
e5f2869580 ruff: ignore block for legacy Python 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
95bb7bd7f9 Declare functions instead of assigning to lambdas 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
e3b2473383 Use list expansion instead of concatenation 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
424cfc5799 ruff: ignore false positive 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
29312e87c5 Close status even if assertions fail 2025-09-20 12:45:56 +02:00
Hugo Osvaldo Barrera
c77b22334a Add changelog entry for latest change 2025-09-20 12:42:10 +02:00
samm81
02350c924b
http(request): collates status checks 2025-09-13 16:23:46 +07:00
Samuel Maynard
605f878f9b
test_retry: remove unneeded decorator
Co-authored-by: Hugo <hugo@whynothugo.nl>
2025-09-13 12:22:25 +03:00
samm81
bb2b71da81
builds(archlinux-py313): adds python-tenacity package 2025-09-12 17:02:28 +07:00
samm81
065ebe4752
AUTHORS: add samm81 2025-09-12 16:25:00 +07:00
samm81
0d741022a9
http: add rate limiting (mainly for google)
- google calendar uses the `403` and `429` codes to perform rate limiting [1][2]. this pr adds `tenacity` to perform exponential back off as suggested in google calendar's docs [3].

[1]: https://developers.google.com/workspace/calendar/api/guides/errors#403_rate_limit_exceeded
[2]: https://developers.google.com/workspace/calendar/api/guides/errors#429_too_many_requests
[3]: https://developers.google.com/workspace/calendar/api/guides/quota#backoff
2025-09-12 16:20:44 +07:00
Hugo Osvaldo Barrera
b5d3b7e578 Apply auto-fixes for RUF rule 2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
9677cf9812 Simplify some statements 2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
6da84c7881 ruff: sort rules 2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
dceb113334 ruff: fix mix-up in configuration
Ruff hasn't been finding errors in a while. Most of them are linting
checks anyway, but there were quite a few deprecated usages.
2025-08-29 10:17:44 +02:00
Hugo Osvaldo Barrera
01fa614b6b Fix line which are too long 2025-08-29 10:17:41 +02:00
Hugo Osvaldo Barrera
20cc1247ed ruff: apply auto-fixes 2025-08-29 10:03:24 +02:00
Дилян Палаузов
2f548e048d Some code simplifications with the return statement 2025-08-29 09:48:27 +02:00
Jakub Klinkovský
5d343264f3 Remove python-requests-toolbelt from Arch Linux build
The dependency was dropped in 89a01631fa
2025-08-29 09:28:50 +02:00
Hugo Osvaldo Barrera
bc3fa8bd39 Remove stale references to setup.py 2025-08-28 22:57:55 +02:00
Hugo Osvaldo Barrera
8803d5a086 ruff: use extend-select
Ensure that we don't disable any default rules.
2025-08-28 11:40:46 +02:00
Hugo Osvaldo Barrera
96754a3d0a ruff: enable TID rules 2025-08-28 11:39:06 +02:00
Hugo Osvaldo Barrera
d42707c108 Bump constraint for aiostream
There's a newer version available, and it also doesn't have any breaking
changes which could affect us.
2025-08-28 11:37:14 +02:00
Hugo Osvaldo Barrera
ddfe3cc749 Bump constraint for aiostream
Fixes: https://github.com/pimutils/vdirsyncer/issues/1111
2025-08-28 11:32:28 +02:00
Radon Rosborough
84ff0ac943 Log error response body in debug 2025-08-27 09:11:32 +02:00
Hugo Osvaldo Barrera
388c16f188 Document sqlite fix in changelog 2025-08-25 17:37:26 +02:00
Hugo Osvaldo Barrera
78f41d32ce Explicitly close status database
Using `__del__` often closes the database on a different thread, which
is not supported by the sqlite module and produces a different warning.

Explicitly close the status database everywhere it is used.
2025-08-25 17:33:20 +02:00
Hugo Osvaldo Barrera
164559ad7a Remove references to obsolete event_loop fixture
It's gone from the latest pytest-asyncio.
2025-08-25 17:12:21 +02:00
samm81
2c6dc4cddf updates SqliteStatus to properly close connections
otherwise, when trying to run `pytest` in a `python3.13` environment
results in a bunch of

```
tests/unit/sync/test_sync.py::test_partial_sync_ignore
  /home/user/.asdf/installs/python/3.13.1/lib/python3.13/asyncio/base_events.py:650: ResourceWarning: unclosed database in <sqlite3.Connection object at 0x7fda8f6b6c50>
    sys.set_asyncgen_hooks(
  Enable tracemalloc to get traceback where the object was allocated.
  See https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings for more info.
  ```
2025-08-25 16:53:00 +02:00
samm81
9bbb7fa91a fix: fix mypy typing error 2025-08-25 16:51:29 +02:00
Hugo Osvaldo Barrera
f8bcafa9d7 ci: use Alpine 3.19 for Python 3.11 2025-08-25 16:49:16 +02:00
Hugo Osvaldo Barrera
162879df21 ci: include python version in job name 2025-07-23 23:24:35 +02:00
Hugo Osvaldo Barrera
3b9db0e4db Add support for Python 3.13
Fixes: https://github.com/pimutils/vdirsyncer/issues/1180
2025-07-23 23:23:59 +02:00
Hugo Osvaldo Barrera
63d2e6c795 pyproject: squelch warning 2025-04-11 01:59:29 +02:00
Hugo Osvaldo Barrera
03d1c4666d pyproject: update syntax for licence 2025-04-11 01:59:17 +02:00
Hugo Osvaldo Barrera
ecdd565be4 Document checkfile() 2025-04-09 14:00:40 +02:00
Hugo Osvaldo Barrera
17e43fd633 Move test dependencies into pyproject.toml 2025-04-07 18:47:44 +02:00
Hugo Osvaldo Barrera
2b4496fea4 Update linting tools 2025-04-07 18:42:07 +02:00
Hugo Osvaldo Barrera
fc4a02c0c9 Add some missing type hints 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
c19802e4d8 Configure ruff as an auto-formatter 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
cce8fef8de Auto-format using ruff 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
9a0dbc8cd0 Update ruff configuration syntax 2025-04-07 18:40:34 +02:00
Hugo Osvaldo Barrera
32453cccfc Drop support for Python 3.7
Installing on Python 3.7 no longer works due to lack of support in the
minimal version of setuptools_scm. This commit makes the change
official, but it happened a while ago.
2025-04-07 18:39:52 +02:00
Hugo Osvaldo Barrera
057f3af293 Remove stale GitLab CI config 2025-04-07 18:35:12 +02:00
Hugo Osvaldo Barrera
e76d8a5b03 Add two more trove classifiers 2025-04-07 18:09:36 +02:00
Hugo Osvaldo Barrera
d8961232c4 Remove setup.py in favour of pyproject.toml
Implements: https://github.com/pimutils/vdirsyncer/issues/1164
2025-04-07 18:06:45 +02:00
Hugo Osvaldo Barrera
646e0b48a5 Delete stale comment 2025-04-07 18:01:16 +02:00
Hugo Osvaldo Barrera
fb6a859b88 Add changelog entry for recent change 2025-04-07 17:39:18 +02:00
Petr Moucha
ff999b5b74 Use proxy configuration from environment for Google storage 2025-04-04 13:17:32 +02:00
Hugo Osvaldo Barrera
41b48857eb Remove reference to dead domain 2025-03-06 11:57:05 +01:00
Hugo Osvaldo Barrera
70d09e6d5d Remove stale comment 2025-02-13 13:42:06 +01:00
Ben Boeckel
8b063c39cb atomicwrites: remove dependency on abandoned library 2025-02-13 13:37:06 +01:00
Hugo Osvaldo Barrera
12a06917db Add explicit configuration for readthedocs
See: https://about.readthedocs.com/blog/2024/12/deprecate-config-files-without-sphinx-or-mkdocs-config/
2025-02-13 13:34:13 +01:00
Hugo Osvaldo Barrera
2fee1d67f2 Update CI job with "oldest supported dependencies"
Alpine 3.17 has faded away, bump to Alpine 3.18.
2025-02-13 13:32:59 +01:00
Hugo Osvaldo Barrera
a934d5ec66 Keep test for duplicate consecutive keys
See: https://github.com/pimutils/vdirsyncer/pull/1153
2024-12-21 16:49:50 +01:00
Colin Watson
c79d3680cd Fix _Component.__delitem__ with adjacent identical keys
Hypothesis found the following example:

```
tests/unit/utils/test_vobject.py:335: in add_prop
    assert c[key] == value
E   AssertionError: assert '0' == '1'
E
E     - 1
E     + 0
E   Falsifying example:
E   state = VobjectMachine()
E   unparsed_0 = state.get_unparsed_lines(encoded=False, joined=False)
E   parsed_0 = state.parse(unparsed=unparsed_0)
E   state.add_prop_raw(c=parsed_0, key='0', params=[], value='0')
E   state.add_prop_raw(c=parsed_0, key='0', params=[], value='0')
E   state.add_prop(c=parsed_0, key='0', value='1')
E   state.teardown()
```

After the two `add_prop_raw` calls, `c.props` is `["0;:0", "0;:0",
"FOO:YES"]`.  `_Component.__delitem__` then fails to effectively delete
the previous key: it deletes the first `"0;:0"` item, but then checks
for continuation lines following it and incorrectly keeps the second
`"0;:0"` item even though it begins with one of the prefixes it's trying
to delete.  Checking for the prefix in the check for continuation lines
fixes this.

Fixes: #1149
2024-12-20 01:43:15 +00:00
Hugo Osvaldo Barrera
cd050d57b9 Use direnv to set up a virtualenv for development 2024-12-09 14:18:24 +01:00
Hugo Osvaldo Barrera
8c98992f74 Move setuptools-scm config into pyproject.toml 2024-12-09 14:18:06 +01:00
Hugo Osvaldo Barrera
c2eed9fb59 Add a readthedocs configuration file
Used for building docs in CI pipelines.
2024-12-09 01:36:22 +01:00
Mike A.
a490544405 Do not load netrc config files 2024-12-09 01:32:29 +01:00
Hugo Osvaldo Barrera
688d6f907f Update deprecated usages of hypothesis 2024-12-09 01:30:44 +01:00
euxane
2e7e31fdbf storage/http: add support for filter_hook
This allows users to process fetched items through a filter command,
to fix malformed webcal items as they are imported.

In my case, my provider adds the export time to the description and
random sequence numbers to all events. This caused the whole collection
to be invalidated and propagated at each sync. I use the filter to
remove those, canonicalising the items.
2024-12-08 19:31:32 +01:00
Arran Ubels
616d7aacb0 OfflineIMAP url Update 2024-10-31 22:43:45 +01:00
Hugo Osvaldo Barrera
89129e37b6 Typo
Fixes: https://github.com/pimutils/vdirsyncer/issues/1139
2024-09-13 18:36:17 +02:00
Hugo Osvaldo Barrera
88722ef4b7 Add changelog entry for Digest Auth 2024-09-11 17:25:29 +02:00
Mike A.
35f299679f Rewrite guess auth test for unsupported status 2024-09-11 12:04:05 +02:00
Mike A.
67e1c0ded5 Make tests pass 2024-09-11 12:04:05 +02:00
Mike A.
89a01631fa Remove requests_toolbelt 2024-09-11 12:04:05 +02:00
Mike A.
611b8667a3 Implement digest auth 2024-09-11 12:04:05 +02:00
Hugo Osvaldo Barrera
8550475548 Formatting 2024-08-26 12:49:36 +02:00
Hugo Osvaldo Barrera
cd2445b991 Upgrade Alpine release used in CI 2024-08-26 12:49:24 +02:00
Jakub Klinkovský
5ca2742271 Add short option for the help option 2024-08-26 12:43:20 +02:00
Jakub Klinkovský
5ac9dcec29 Update documentation regarding SSL pinning by fingerprint 2024-08-16 15:18:18 +02:00
octvs
a513a7e4fa docs: update config info on todoman tutorial 2024-04-02 15:30:16 +02:00
Dick Marinus
5ae05245e6 fix pylint W0621: Redefining name 'main' from outer scope (line 68) (redefined-outer-name) 2024-03-19 09:59:08 +01:00
Hugo Osvaldo Barrera
055ed120dd Pre-commit autoupdate 2024-02-20 15:08:51 +01:00
Hugo Osvaldo Barrera
31816dc652 Add some type hints 2024-02-20 15:08:51 +01:00
Bleala
2e023a5feb Update AUTHORS.rst 2024-02-16 14:29:11 +01:00
Bleala
14afe16a13 Update CHANGELOG.rst 2024-02-16 14:29:11 +01:00
Bleala
5766e1c501 Add Docker Environment 2024-02-16 14:29:11 +01:00
Xavier Brochard
fade399a21 more explanations of "collection" meaning 2024-02-02 10:26:49 +01:00
Xavier Brochard
3433f8a034 A bit more explanation of "from a" and "from b" 2024-02-02 10:25:10 +01:00
chrisblech
6a3077f9dc add pre_deletion_hook
closes https://github.com/pimutils/vdirsyncer/issues/1107
2024-01-31 19:14:59 +01:00
Hugo Osvaldo Barrera
42c5dba208 Pre-commit autoupdate 2024-01-31 19:08:25 +01:00
Hugo Osvaldo Barrera
7991419ab1 Merge implicitly concatenated strings 2024-01-31 19:08:25 +01:00
Hugo Osvaldo Barrera
03e6afe9dc Remove broken contact link
Fixes: https://github.com/pimutils/vdirsyncer/issues/1104
2024-01-28 20:15:08 +01:00
Hugo
762d369560
Merge pull request #1103 from jasonccox/main
Require matching BEGIN and END lines in vobjects
2024-01-28 20:13:28 +01:00
Hugo Osvaldo Barrera
2396c46b04 Allow specifying deb distro/ver via env vars 2023-12-18 14:35:48 +01:00
Hugo Osvaldo Barrera
b626236128 Use docker (instead of podman) to build debs 2023-12-18 14:35:48 +01:00
Hugo Osvaldo Barrera
45b67122fe Fast-mail publishing if credentials are missing 2023-12-18 14:35:48 +01:00
Jason Cox
7a387b8efe Require matching BEGIN and END lines in vobjects
Raise an error when parsing a vobject that has mismatched `BEGIN` and
`END` lines (e.g., `BEGIN:FOO` followed by `END:BAR`) or missing `END`
lines (e.g., `BEGIN:FOO` with no subsequent `END:FOO`).

Fixes #1102.
2023-12-13 10:31:32 -05:00
Kai Herlemann
889e1f9ea2 Implement a no_delete flag
See: https://github.com/pimutils/vdirsyncer/pull/1090
2023-11-29 23:50:32 +08:00
azrdev
d1f93ea0be docs: add instructions to get pw from environment variable
tested with vdirsyncer 0.19.2 on archlinux
2023-11-26 08:20:20 +01:00
Hugo Osvaldo Barrera
82fd03be64 Clarify that pipx won't install man pages
And reword the section a bit.
2023-11-16 07:19:15 +08:00
Hugo Osvaldo Barrera
b50f9def00 Ensure type annotations are backwards compatible
Related: https://github.com/pimutils/todoman/issues/544
2023-10-29 16:04:23 +01:00
Hugo Osvaldo Barrera
91c16b3215 Add a changelog entry for vcard 4.0 support 2023-10-07 03:26:04 +02:00
wrvsrx
d45ae04006 Update doc about use_vcard_4 2023-10-06 23:18:26 +02:00
wrvsrx
9abf9c8e45 Add an option to use vCard 4.0
Fix #503
2023-10-06 23:18:21 +02:00
Hugo Osvaldo Barrera
0f0e5b97d3 Ignore type checking lines in coverage report 2023-09-25 16:24:52 +02:00
Hugo Osvaldo Barrera
301aa0e16f pre-commit run --all 2023-09-24 12:41:56 +02:00
Hugo Osvaldo Barrera
dcd3b7a359 pre-commit autoupdate 2023-09-24 12:35:40 +02:00
Hugo Osvaldo Barrera
df8c4a1cf5 pre-commit: fix ruff hook being a no-op 2023-09-24 12:35:16 +02:00
suiso67
5a17ec1bba Fix wrong document formatting 2023-09-08 13:03:00 +02:00
suiso67
ab3aa108fc Fix broken Arch Linux package link 2023-09-08 13:03:00 +02:00
Justin !
f194bb0a4c Do not allow None value if we assert they're not None on the next line
This change imply changing the `save_status` parameters order. If you
don't like that, I can drop this commit.
2023-08-23 16:20:21 +02:00
Justin !
c073d55b2f Don't allow load_status to return None 2023-08-23 16:20:21 +02:00
Justin !
3611e7d62f Add type hint to vdirsyncer/cli/utils.py 2023-08-23 16:20:21 +02:00
Jan Moeller
adc974bdd1 docs: add changelog for #1081 2023-08-06 12:45:42 +02:00
Jan Moeller
efad9eb624 fix(repair_collection): use DiscoverResult logic to discover collection
fixes error: `DAVSession.__init__() missing 1 required keyword-only argument:
'connector'`.
Reuses the existing logic in DiscoverResult to determine if the storage requires
a 'connector' arg.
2023-08-06 12:45:42 +02:00
pre-commit-ci[bot]
246568f149 [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
2023-07-14 11:55:08 +02:00
Justin !
439f1e6f50 Run ruff --fix vdirsyncer 2023-07-14 11:55:08 +02:00
Justin !
ef8e8980d1 Add Typing annotation to cli/config.py 2023-07-14 11:55:08 +02:00
Justin !
08616abbb5 Add typing annotation to cli/__init__.py 2023-07-14 11:55:08 +02:00
Hugo Osvaldo Barrera
4237ff863c Slightly improve docs for configuring google 2023-07-13 12:34:57 +02:00
Hugo Osvaldo Barrera
1a6ad54543 ci: Standardise jobs a bit 2023-07-13 12:33:58 +02:00
Claudio Noguera
203468fd25 Update config.rst
This is the way it actually works.
With web application, a 400 is returned. With desktop it works fine
2023-07-13 12:32:00 +02:00
Hugo Osvaldo Barrera
6368af1365 ci: remove superfluous export
We're now using a virtualenv.
2023-06-26 19:25:53 +02:00
Hugo Osvaldo Barrera
b38306bdd0 ci: Ensure that minimal job runs on older Python
Fixes: https://github.com/pimutils/vdirsyncer/issues/1077
2023-06-26 19:25:40 +02:00
Hugo Osvaldo Barrera
d26557bee3 Python 3.10 and 3.11 are also supported
We've been running 3.11 on CI for a while now.
2023-06-26 19:04:23 +02:00
Hugo Osvaldo Barrera
b9f749467c Add forward-compatibility for storage type parameter
The Rust rewrite of vdirsyncer requires explicitly specifying what type
of "filesystem" storage is being used. These can be either
"filesystem/icalendar" or "filesystem/vcard".

Add forward-compatibility with this upcoming format, by allowing (but
ignoring) a slash and anything after it.

This makes configuration files that have been updated for the Rust
implementation compatible with the Python implementation.

Closes: https://github.com/pimutils/vdirsyncer/pull/1075
2023-06-26 19:01:52 +02:00
Hugo Osvaldo Barrera
7e5910a341 ci: use virtualenvs for jobs that use pip
Pip now refuses to tamper with the system python installation.
2023-06-26 19:00:32 +02:00
Hugo Osvaldo Barrera
7403182645 Update changelog with recent updates 2023-06-26 18:50:00 +02:00
Henning Sudbrock
bad381e5ba Fix link to GNU Guix package in documentation
Fixes #1071
2023-05-21 10:47:07 +02:00
pre-commit-ci[bot]
700586d959 [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/psf/black: 23.1.0 → 23.3.0](https://github.com/psf/black/compare/23.1.0...23.3.0)
- [github.com/pre-commit/mirrors-mypy: v1.0.1 → v1.2.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.0.1...v1.2.0)
- [github.com/charliermarsh/ruff-pre-commit: v0.0.249 → v0.0.265](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.249...v0.0.265)
2023-05-09 11:52:48 +02:00
Hugo Osvaldo Barrera
c1d3efb6b8 Make broken test as xfail 2023-05-09 11:52:48 +02:00
Hugo Osvaldo Barrera
c55b969791 Add some type hints 2023-05-09 11:52:48 +02:00
Florian Preinstorfer
079a156bf8 Remove superflous string quotes 2023-03-30 14:03:32 +02:00
Hugo Osvaldo Barrera
242216d85a Brain typo 2023-03-28 23:23:44 +02:00
Hugo Osvaldo Barrera
b1ef68089b Properly populate cache during SingleFileStorage._at_once
The call to `list` was never awaited and the stream never drained, so
the cache remained empty.
2023-03-10 12:27:48 +01:00
Enrico Guiraud
85ae33955f
Prevent single file storage from performing unnecessary N^2 loop
For single file storage we wrap the logic of get_multi with the
at_once context manager so that `self.list()` (which is called by
`self.get()`) actually caches the items rather than re-computing
them at every call.

This should largely mitigate the performance issue described at
https://github.com/pimutils/vdirsyncer/issues/818 . The issue
discussion also contains more background about this patch.
2023-03-09 17:59:20 -06:00
Timo Ludwig
54a90aa5dd Document caveats of Google contacts storage
- Group labels are not mapped to CATEGORIES property
- BDAY property is missing when date is incomplete
2023-03-06 09:57:00 +01:00
Hugo Osvaldo Barrera
443ae3d3e7 Fix crash when using auth certs
Fixes: https://github.com/pimutils/vdirsyncer/issues/1033
2023-02-28 16:21:28 +01:00
pre-commit-ci[bot]
3bf9a3d684 [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/pre-commit/mirrors-mypy: v1.0.0 → v1.0.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.0.0...v1.0.1)
- [github.com/charliermarsh/ruff-pre-commit: v0.0.246 → v0.0.249](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.246...v0.0.249)
2023-02-21 10:57:28 +01:00
Hugo Osvaldo Barrera
2138c43456 Update docs for Google storages
References: https://github.com/pimutils/vdirsyncer/pull/985
References: https://github.com/pimutils/vdirsyncer/issues/975
Closes: https://github.com/pimutils/vdirsyncer/issues/1028
Closes: https://github.com/pimutils/vdirsyncer/issues/808
2023-02-16 23:17:27 +01:00
Hugo Osvaldo Barrera
5a46c93987 mypy: Drop unnecessary rule exclusion 2023-02-16 16:51:12 +01:00
Hugo Osvaldo Barrera
180f91f0fe Move mypy config to pyproject.toml 2023-02-16 16:51:12 +01:00
Hugo Osvaldo Barrera
6443d37c97 Move pytest config to pyproject.toml 2023-02-16 16:51:12 +01:00
pre-commit-ci[bot]
13ca008380 [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/pre-commit/mirrors-mypy: v0.991 → v1.0.0](https://github.com/pre-commit/mirrors-mypy/compare/v0.991...v1.0.0)
- [github.com/charliermarsh/ruff-pre-commit: v0.0.243 → v0.0.246](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.243...v0.0.246)
2023-02-14 10:09:36 +01:00
Hugo Osvaldo Barrera
24cb49f64c Remove superfluous exception parens 2023-02-10 16:57:39 +01:00
Hugo Osvaldo Barrera
defe8e2591 Fix broken BSD link
pkgsrc.se is no more.
2023-02-10 16:54:08 +01:00
pre-commit-ci[bot]
e11fa357ff [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
2023-02-07 12:21:17 +01:00
pre-commit-ci[bot]
e20a65793e [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/psf/black: 22.12.0 → 23.1.0](https://github.com/psf/black/compare/22.12.0...23.1.0)
- [github.com/charliermarsh/ruff-pre-commit: v0.0.238 → v0.0.243](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.238...v0.0.243)
2023-02-07 12:21:17 +01:00
pre-commit-ci[bot]
df14865f43 [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/psf/black: 22.10.0 → 22.12.0](https://github.com/psf/black/compare/22.10.0...22.12.0)
- [github.com/charliermarsh/ruff-pre-commit: v0.0.237 → v0.0.238](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.237...v0.0.238)
2023-01-31 10:09:31 +01:00
Hugo Osvaldo Barrera
f45ecf6ad0 Simplify management of documentation dependencies 2023-01-31 01:11:24 +01:00
Hugo Osvaldo Barrera
72bcef282d Remove another unnecessary wrapper 2023-01-31 01:11:24 +01:00
Hugo Osvaldo Barrera
3a56f26d05 Drop unnecessary wrapping 2023-01-31 01:11:24 +01:00
Hugo Osvaldo Barrera
4dd17c7f59 This make target is gone; use pytest directly 2023-01-31 01:11:04 +01:00
Hugo Osvaldo Barrera
73f2554932 Check typos via pre-commit
So this runs more often (and also in CI).
2023-01-31 01:11:04 +01:00
Hugo Osvaldo Barrera
627f574777 Remove unnecessary param 2023-01-31 01:11:04 +01:00
Hugo Osvaldo Barrera
37a7f9bea8 This test is not async 2023-01-31 00:21:04 +01:00
Hugo Osvaldo Barrera
d2d1532883 Remove indirection 2023-01-31 00:21:04 +01:00
Hugo Osvaldo Barrera
0dcef26b9d Update build jobs with more descriptive names 2023-01-31 00:21:04 +01:00
Hugo Osvaldo Barrera
d646357cd3 Use ruff for code checking and linting
`isort` is failing in pre-commit.ci right now, so this might be good
timing.

See: https://whynothugo.nl/journal/2023/01/20/notes-on-ruff/
2023-01-31 00:19:14 +01:00
Hugo Osvaldo Barrera
8c6c0be15a This mutation of the control variable is intended 2023-01-31 00:19:14 +01:00
Hugo Osvaldo Barrera
dfc29db312 Use dict literal instead of dict() call 2023-01-31 00:19:14 +01:00
Hugo Osvaldo Barrera
a41cf64b6c Update ArchLinux CI setup 2023-01-31 00:19:14 +01:00
Hugo Osvaldo Barrera
a2eda52b71 Hottub expect DOS-style extensions 2023-01-31 00:19:14 +01:00
Hugo Osvaldo Barrera
61006f0685 Improve installation documentation
`pipx` is a lot simpler on any setup where it is available.
2023-01-26 18:53:19 +01:00
Hugo Osvaldo Barrera
9b48bccde2 Fix return type
Fixes: https://github.com/pimutils/vdirsyncer/issues/1036
2023-01-26 18:01:08 +01:00
Hugo Osvaldo Barrera
7c72caef3f docs: We're not using aiohttp, not requests 2023-01-26 10:43:46 +01:00
Hugo Osvaldo Barrera
0045b23800 Add missing changelog entry
See: https://github.com/pimutils/vdirsyncer/pull/1031
2023-01-13 16:37:32 +01:00
Hugo Osvaldo Barrera
c07fbc2053 Add missing changelog entry
See: https://github.com/pimutils/vdirsyncer/pull/1016
2023-01-13 16:36:45 +01:00
Daniele Ricci
e3485beb45 Enable environment variables for HTTP proxy 2023-01-13 16:36:36 +01:00
Tonus
0f83fd96d5 Add Slackware as build-able version
I maintain the build script for Slackware on the slackbuilds.org repo (endorsed by Slackware).
2023-01-07 01:39:12 +01:00
chrysle
8980a80560 Corrected installation steps for Ubuntu and pip 2023-01-06 14:50:27 +01:00
pre-commit-ci[bot]
90b6ce1d04 [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/asottile/pyupgrade: v3.2.2 → v3.3.0](https://github.com/asottile/pyupgrade/compare/v3.2.2...v3.3.0)
2022-12-09 12:01:43 +01:00
waclaw66
7a801d3d5d Google Contacts discover fix 2022-12-09 12:00:52 +01:00
Hugo Osvaldo Barrera
2c44f7d773 Update flake8 comments to "new" format 2022-12-03 16:25:08 +01:00
Hugo Osvaldo Barrera
6506c86f58 Remove obsolete config value
This was used by flake8-import-order
2022-12-03 16:25:08 +01:00
pre-commit-ci[bot]
51b409017d [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/pre-commit/pre-commit-hooks: v4.3.0 → v4.4.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.3.0...v4.4.0)
- [github.com/PyCQA/flake8: 5.0.4 → 6.0.0](https://github.com/PyCQA/flake8/compare/5.0.4...6.0.0)
- [github.com/pre-commit/mirrors-mypy: v0.990 → v0.991](https://github.com/pre-commit/mirrors-mypy/compare/v0.990...v0.991)
2022-12-03 16:25:08 +01:00
95 changed files with 1772 additions and 968 deletions

View file

@ -5,16 +5,17 @@ packages:
- docker - docker
- docker-compose - docker-compose
# Build dependencies: # Build dependencies:
- python-pip
- python-wheel - python-wheel
- python-build
- python-installer
- python-setuptools-scm
# Runtime dependencies: # Runtime dependencies:
- python-atomicwrites
- python-click - python-click
- python-click-log - python-click-log
- python-click-threading - python-click-threading
- python-requests - python-requests
- python-requests-toolbelt
- python-aiohttp-oauthlib - python-aiohttp-oauthlib
- python-tenacity
# Test dependencies: # Test dependencies:
- python-hypothesis - python-hypothesis
- python-pytest-cov - python-pytest-cov
@ -34,11 +35,14 @@ environment:
REQUIREMENTS: release REQUIREMENTS: release
# TODO: ETESYNC_TESTS # TODO: ETESYNC_TESTS
tasks: tasks:
- setup: | - check-python:
python --version | grep 'Python 3.13'
- docker: |
sudo systemctl start docker sudo systemctl start docker
- setup: |
cd vdirsyncer cd vdirsyncer
python setup.py build python -m build --wheel --skip-dependency-check --no-isolation
sudo pip install --no-index . sudo python -m installer dist/*.whl
- test: | - test: |
cd vdirsyncer cd vdirsyncer
make -e ci-test make -e ci-test

View file

@ -3,11 +3,13 @@
# TODO: It might make more sense to test with an older Ubuntu or Fedora version # TODO: It might make more sense to test with an older Ubuntu or Fedora version
# here, and consider that our "oldest supported environment". # here, and consider that our "oldest supported environment".
image: archlinux image: alpine/3.19 # python 3.11
packages: packages:
- docker - docker
- docker-cli
- docker-compose - docker-compose
- python-pip - py3-pip
- python3-dev
sources: sources:
- https://github.com/pimutils/vdirsyncer - https://github.com/pimutils/vdirsyncer
environment: environment:
@ -16,15 +18,19 @@ environment:
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79 CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
DAV_SERVER: radicale xandikos DAV_SERVER: radicale xandikos
REQUIREMENTS: minimal REQUIREMENTS: minimal
# TODO: ETESYNC_TESTS
tasks: tasks:
- venv: |
python3 -m venv $HOME/venv
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
- docker: |
sudo addgroup $(whoami) docker
sudo service docker start
- setup: | - setup: |
sudo systemctl start docker
cd vdirsyncer cd vdirsyncer
# Hack, no idea why it's needed
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
make -e install-dev make -e install-dev
- test: | - test: |
cd vdirsyncer cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/
make -e ci-test make -e ci-test
make -e ci-test-storage make -e ci-test-storage

View file

@ -17,16 +17,21 @@ environment:
REQUIREMENTS: release REQUIREMENTS: release
# TODO: ETESYNC_TESTS # TODO: ETESYNC_TESTS
tasks: tasks:
- setup: | - venv: |
python -m venv $HOME/venv
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
- docker: |
sudo systemctl start docker sudo systemctl start docker
- setup: |
cd vdirsyncer cd vdirsyncer
make -e install-dev -e install-docs make -e install-dev
- test: | - test: |
cd vdirsyncer cd vdirsyncer
# Non-system python is used for packages:
export PATH=$PATH:~/.local/bin/
make -e ci-test make -e ci-test
make -e ci-test-storage make -e ci-test-storage
- check: |
cd vdirsyncer
make check
- check-secrets: | - check-secrets: |
# Stop here if this is a PR. PRs can't run with the below secrets. # Stop here if this is a PR. PRs can't run with the below secrets.
[ -f ~/fastmail-secrets ] || complete-build [ -f ~/fastmail-secrets ] || complete-build

1
.envrc Normal file
View file

@ -0,0 +1 @@
layout python3

View file

@ -1,6 +0,0 @@
python37:
image: python:3.7
before_script:
- make -e install-dev
script:
- make -e ci-test

View file

@ -1,6 +1,6 @@
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0 rev: v5.0.0
hooks: hooks:
- id: trailing-whitespace - id: trailing-whitespace
args: [--markdown-linebreak-ext=md] args: [--markdown-linebreak-ext=md]
@ -8,27 +8,8 @@ repos:
- id: check-toml - id: check-toml
- id: check-added-large-files - id: check-added-large-files
- id: debug-statements - id: debug-statements
- repo: https://github.com/PyCQA/flake8
rev: "5.0.4"
hooks:
- id: flake8
additional_dependencies: [flake8-import-order, flake8-bugbear]
- repo: https://github.com/psf/black
rev: "22.10.0"
hooks:
- id: black
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
- id: isort
name: isort (python)
- repo: https://github.com/asottile/pyupgrade
rev: v3.2.2
hooks:
- id: pyupgrade
args: [--py37-plus]
- repo: https://github.com/pre-commit/mirrors-mypy - repo: https://github.com/pre-commit/mirrors-mypy
rev: "v0.990" rev: "v1.15.0"
hooks: hooks:
- id: mypy - id: mypy
files: vdirsyncer/.* files: vdirsyncer/.*
@ -36,4 +17,23 @@ repos:
- types-setuptools - types-setuptools
- types-docutils - types-docutils
- types-requests - types-requests
- types-atomicwrites - repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.11.4'
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
- repo: local
hooks:
- id: typos-syncroniz
name: typos-syncroniz
language: system
# Not how you spell "synchronise"
entry: sh -c "git grep -i syncroniz"
files: ".*/.*"
- id: typos-text-icalendar
name: typos-text-icalendar
language: system
# It's "text/calendar", no "i".
entry: sh -c "git grep -i 'text/icalendar'"
files: ".*/.*"

16
.readthedocs.yaml Normal file
View file

@ -0,0 +1,16 @@
version: 2
sphinx:
configuration: docs/conf.py
build:
os: "ubuntu-22.04"
tools:
python: "3.9"
python:
install:
- method: pip
path: .
extra_requirements:
- docs

View file

@ -4,10 +4,13 @@ Contributors
In alphabetical order: In alphabetical order:
- Ben Boeckel - Ben Boeckel
- Bleala
- Christian Geier - Christian Geier
- Clément Mondon - Clément Mondon
- Corey Hinshaw - Corey Hinshaw
- Kai Herlemann
- Hugo Osvaldo Barrera - Hugo Osvaldo Barrera
- Jason Cox
- Julian Mehne - Julian Mehne
- Malte Kiefer - Malte Kiefer
- Marek Marczykowski-Górecki - Marek Marczykowski-Górecki
@ -16,6 +19,7 @@ In alphabetical order:
- rEnr3n - rEnr3n
- Thomas Weißschuh - Thomas Weißschuh
- Witcher01 - Witcher01
- samm81
Special thanks goes to: Special thanks goes to:

View file

@ -9,6 +9,60 @@ Package maintainers and users who have to manually update their installation
may want to subscribe to `GitHub's tag feed may want to subscribe to `GitHub's tag feed
<https://github.com/pimutils/vdirsyncer/tags.atom>`_. <https://github.com/pimutils/vdirsyncer/tags.atom>`_.
Version 0.21.0
==============
- Implement retrying for ``google`` storage type when a rate limit is reached.
- ``tenacity`` is now a required dependency.
- Drop support for Python 3.8.
- Retry transient network errors for nullipotent requests.
Version 0.20.0
==============
- Remove dependency on abandoned ``atomicwrites`` library.
- Implement ``filter_hook`` for the HTTP storage.
- Drop support for Python 3.7.
- Add support for Python 3.12 and Python 3.13.
- Properly close the status database after using. This especially affects tests,
where we were leaking a large amount of file descriptors.
- Extend supported versions of ``aiostream`` to include 0.7.x.
Version 0.19.3
==============
- Added a no_delete option to the storage configuration. :gh:`1090`
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
- Add an option to request vCard v4.0. :gh:`1066`
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
- Implement digest auth. :gh:`1137`
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
Version 0.19.2
==============
- Improve the performance of ``SingleFileStorage``. :gh:`818`
- Properly document some caveats of the Google Contacts storage.
- Fix crash when using auth certs. :gh:`1033`
- The ``filesystem`` storage can be specified with ``type =
"filesystem/icalendar"`` or ``type = "filesystem/vcard"``. This has no
functional impact, and is merely for forward compatibility with the Rust
implementation of vdirsyncer.
- Python 3.10 and 3.11 are officially supported.
- Instructions for integrating with Google CalDav/CardDav have changed.
Applications now need to be registered as "Desktop applications". Using "Web
application" no longer works due to changes on Google's side. :gh:`1078`
Version 0.19.1
==============
- Fixed crash when operating on Google Contacts. :gh:`994`
- The ``HTTP_PROXY`` and ``HTTPS_PROXY`` are now respected. :gh:`1031`
- Instructions for integrating with Google CalDav/CardDav have changed.
Applications now need to be registered as "Web Application". :gh:`975`
- Various documentation updates.
Version 0.19.0 Version 0.19.0
============== ==============
@ -19,6 +73,10 @@ Version 0.19.0
- Add a new ``showconfig`` status. This prints *some* configuration values as - Add a new ``showconfig`` status. This prints *some* configuration values as
JSON. This is intended to be used by external tools and helpers that interact JSON. This is intended to be used by external tools and helpers that interact
with ``vdirsyncer``, and considered experimental. with ``vdirsyncer``, and considered experimental.
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
"create", it implicitly creates missing collections during sync without user
prompts. This simplifies workflows where collections should be automatically
created on both sides.
- Update TLS-related tests that were failing due to weak MDs. :gh:`903` - Update TLS-related tests that were failing due to weak MDs. :gh:`903`
- ``pytest-httpserver`` and ``trustme`` are now required for tests. - ``pytest-httpserver`` and ``trustme`` are now required for tests.
- ``pytest-localserver`` is no longer required for tests. - ``pytest-localserver`` is no longer required for tests.

View file

@ -20,14 +20,8 @@ export CI := false
# Whether to generate coverage data while running tests. # Whether to generate coverage data while running tests.
export COVERAGE := $(CI) export COVERAGE := $(CI)
# Additional arguments that should be passed to py.test.
PYTEST_ARGS =
# Variables below this line are not very interesting for getting started. # Variables below this line are not very interesting for getting started.
TEST_EXTRA_PACKAGES =
PYTEST = py.test $(PYTEST_ARGS)
CODECOV_PATH = /tmp/codecov.sh CODECOV_PATH = /tmp/codecov.sh
all: all:
@ -35,32 +29,21 @@ all:
ci-test: ci-test:
curl -s https://codecov.io/bash > $(CODECOV_PATH) curl -s https://codecov.io/bash > $(CODECOV_PATH)
$(PYTEST) --cov vdirsyncer --cov-append tests/unit/ tests/system/ pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/
bash $(CODECOV_PATH) -c bash $(CODECOV_PATH) -c
ci-test-storage: ci-test-storage:
curl -s https://codecov.io/bash > $(CODECOV_PATH) curl -s https://codecov.io/bash > $(CODECOV_PATH)
set -ex; \ set -ex; \
for server in $(DAV_SERVER); do \ for server in $(DAV_SERVER); do \
DAV_SERVER=$$server $(PYTEST) --cov vdirsyncer --cov-append tests/storage; \ DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \
done done
bash $(CODECOV_PATH) -c bash $(CODECOV_PATH) -c
test: check:
$(PYTEST) ruff check
ruff format --diff
style: #mypy vdirsyncer
pre-commit run --all
! git grep -i syncroniz */*
! git grep -i 'text/icalendar' */*
sphinx-build -W -b html ./docs/ ./docs/_build/html/
install-docs:
pip install -Ur docs-requirements.txt
docs:
cd docs && make html
sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/
release-deb: release-deb:
sh scripts/release-deb.sh debian jessie sh scripts/release-deb.sh debian jessie
@ -71,11 +54,10 @@ release-deb:
install-dev: install-dev:
pip install -U pip setuptools wheel pip install -U pip setuptools wheel
pip install -e . pip install -e '.[test,check,docs]'
pip install -Ur test-requirements.txt $(TEST_EXTRA_PACKAGES)
pip install pre-commit
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \ set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \ pip install pyproject-dependencies && \
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
fi fi
.PHONY: docs .PHONY: docs

View file

@ -40,7 +40,7 @@ servers. It can also be used to synchronize calendars and/or addressbooks
between two servers directly. between two servers directly.
It aims to be for calendars and contacts what `OfflineIMAP It aims to be for calendars and contacts what `OfflineIMAP
<http://offlineimap.org/>`_ is for emails. <https://www.offlineimap.org/>`_ is for emails.
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/ .. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
@ -59,6 +59,15 @@ Links of interest
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_ * `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
Dockerized
=================
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
Docker environment, you can check out the following GitHub Repository:
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
License License
======= =======

View file

@ -16,6 +16,9 @@ SPDX-License-Identifier: BSD-3-Clause
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de> SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
Author: <bernhard.reiter@intevation.de> Author: <bernhard.reiter@intevation.de>
""" """
from __future__ import annotations
import re import re
import subprocess import subprocess
import sys import sys
@ -51,8 +54,8 @@ def main(ical1_filename, ical2_filename):
f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n" f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
"or the second entry:\n" "or the second entry:\n"
f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?", f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
*additional_args,
] ]
+ additional_args
) )
if r.returncode == 2: if r.returncode == 2:

View file

@ -1,3 +0,0 @@
sphinx != 1.4.7
sphinx_rtd_theme
setuptools_scm

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import datetime import datetime
import os import os
@ -18,7 +20,7 @@ copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
release = get_distribution("vdirsyncer").version release = get_distribution("vdirsyncer").version
version = ".".join(release.split(".")[:2]) # The short X.Y version. version = ".".join(release.split(".")[:2]) # The short X.Y version.
rst_epilog = ".. |vdirsyncer_version| replace:: %s" % release rst_epilog = f".. |vdirsyncer_version| replace:: {release}"
exclude_patterns = ["_build"] exclude_patterns = ["_build"]
@ -35,9 +37,7 @@ except ImportError:
html_theme = "default" html_theme = "default"
if not on_rtd: if not on_rtd:
print("-" * 74) print("-" * 74)
print( print("Warning: sphinx-rtd-theme not installed, building with default theme.")
"Warning: sphinx-rtd-theme not installed, building with default " "theme."
)
print("-" * 74) print("-" * 74)
html_static_path = ["_static"] html_static_path = ["_static"]
@ -76,7 +76,7 @@ def github_issue_role(name, rawtext, text, lineno, inliner, options=None, conten
try: try:
issue_num = int(text) issue_num = int(text)
if issue_num <= 0: if issue_num <= 0:
raise ValueError() raise ValueError
except ValueError: except ValueError:
msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno) msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg) prb = inliner.problematic(rawtext, rawtext, msg)

View file

@ -61,7 +61,8 @@ Pair Section
sync`` is executed. See also :ref:`collections_tutorial`. sync`` is executed. See also :ref:`collections_tutorial`.
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
autodiscovery on a specific storage. autodiscovery on a specific storage. It means all the collections on side A /
side B.
If the collection you want to sync doesn't have the same name on each side, If the collection you want to sync doesn't have the same name on each side,
you may also use a value of the form ``["config_name", "name_a", "name_b"]``. you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
@ -71,8 +72,8 @@ Pair Section
Examples: Examples:
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the - ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
collections from side B, and also the collections named "foo" and "bar". the collections from side B, and also the collections named "foo" and "bar".
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all - ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
existing collections on either side. existing collections on either side.
@ -127,6 +128,16 @@ Pair Section
The ``conflict_resolution`` parameter applies for these properties too. The ``conflict_resolution`` parameter applies for these properties too.
.. _implicit_def:
- ``implicit``: Opt into implicitly creating collections. Example::
implicit = "create"
When set to "create", missing collections are automatically created on both
sides during sync without prompting the user. This simplifies workflows where
all collections should be synchronized bidirectionally.
.. _storage_config: .. _storage_config:
Storage Section Storage Section
@ -238,6 +249,7 @@ CalDAV and CardDAV
#useragent = "vdirsyncer/0.16.4" #useragent = "vdirsyncer/0.16.4"
#verify_fingerprint = null #verify_fingerprint = null
#auth_cert = null #auth_cert = null
#use_vcard_4 = false
:param url: Base URL or an URL to an addressbook. :param url: Base URL or an URL to an addressbook.
:param username: Username for authentication. :param username: Username for authentication.
@ -255,6 +267,7 @@ CalDAV and CardDAV
certificate and the key or a list of paths to the files certificate and the key or a list of paths to the files
with them. with them.
:param useragent: Default ``vdirsyncer``. :param useragent: Default ``vdirsyncer``.
:param use_vcard_4: Whether the server uses vCard 4.0.
Google Google
++++++ ++++++
@ -268,6 +281,14 @@ in terms of data safety**. See `this blog post
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back <https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
up your data. up your data.
Another caveat is that Google group labels are not synced with vCard's
`CATEGORIES <https://www.rfc-editor.org/rfc/rfc6350#section-6.7.1>`_ property
(also see :gh:`814` and
`upstream issue #36761530 <https://issuetracker.google.com/issues/36761530>`_
for reference) and the
`BDAY <https://www.rfc-editor.org/rfc/rfc6350#section-6.2.5>`_ property is not
synced when only partial date information is present (e.g. the year is missing).
At first run you will be asked to authorize application for Google account At first run you will be asked to authorize application for Google account
access. access.
@ -279,25 +300,29 @@ Furthermore you need to register vdirsyncer as an application yourself to
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
Service to hardcode those into opensource software [googleterms]_: Service to hardcode those into opensource software [googleterms]_:
1. Go to the `Google API Manager <https://console.developers.google.com>`_ and 1. Go to the `Google API Manager <https://console.developers.google.com>`_
create a new project under any name.
2. Create a new project under any name.
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the 2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
Calendar and Contacts APIs, those are different and won't work). There should Calendar and Contacts APIs, those are different and won't work). There should
be a searchbox where you can just enter those terms. be a search box where you can just enter those terms.
3. In the sidebar, select "Credentials" and create a new "OAuth Client ID". The 3. In the sidebar, select "Credentials", then "Create Credentials" and create a
application type is "Other". new "OAuth Client ID".
You'll be prompted to create an OAuth consent screen first. Fill out that You'll be prompted to create an OAuth consent screen first. Fill out that
form however you like. form however you like.
After setting up the consent screen, finish creating the new "OAuth Client
ID". The correct application type is "Desktop application".
4. Finally you should have a Client ID and a Client secret. Provide these in 4. Finally you should have a Client ID and a Client secret. Provide these in
your storage config. your storage config.
The ``token_file`` parameter should be a filepath where vdirsyncer can later The ``token_file`` parameter should be a path to a file where vdirsyncer can
store authentication-related data. You do not need to create the file itself later store authentication-related data. You do not need to create the file
or write anything to it. itself or write anything to it.
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_, .. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
section "Confidential Matters". section "Confidential Matters".
@ -305,7 +330,7 @@ or write anything to it.
.. note:: .. note::
You need to configure which calendars Google should offer vdirsyncer using You need to configure which calendars Google should offer vdirsyncer using
a rather hidden `settings page a secret `settings page
<https://calendar.google.com/calendar/syncselect>`_. <https://calendar.google.com/calendar/syncselect>`_.
.. storage:: google_calendar .. storage:: google_calendar
@ -345,6 +370,10 @@ or write anything to it.
:param client_id/client_secret: OAuth credentials, obtained from the Google :param client_id/client_secret: OAuth credentials, obtained from the Google
API Manager. API Manager.
The current flow is not ideal, but Google has deprecated the previous APIs used
for this without providing a suitable replacement. See :gh:`975` for discussion
on the topic.
Local Local
+++++ +++++
@ -360,6 +389,7 @@ Local
fileext = "..." fileext = "..."
#encoding = "utf-8" #encoding = "utf-8"
#post_hook = null #post_hook = null
#pre_deletion_hook = null
#fileignoreext = ".tmp" #fileignoreext = ".tmp"
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
@ -381,6 +411,8 @@ Local
:param post_hook: A command to call for each item creation and :param post_hook: A command to call for each item creation and
modification. The command will be called with the path of the modification. The command will be called with the path of the
new/updated file. new/updated file.
:param pre_deletion_hook: A command to call for each item deletion.
The command will be called with the path of the deleted file.
:param fileignoreext: The file extension to ignore. It is only useful :param fileignoreext: The file extension to ignore. It is only useful
if fileext is set to the empty string. The default is ``.tmp``. if fileext is set to the empty string. The default is ``.tmp``.
@ -462,6 +494,7 @@ leads to an error.
[storage holidays_remote] [storage holidays_remote]
type = "http" type = "http"
url = https://example.com/holidays_from_hicksville.ics url = https://example.com/holidays_from_hicksville.ics
#filter_hook = null
Too many WebCAL providers generate UIDs of all ``VEVENT``-components Too many WebCAL providers generate UIDs of all ``VEVENT``-components
on-the-fly, i.e. all UIDs change every time the calendar is downloaded. on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
@ -486,3 +519,8 @@ leads to an error.
:param auth_cert: Optional. Either a path to a certificate with a client :param auth_cert: Optional. Either a path to a certificate with a client
certificate and the key or a list of paths to the files with them. certificate and the key or a list of paths to the files with them.
:param useragent: Default ``vdirsyncer``. :param useragent: Default ``vdirsyncer``.
:param filter_hook: Optional. A filter command to call for each fetched
item, passed in raw form to stdin and returned via stdout.
If nothing is returned by the filter command, the item is skipped.
This can be used to alter fields as needed when dealing with providers
generating malformed events.

View file

@ -9,7 +9,4 @@ Support and Contact
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for * Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
concrete bug reports and feature requests. concrete bug reports and feature requests.
* Lastly, you can also `contact the author directly * For security issues, contact ``contact@pimutils.org``.
<https://unterwaditzer.net/contact.html>`_. Do this for security issues. If
that doesn't work out (i.e. if I don't respond within one week), use
``contact@pimutils.org``.

View file

@ -81,7 +81,7 @@ virtualenv_ and run this inside of it::
# Install development dependencies, including: # Install development dependencies, including:
# - vdirsyncer from the repo into the virtualenv # - vdirsyncer from the repo into the virtualenv
# - stylecheckers (flake8) and code formatters (autopep8) # - style checks and formatting (ruff)
make install-dev make install-dev
# Install git commit hook for some extra linting and checking # Install git commit hook for some extra linting and checking
@ -89,9 +89,10 @@ virtualenv_ and run this inside of it::
Then you can run:: Then you can run::
make test # The normal testsuite pytest # The normal testsuite
make style # Stylechecker pre-commit run --all # Run all linters (which also run via pre-commit)
make docs # Build the HTML docs, output is at docs/_build/html/ make -C docs html # Build the HTML docs, output is at docs/_build/html/
make -C docs linkcheck # Check docs for any broken links
The ``Makefile`` has a lot of options that allow you to control which tests are The ``Makefile`` has a lot of options that allow you to control which tests are
run, and which servers are tested. Take a look at its code where they are all run, and which servers are tested. Take a look at its code where they are all

View file

@ -10,14 +10,15 @@ OS/distro packages
The following packages are community-contributed and were up-to-date at the The following packages are community-contributed and were up-to-date at the
time of writing: time of writing:
- `ArchLinux <https://www.archlinux.org/packages/community/any/vdirsyncer/>`_ - `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
- `Ubuntu and Debian, x86_64-only - `Ubuntu and Debian, x86_64-only
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist <https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
in the official repositories but may be out of date) in the official repositories but may be out of date)
- `GNU Guix <https://www.gnu.org/software/guix/package-list.html#vdirsyncer>`_ - `GNU Guix <https://packages.guix.gnu.org/packages/vdirsyncer/>`_
- `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_ - `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_
- `BSD (pkgsrc) <http://pkgsrc.se/time/py-vdirsyncer>`_ - `NetBSD <https://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/time/py-vdirsyncer/index.html>`_
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_ - `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
- `Slackware (SlackBuild at Slackbuilds.org) <https://slackbuilds.org/repository/15.0/network/vdirsyncer/>`_
We only support the latest version of vdirsyncer, which is at the time of this We only support the latest version of vdirsyncer, which is at the time of this
writing |vdirsyncer_version|. Please **do not file bugs if you use an older writing |vdirsyncer_version|. Please **do not file bugs if you use an older
@ -41,7 +42,7 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
use Python's package manager "pip". First, you'll have to check that the use Python's package manager "pip". First, you'll have to check that the
following things are installed: following things are installed:
- Python 3.7+ and pip. - Python 3.9 to 3.13 and pip.
- ``libxml`` and ``libxslt`` - ``libxml`` and ``libxslt``
- ``zlib`` - ``zlib``
- Linux or macOS. **Windows is not supported**, see :gh:`535`. - Linux or macOS. **Windows is not supported**, see :gh:`535`.
@ -49,19 +50,45 @@ following things are installed:
On Linux systems, using the distro's package manager is the best On Linux systems, using the distro's package manager is the best
way to do this, for example, using Ubuntu:: way to do this, for example, using Ubuntu::
sudo apt-get install libxml2 libxslt1.1 zlib1g python sudo apt-get install libxml2 libxslt1.1 zlib1g python3
Then you have several options. The following text applies for most Python Then you have several options. The following text applies for most Python
software by the way. software by the way.
pipx: The clean, easy way
~~~~~~~~~~~~~~~~~~~~~~~~~
pipx_ is a new package manager for Python-based software that automatically
sets up a virtual environment for each program it installs. Please note that
installing via pipx will not include manual pages nor systemd services.
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
Assuming that pipx is installed, vdirsyncer can be installed with::
pipx install vdirsyncer
It can later be updated to the latest version with::
pipx upgrade vdirsyncer
And can be uninstalled with::
pipx uninstall vdirsyncer
This last command will remove vdirsyncer and any dependencies installed into
the above location.
.. _pipx: https://github.com/pipxproject/pipx
The dirty, easy way The dirty, easy way
~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~
The easiest way to install vdirsyncer at this point would be to run:: If pipx is not available on your distribution, the easiest way to install
vdirsyncer at this point would be to run::
pip install --user --ignore-installed vdirsyncer pip install --ignore-installed vdirsyncer
- ``--user`` is to install without root rights (into your home directory)
- ``--ignore-installed`` is to work around Debian's potentially broken packages - ``--ignore-installed`` is to work around Debian's potentially broken packages
(see :ref:`debian-urllib3`). (see :ref:`debian-urllib3`).
@ -92,25 +119,4 @@ This method has two advantages:
distro-specific issues. distro-specific issues.
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely. - You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
The clean, easy way
~~~~~~~~~~~~~~~~~~~
pipx_ is a new package manager for Python-based software that automatically
sets up a virtualenv for each program you install. Assuming you have it
installed on your operating system, you can do::
pipx install vdirsyncer
and ``~/.local/pipx/venvs/vdirsyncer`` will be your new vdirsyncer installation. To
update vdirsyncer to the latest version::
pipx upgrade vdirsyncer
If you're done with vdirsyncer, you can do::
pipx uninstall vdirsyncer
and vdirsyncer will be uninstalled, including its dependencies.
.. _virtualenv: https://virtualenv.readthedocs.io/ .. _virtualenv: https://virtualenv.readthedocs.io/
.. _pipx: https://github.com/pipxproject/pipx

View file

@ -78,3 +78,19 @@ You can also simply prompt for the password::
type = "caldav" type = "caldav"
username = "myusername" username = "myusername"
password.fetch = ["prompt", "Password for CalDAV"] password.fetch = ["prompt", "Password for CalDAV"]
Environment variable
====================
To read the password from an environment variable::
[storage foo]
type = "caldav"
username = "myusername"
password.fetch = ["command", "printenv", "DAV_PW"]
This is especially handy if you use the same password multiple times
(say, for a CardDAV and a CalDAV storage).
On bash, you can read and export the password without printing::
read -s -p "DAV Password: " DAV_PW && export DAV_PW

View file

@ -46,15 +46,16 @@ You can install the all development dependencies with::
make install-dev make install-dev
You probably don't want this since it will use pip to download the You probably don't want this since it will use pip to download the
dependencies. Alternatively you can find the testing dependencies in dependencies. Alternatively test dependencies are listed as ``test`` optional
``test-requirements.txt``, again with lower-bound version requirements. dependencies in ``pyproject.toml``, again with lower-bound version
requirements.
You also have to have vdirsyncer fully installed at this point. Merely You also have to have vdirsyncer fully installed at this point. Merely
``cd``-ing into the tarball will not be sufficient. ``cd``-ing into the tarball will not be sufficient.
Running the tests happens with:: Running the tests happens with::
make test pytest
Hypothesis will randomly generate test input. If you care about deterministic Hypothesis will randomly generate test input. If you care about deterministic
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``:: tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
@ -73,10 +74,11 @@ Using Sphinx_ you can generate the documentation you're reading right now in a
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
take care of the HTML docs' formatting. take care of the HTML docs' formatting.
You can find a list of dependencies in ``docs-requirements.txt``. Again, you You can find a list of dependencies in ``pyproject.toml``, in the
can install those using pip with:: ``project.optional-dependencies`` section as ``docs``. Again, you can install
those using pip with::
make install-docs pip install '.[docs]'
Then change into the ``docs/`` directory and build whatever format you want Then change into the ``docs/`` directory and build whatever format you want
using the ``Makefile`` in there (run ``make`` for the formats you can build). using the ``Makefile`` in there (run ``make`` for the formats you can build).

View file

@ -18,5 +18,5 @@ package that don't play well with packages assuming a normal ``requests``. This
is due to stubbornness on both sides. is due to stubbornness on both sides.
See :gh:`82` and :gh:`140` for past discussions. You have one option to work See :gh:`82` and :gh:`140` for past discussions. You have one option to work
around this, that is, to install vdirsyncer in a virtualenv, see around this, that is, to install vdirsyncer in a virtual environment, see
:ref:`manual-installation`. :ref:`manual-installation`.

View file

@ -14,14 +14,14 @@ To pin the certificate by fingerprint::
[storage foo] [storage foo]
type = "caldav" type = "caldav"
... ...
verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA" verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
SHA256-Fingerprints can be used. CA validation is disabled when pinning a SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported.
fingerprint. CA validation is disabled when pinning a fingerprint.
You can use the following command for obtaining a SHA-1 fingerprint:: You can use the following command for obtaining a SHA256 fingerprint::
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
that you can forget about all of that. It is easier to deploy a free that you can forget about all of that. It is easier to deploy a free
@ -40,22 +40,16 @@ To point vdirsyncer to a custom set of root CAs::
... ...
verify = "/path/to/cert.pem" verify = "/path/to/cert.pem"
Vdirsyncer uses the requests_ library, which, by default, `uses its own set of Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext
trusted CAs https://docs.python.org/3/library/ssl.html#ssl.SSLContext`_ by default.
<http://www.python-requests.org/en/latest/user/advanced/#ca-certificates>`_.
However, the actual behavior depends on how you have installed it. Many Linux There are cases where certificate validation fails even though you can access
distributions patch their ``python-requests`` package to use the system the server fine through e.g. your browser. This usually indicates that your
certificate CAs. Normally these two stores are similar enough for you to not installation of ``python`` or the ``aiohttp`` or library is somehow broken. In
care. such cases, it makes sense to explicitly set ``verify`` or
``verify_fingerprint`` as shown above.
But there are cases where certificate validation fails even though you can .. _aiohttp: https://docs.aiohttp.org/en/stable/index.html
access the server fine through e.g. your browser. This usually indicates that
your installation of the ``requests`` library is somehow broken. In such cases,
it makes sense to explicitly set ``verify`` or ``verify_fingerprint`` as shown
above.
.. _requests: http://www.python-requests.org/
.. _ssl-client-certs: .. _ssl-client-certs:

View file

@ -176,8 +176,11 @@ as a file called ``color`` within the calendar folder.
More information about collections More information about collections
---------------------------------- ----------------------------------
"Collection" is a collective term for addressbooks and calendars. Each "Collection" is a collective term for addressbooks and calendars. A Cardav or
collection from a storage has a "collection name", a unique identifier for each Caldav server can contains several "collections" which correspond to several
addressbooks or calendar.
Each collection from a storage has a "collection name", a unique identifier for each
collection. In the case of :storage:`filesystem`-storage, this is the name of the collection. In the case of :storage:`filesystem`-storage, this is the name of the
directory that represents the collection, in the case of the DAV-storages this directory that represents the collection, in the case of the DAV-storages this
is the last segment of the URL. We use this identifier in the ``collections`` is the last segment of the URL. We use this identifier in the ``collections``

View file

@ -48,10 +48,9 @@ instance to subfolders of ``~/.calendar/``.
Setting up todoman Setting up todoman
================== ==================
Write this to ``~/.config/todoman/todoman.conf``:: Write this to ``~/.config/todoman/config.py``::
[main] path = "~/.calendars/*"
path = ~/.calendars/*
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``, The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
which is exactly the tasklists we want. Now you can use ``todoman`` as which is exactly the tasklists we want. Now you can use ``todoman`` as

View file

@ -50,7 +50,6 @@ program chosen:
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other * Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
e.g. DAVx⁵_ or the apps by dmfs_. e.g. DAVx⁵_ or other apps bundled with smartphones.
.. _DAVx⁵: https://www.davx5.com/ .. _DAVx⁵: https://www.davx5.com/
.. _dmfs: https://dmfs.org/

View file

@ -1,11 +1,13 @@
# Run new version to PyPI. # Push new version to PyPI.
# #
# Usage: hut builds submit publish-release.yaml --follow # Usage: hut builds submit publish-release.yaml --follow
image: alpine/edge image: alpine/edge
packages: packages:
- py3-build
- py3-pip - py3-pip
- py3-setuptools - py3-setuptools
- py3-setuptools_scm
- py3-wheel - py3-wheel
- twine - twine
sources: sources:
@ -23,5 +25,5 @@ tasks:
git describe --exact-match --tags || complete-build git describe --exact-match --tags || complete-build
- publish: | - publish: |
cd vdirsyncer cd vdirsyncer
python setup.py sdist bdist_wheel python -m build --no-isolation
twine upload dist/* twine upload --non-interactive dist/*

114
pyproject.toml Normal file
View file

@ -0,0 +1,114 @@
# Vdirsyncer synchronizes calendars and contacts.
#
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
# how to package vdirsyncer.
[build-system]
requires = ["setuptools>=64", "setuptools_scm>=8"]
build-backend = "setuptools.build_meta"
[project]
name = "vdirsyncer"
authors = [
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
]
description = "Synchronize calendars and contacts"
readme = "README.rst"
requires-python = ">=3.9"
keywords = ["todo", "task", "icalendar", "cli"]
license = "BSD-3-Clause"
license-files = ["LICENSE"]
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Console",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.9",
"Topic :: Internet",
"Topic :: Office/Business :: Scheduling",
"Topic :: Utilities",
]
dependencies = [
"click>=5.0,<9.0",
"click-log>=0.3.0,<0.5.0",
"requests>=2.20.0",
"aiohttp>=3.8.2,<4.0.0",
"aiostream>=0.4.3,<0.8.0",
"tenacity>=9.0.0",
]
dynamic = ["version"]
[project.optional-dependencies]
google = ["aiohttp-oauthlib"]
test = [
"hypothesis>=6.72.0,<7.0.0",
"pytest",
"pytest-cov",
"pytest-httpserver",
"trustme",
"pytest-asyncio",
"aioresponses",
]
docs = [
"sphinx!=1.4.7",
"sphinx_rtd_theme",
"setuptools_scm",
]
check = [
"mypy",
"ruff",
"types-docutils",
"types-requests",
"types-setuptools",
]
[project.scripts]
vdirsyncer = "vdirsyncer.cli:app"
[tool.ruff.lint]
extend-select = [
"B0",
"C4",
"E",
"I",
"RSE",
"SIM",
"TID",
"UP",
"W",
]
[tool.ruff.lint.isort]
force-single-line = true
required-imports = ["from __future__ import annotations"]
[tool.pytest.ini_options]
addopts = """
--tb=short
--cov-config .coveragerc
--cov=vdirsyncer
--cov-report=term-missing:skip-covered
--no-cov-on-fail
--color=yes
"""
# filterwarnings=error
asyncio_default_fixture_loop_scope = "function"
[tool.mypy]
ignore_missing_imports = true
[tool.coverage.report]
exclude_lines = [
"if TYPE_CHECKING:",
]
[tool.setuptools.packages.find]
include = ["vdirsyncer*"]
[tool.setuptools_scm]
write_to = "vdirsyncer/version.py"
version_scheme = "no-guess-dev"

View file

@ -5,8 +5,10 @@ set -xeu
SCRIPT_PATH=$(realpath "$0") SCRIPT_PATH=$(realpath "$0")
SCRIPT_DIR=$(dirname "$SCRIPT_PATH") SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
DISTRO=$1 # E.g.: debian, ubuntu
DISTROVER=$2 DISTRO=${DISTRO:1}
# E.g.: bullseye, bookwork
DISTROVER=${DISTROVER:2}
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}" CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
CONTEXT="$(mktemp -d)" CONTEXT="$(mktemp -d)"
@ -21,7 +23,7 @@ trap cleanup EXIT
cp scripts/_build_deb_in_container.bash "$CONTEXT" cp scripts/_build_deb_in_container.bash "$CONTEXT"
python setup.py sdist -d "$CONTEXT" python setup.py sdist -d "$CONTEXT"
podman run -it \ docker run -it \
--name "$CONTAINER_NAME" \ --name "$CONTAINER_NAME" \
--volume "$CONTEXT:/source" \ --volume "$CONTEXT:/source" \
"$DISTRO:$DISTROVER" \ "$DISTRO:$DISTROVER" \

View file

@ -1,28 +0,0 @@
[tool:pytest]
addopts =
--tb=short
--cov-config .coveragerc
--cov=vdirsyncer
--cov-report=term-missing:skip-covered
--no-cov-on-fail
--color=yes
# filterwarnings=error
[flake8]
application-import-names = tests,vdirsyncer
extend-ignore =
E203, # Black-incompatible colon spacing.
W503, # Line jump before binary operator.
I100,
I202
max-line-length = 88
exclude = .eggs,build
import-order-style = smarkets
[isort]
force_single_line=true
[mypy]
ignore_missing_imports = True
# See https://github.com/python/mypy/issues/7511:
warn_no_return = False

View file

@ -1,80 +0,0 @@
"""
Vdirsyncer synchronizes calendars and contacts.
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
how to package vdirsyncer.
"""
from setuptools import Command
from setuptools import find_packages
from setuptools import setup
requirements = [
# https://github.com/mitsuhiko/click/issues/200
"click>=5.0,<9.0",
"click-log>=0.3.0, <0.5.0",
"requests >=2.20.0",
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
"requests_toolbelt >=0.4.0",
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
"atomicwrites>=0.1.7",
"aiohttp>=3.8.0,<4.0.0",
"aiostream>=0.4.3,<0.5.0",
]
class PrintRequirements(Command):
description = "Prints minimal requirements"
user_options: list = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
for requirement in requirements:
print(requirement.replace(">", "=").replace(" ", ""))
with open("README.rst") as f:
long_description = f.read()
setup(
# General metadata
name="vdirsyncer",
author="Markus Unterwaditzer",
author_email="markus@unterwaditzer.net",
url="https://github.com/pimutils/vdirsyncer",
description="Synchronize calendars and contacts",
license="BSD",
long_description=long_description,
# Runtime dependencies
install_requires=requirements,
# Optional dependencies
extras_require={
"google": ["aiohttp-oauthlib"],
},
# Build dependencies
setup_requires=["setuptools_scm != 1.12.0"],
# Other
packages=find_packages(exclude=["tests.*", "tests"]),
include_package_data=True,
cmdclass={"minimal_requirements": PrintRequirements},
use_scm_version={"write_to": "vdirsyncer/version.py"},
entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:main"]},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Topic :: Internet",
"Topic :: Utilities",
],
)

View file

@ -1,7 +0,0 @@
hypothesis>=5.0.0,<7.0.0
pytest
pytest-cov
pytest-httpserver
trustme
pytest-asyncio
aioresponses

View file

@ -1,6 +1,9 @@
""" """
Test suite for vdirsyncer. Test suite for vdirsyncer.
""" """
from __future__ import annotations
import hypothesis.strategies as st import hypothesis.strategies as st
import urllib3.exceptions import urllib3.exceptions
@ -100,10 +103,8 @@ X-SOMETHING:{r}
HAHA:YES HAHA:YES
END:FOO""" END:FOO"""
printable_characters_strategy = st.text( printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))
st.characters(blacklist_categories=("Cc", "Cs"))
)
uid_strategy = st.text( uid_strategy = st.text(
st.characters(blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1 st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
).filter(lambda x: x.strip() == x) ).filter(lambda x: x.strip() == x)

View file

@ -1,6 +1,9 @@
""" """
General-purpose fixtures for vdirsyncer's testsuite. General-purpose fixtures for vdirsyncer's testsuite.
""" """
from __future__ import annotations
import logging import logging
import os import os
@ -42,7 +45,7 @@ settings.register_profile(
"deterministic", "deterministic",
settings( settings(
derandomize=True, derandomize=True,
suppress_health_check=HealthCheck.all(), suppress_health_check=list(HealthCheck),
), ),
) )
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow])) settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
@ -56,12 +59,12 @@ else:
@pytest_asyncio.fixture @pytest_asyncio.fixture
async def aio_session(event_loop): async def aio_session():
async with aiohttp.ClientSession() as session: async with aiohttp.ClientSession() as session:
yield session yield session
@pytest_asyncio.fixture @pytest_asyncio.fixture
async def aio_connector(event_loop): async def aio_connector():
async with aiohttp.TCPConnector(limit_per_host=16) as conn: async with aiohttp.TCPConnector(limit_per_host=16) as conn:
yield conn yield conn

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import random import random
import textwrap import textwrap
import uuid import uuid
@ -8,16 +10,15 @@ import aiostream
import pytest import pytest
import pytest_asyncio import pytest_asyncio
from tests import EVENT_TEMPLATE
from tests import TASK_TEMPLATE
from tests import VCARD_TEMPLATE
from tests import assert_item_equals
from tests import normalize_item
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.storage.base import normalize_meta_value from vdirsyncer.storage.base import normalize_meta_value
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
from .. import EVENT_TEMPLATE
from .. import TASK_TEMPLATE
from .. import VCARD_TEMPLATE
from .. import assert_item_equals
from .. import normalize_item
def get_server_mixin(server_name): def get_server_mixin(server_name):
from . import __name__ as base from . import __name__ as base
@ -49,7 +50,7 @@ class StorageTests:
:param collection: The name of the collection to create and use. :param collection: The name of the collection to create and use.
""" """
raise NotImplementedError() raise NotImplementedError
@pytest_asyncio.fixture @pytest_asyncio.fixture
async def s(self, get_storage_args): async def s(self, get_storage_args):
@ -103,7 +104,7 @@ class StorageTests:
href, etag = await s.upload(get_item()) href, etag = await s.upload(get_item())
if etag is None: if etag is None:
_, etag = await s.get(href) _, etag = await s.get(href)
((href2, item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2)) ((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
assert href2 == href assert href2 == href
assert etag2 == etag assert etag2 == etag
@ -117,7 +118,7 @@ class StorageTests:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_upload(self, s, get_item): async def test_upload(self, s, get_item):
item = get_item() item = get_item()
href, etag = await s.upload(item) href, _etag = await s.upload(item)
assert_item_equals((await s.get(href))[0], item) assert_item_equals((await s.get(href))[0], item)
@pytest.mark.asyncio @pytest.mark.asyncio
@ -145,7 +146,7 @@ class StorageTests:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_wrong_etag(self, s, get_item): async def test_wrong_etag(self, s, get_item):
item = get_item() item = get_item()
href, etag = await s.upload(item) href, _etag = await s.upload(item)
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
await s.update(href, item, '"lolnope"') await s.update(href, item, '"lolnope"')
with pytest.raises(exceptions.PreconditionFailed): with pytest.raises(exceptions.PreconditionFailed):
@ -193,8 +194,7 @@ class StorageTests:
) )
assert {href: etag for href, item, etag in items} == info assert {href: etag for href, item, etag in items} == info
@pytest.mark.asyncio def test_repr(self, s):
def test_repr(self, s, get_storage_args): # XXX: unused param
assert self.storage_class.__name__ in repr(s) assert self.storage_class.__name__ in repr(s)
assert s.instance_name is None assert s.instance_name is None
@ -384,7 +384,7 @@ class StorageTests:
uid = str(uuid.uuid4()) uid = str(uuid.uuid4())
item = Item( item = Item(
textwrap.dedent( textwrap.dedent(
""" f"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
BEGIN:VEVENT BEGIN:VEVENT
@ -418,13 +418,11 @@ class StorageTests:
TRANSP:OPAQUE TRANSP:OPAQUE
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
""".format( """
uid=uid
)
).strip() ).strip()
) )
href, etag = await s.upload(item) href, _etag = await s.upload(item)
item2, etag2 = await s.get(href) item2, _etag2 = await s.get(href)
assert normalize_item(item) == normalize_item(item2) assert normalize_item(item) == normalize_item(item2)

View file

@ -1,9 +1,10 @@
from __future__ import annotations
import asyncio import asyncio
import contextlib import contextlib
import subprocess import subprocess
import time import time
import uuid import uuid
from typing import Type
import aiostream import aiostream
import pytest import pytest
@ -90,7 +91,7 @@ async def slow_create_collection(request, aio_connector):
# storage limits. # storage limits.
to_delete = [] to_delete = []
async def inner(cls: Type, args: dict, collection_name: str) -> dict: async def inner(cls: type, args: dict, collection_name: str) -> dict:
"""Create a collection """Create a collection
Returns args necessary to create a Storage instance pointing to it. Returns args necessary to create a Storage instance pointing to it.

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import os import os
import uuid import uuid
@ -6,12 +8,11 @@ import aiostream
import pytest import pytest
from tests import assert_item_equals from tests import assert_item_equals
from tests.storage import StorageTests
from tests.storage import get_server_mixin
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
from .. import StorageTests
from .. import get_server_mixin
dav_server = os.environ.get("DAV_SERVER", "skip") dav_server = os.environ.get("DAV_SERVER", "skip")
ServerMixin = get_server_mixin(dav_server) ServerMixin = get_server_mixin(dav_server)
@ -47,6 +48,6 @@ class DAVStorageTests(ServerMixin, StorageTests):
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext) monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
item = get_item(uid="град сатану" + str(uuid.uuid4())) item = get_item(uid="град сатану" + str(uuid.uuid4()))
href, etag = await s.upload(item) href, _etag = await s.upload(item)
item2, etag2 = await s.get(href) item2, _etag2 = await s.get(href)
assert_item_equals(item, item2) assert_item_equals(item, item2)

View file

@ -1,3 +1,6 @@
from __future__ import annotations
import contextlib
import datetime import datetime
from textwrap import dedent from textwrap import dedent
@ -9,10 +12,10 @@ from aioresponses import aioresponses
from tests import EVENT_TEMPLATE from tests import EVENT_TEMPLATE
from tests import TASK_TEMPLATE from tests import TASK_TEMPLATE
from tests import VCARD_TEMPLATE from tests import VCARD_TEMPLATE
from tests.storage import format_item
from vdirsyncer import exceptions from vdirsyncer import exceptions
from vdirsyncer.storage.dav import CalDAVStorage from vdirsyncer.storage.dav import CalDAVStorage
from .. import format_item
from . import DAVStorageTests from . import DAVStorageTests
from . import dav_server from . import dav_server
@ -28,18 +31,16 @@ class TestCalDAVStorage(DAVStorageTests):
async def test_doesnt_accept_vcard(self, item_type, get_storage_args): async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
s = self.storage_class(item_types=(item_type,), **await get_storage_args()) s = self.storage_class(item_types=(item_type,), **await get_storage_args())
try: # Most storages hard-fail, but xandikos doesn't.
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
await s.upload(format_item(VCARD_TEMPLATE)) await s.upload(format_item(VCARD_TEMPLATE))
except (exceptions.Error, aiohttp.ClientResponseError):
# Most storages hard-fail, but xandikos doesn't.
pass
assert not await aiostream.stream.list(s.list()) assert not await aiostream.stream.list(s.list())
# The `arg` param is not named `item_types` because that would hit # The `arg` param is not named `item_types` because that would hit
# https://bitbucket.org/pytest-dev/pytest/issue/745/ # https://bitbucket.org/pytest-dev/pytest/issue/745/
@pytest.mark.parametrize( @pytest.mark.parametrize(
"arg,calls_num", ("arg", "calls_num"),
[ [
(("VTODO",), 1), (("VTODO",), 1),
(("VEVENT",), 1), (("VEVENT",), 1),

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.dav import CardDAVStorage from vdirsyncer.storage.dav import CardDAVStorage

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.dav import _BAD_XML_CHARS from vdirsyncer.storage.dav import _BAD_XML_CHARS
@ -39,8 +41,8 @@ def test_xml_utilities():
def test_xml_specialchars(char): def test_xml_specialchars(char):
x = _parse_xml( x = _parse_xml(
'<?xml version="1.0" encoding="UTF-8" ?>' '<?xml version="1.0" encoding="UTF-8" ?>'
"<foo>ye{}s\r\n" f"<foo>ye{chr(char)}s\r\n"
"hello</foo>".format(chr(char)).encode("ascii") "hello</foo>".encode("ascii")
) )
if char in _BAD_XML_CHARS: if char in _BAD_XML_CHARS:
@ -50,7 +52,7 @@ def test_xml_specialchars(char):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"href", "href",
[ [
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics", # noqa: E501 "/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
], ],
) )
def test_normalize_href(href): def test_normalize_href(href):

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import os import os
import uuid import uuid
@ -11,7 +13,7 @@ try:
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/", "url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
} }
except KeyError as e: except KeyError as e:
pytestmark = pytest.mark.skip(f"Missing envkey: {str(e)}") pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")
@pytest.mark.flaky(reruns=5) @pytest.mark.flaky(reruns=5)
@ -23,7 +25,7 @@ class ServerMixin:
elif self.storage_class.fileext == ".vcf": elif self.storage_class.fileext == ".vcf":
pytest.skip("No carddav") pytest.skip("No carddav")
else: else:
raise RuntimeError() raise RuntimeError
@pytest.fixture @pytest.fixture
def get_storage_args(self, davical_args, request): def get_storage_args(self, davical_args, request):

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import os import os
import pytest import pytest
@ -6,11 +8,13 @@ import pytest
class ServerMixin: class ServerMixin:
@pytest.fixture @pytest.fixture
def get_storage_args(self, slow_create_collection, aio_connector, request): def get_storage_args(self, slow_create_collection, aio_connector, request):
if "item_type" in request.fixturenames: if (
if request.getfixturevalue("item_type") == "VTODO": "item_type" in request.fixturenames
# Fastmail has non-standard support for TODOs and request.getfixturevalue("item_type") == "VTODO"
# See https://github.com/pimutils/vdirsyncer/issues/824 ):
pytest.skip("Fastmail has non-standard VTODO support.") # Fastmail has non-standard support for TODOs
# See https://github.com/pimutils/vdirsyncer/issues/824
pytest.skip("Fastmail has non-standard VTODO support.")
async def inner(collection="test"): async def inner(collection="test"):
args = { args = {
@ -24,7 +28,7 @@ class ServerMixin:
elif self.storage_class.fileext == ".vcf": elif self.storage_class.fileext == ".vcf":
args["url"] = "https://carddav.fastmail.com/" args["url"] = "https://carddav.fastmail.com/"
else: else:
raise RuntimeError() raise RuntimeError
if collection is not None: if collection is not None:
args = await slow_create_collection( args = await slow_create_collection(

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import os import os
import pytest import pytest
@ -8,7 +10,7 @@ class ServerMixin:
def get_storage_args(self, item_type, slow_create_collection): def get_storage_args(self, item_type, slow_create_collection):
if item_type != "VEVENT": if item_type != "VEVENT":
# iCloud collections can either be calendars or task lists. # iCloud collections can either be calendars or task lists.
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa # See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
pytest.skip("iCloud doesn't support anything else than VEVENT") pytest.skip("iCloud doesn't support anything else than VEVENT")
async def inner(collection="test"): async def inner(collection="test"):
@ -22,7 +24,7 @@ class ServerMixin:
elif self.storage_class.fileext == ".vcf": elif self.storage_class.fileext == ".vcf":
args["url"] = "https://contacts.icloud.com/" args["url"] = "https://contacts.icloud.com/"
else: else:
raise RuntimeError() raise RuntimeError
if collection is not None: if collection is not None:
args = slow_create_collection(self.storage_class, args, collection) args = slow_create_collection(self.storage_class, args, collection)

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import subprocess import subprocess
import aiostream import aiostream
@ -46,7 +48,8 @@ class TestFilesystemStorage(StorageTests):
s = self.storage_class(str(tmpdir), ".txt") s = self.storage_class(str(tmpdir), ".txt")
await s.upload(Item("UID:a/b/c")) await s.upload(Item("UID:a/b/c"))
(item_file,) = tmpdir.listdir() (item_file,) = tmpdir.listdir()
assert "/" not in item_file.basename and item_file.isfile() assert "/" not in item_file.basename
assert item_file.isfile()
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_ignore_tmp_files(self, tmpdir): async def test_ignore_tmp_files(self, tmpdir):
@ -87,13 +90,13 @@ class TestFilesystemStorage(StorageTests):
storage = self.storage_class(str(tmpdir), ".txt") storage = self.storage_class(str(tmpdir), ".txt")
item = Item("UID:" + "hue" * 600) item = Item("UID:" + "hue" * 600)
href, etag = await storage.upload(item) href, _etag = await storage.upload(item)
assert item.uid not in href assert item.uid not in href
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_post_hook_inactive(self, tmpdir, monkeypatch): async def test_post_hook_inactive(self, tmpdir, monkeypatch):
def check_call_mock(*args, **kwargs): def check_call_mock(*args, **kwargs):
raise AssertionError() raise AssertionError
monkeypatch.setattr(subprocess, "call", check_call_mock) monkeypatch.setattr(subprocess, "call", check_call_mock)

View file

@ -1,10 +1,16 @@
from __future__ import annotations
import aiohttp
import pytest import pytest
from aiohttp import BasicAuth
from aioresponses import CallbackResult from aioresponses import CallbackResult
from aioresponses import aioresponses from aioresponses import aioresponses
from tests import normalize_item from tests import normalize_item
from vdirsyncer.exceptions import UserError from vdirsyncer.exceptions import UserError
from vdirsyncer.http import BasicAuthMethod
from vdirsyncer.http import DigestAuthMethod
from vdirsyncer.http import UsageLimitReached
from vdirsyncer.http import request
from vdirsyncer.storage.http import HttpStorage from vdirsyncer.storage.http import HttpStorage
from vdirsyncer.storage.http import prepare_auth from vdirsyncer.storage.http import prepare_auth
@ -35,7 +41,7 @@ async def test_list(aio_connector):
), ),
] ]
responses = ["\n".join(["BEGIN:VCALENDAR"] + items + ["END:VCALENDAR"])] * 2 responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2
def callback(url, headers, **kwargs): def callback(url, headers, **kwargs):
assert headers["User-Agent"].startswith("vdirsyncer/") assert headers["User-Agent"].startswith("vdirsyncer/")
@ -89,16 +95,14 @@ def test_readonly_param(aio_connector):
def test_prepare_auth(): def test_prepare_auth():
assert prepare_auth(None, "", "") is None assert prepare_auth(None, "", "") is None
assert prepare_auth(None, "user", "pwd") == BasicAuth("user", "pwd") assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd")
assert prepare_auth("basic", "user", "pwd") == BasicAuth("user", "pwd") assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd")
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
assert prepare_auth("basic", "", "pwd") assert prepare_auth("basic", "", "pwd")
assert "you need to specify username and password" in str(excinfo.value).lower() assert "you need to specify username and password" in str(excinfo.value).lower()
from requests.auth import HTTPDigestAuth assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod)
assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
prepare_auth("ladida", "user", "pwd") prepare_auth("ladida", "user", "pwd")
@ -106,20 +110,12 @@ def test_prepare_auth():
assert "unknown authentication method" in str(excinfo.value).lower() assert "unknown authentication method" in str(excinfo.value).lower()
def test_prepare_auth_guess(monkeypatch): def test_prepare_auth_guess():
import requests_toolbelt.auth.guess # guess auth is currently not supported
assert isinstance(
prepare_auth("guess", "user", "pwd"),
requests_toolbelt.auth.guess.GuessAuth,
)
monkeypatch.delattr(requests_toolbelt.auth.guess, "GuessAuth")
with pytest.raises(UserError) as excinfo: with pytest.raises(UserError) as excinfo:
prepare_auth("guess", "user", "pwd") prepare_auth("guess", "usr", "pwd")
assert "requests_toolbelt is too old" in str(excinfo.value).lower() assert "not supported" in str(excinfo.value).lower()
def test_verify_false_disallowed(aio_connector): def test_verify_false_disallowed(aio_connector):
@ -127,3 +123,41 @@ def test_verify_false_disallowed(aio_connector):
HttpStorage(url="http://example.com", verify=False, connector=aio_connector) HttpStorage(url="http://example.com", verify=False, connector=aio_connector)
assert "must be a path to a pem-file." in str(excinfo.value).lower() assert "must be a path to a pem-file." in str(excinfo.value).lower()
@pytest.mark.asyncio
async def test_403_usage_limit_exceeded(aio_connector):
url = "http://127.0.0.1/test_403"
error_body = {
"error": {
"errors": [
{
"domain": "usageLimits",
"message": "Calendar usage limits exceeded.",
"reason": "quotaExceeded",
}
],
"code": 403,
"message": "Calendar usage limits exceeded.",
}
}
async with aiohttp.ClientSession(connector=aio_connector) as session:
with aioresponses() as m:
m.get(url, status=403, payload=error_body, repeat=True)
with pytest.raises(UsageLimitReached):
await request("GET", url, session)
@pytest.mark.asyncio
async def test_403_without_usage_limits_domain(aio_connector):
"""A 403 JSON error without the Google 'usageLimits' domain should not be
treated as UsageLimitReached and should surface as ClientResponseError.
"""
url = "http://127.0.0.1/test_403_no_usage_limits"
async with aiohttp.ClientSession(connector=aio_connector) as session:
with aioresponses() as m:
m.get(url, status=403, repeat=True)
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, session)

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import aiostream import aiostream
import pytest import pytest
from aioresponses import CallbackResult from aioresponses import CallbackResult
@ -18,8 +20,8 @@ class CombinedStorage(Storage):
storage_name = "http_and_singlefile" storage_name = "http_and_singlefile"
def __init__(self, url, path, *, connector, **kwargs): def __init__(self, url, path, *, connector, **kwargs):
if kwargs.get("collection", None) is not None: if kwargs.get("collection") is not None:
raise ValueError() raise ValueError
super().__init__(**kwargs) super().__init__(**kwargs)
self.url = url self.url = url

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.memory import MemoryStorage from vdirsyncer.storage.memory import MemoryStorage
@ -6,7 +8,6 @@ from . import StorageTests
class TestMemoryStorage(StorageTests): class TestMemoryStorage(StorageTests):
storage_class = MemoryStorage storage_class = MemoryStorage
supports_collections = False supports_collections = False

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer.storage.singlefile import SingleFileStorage from vdirsyncer.storage.singlefile import SingleFileStorage
@ -6,7 +8,6 @@ from . import StorageTests
class TestSingleFileStorage(StorageTests): class TestSingleFileStorage(StorageTests):
storage_class = SingleFileStorage storage_class = SingleFileStorage
supports_metadata = False supports_metadata = False

View file

@ -1,3 +1,5 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
import pytest import pytest

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import io import io
from textwrap import dedent from textwrap import dedent
@ -24,7 +26,7 @@ def read_config(tmpdir, monkeypatch):
def test_read_config(read_config): def test_read_config(read_config):
errors, c = read_config( _errors, c = read_config(
""" """
[general] [general]
status_path = "/tmp/status/" status_path = "/tmp/status/"
@ -220,3 +222,62 @@ def test_validate_collections_param():
x([["c", None, "b"]]) x([["c", None, "b"]])
x([["c", "a", None]]) x([["c", "a", None]])
x([["c", None, None]]) x([["c", None, None]])
def test_invalid_implicit_value(read_config):
expected_message = "`implicit` parameter must be 'create' or absent"
with pytest.raises(exceptions.UserError) as excinfo:
read_config(
"""
[general]
status_path = "/tmp/status/"
[pair my_pair]
a = "my_a"
b = "my_b"
collections = null
implicit = "invalid"
[storage my_a]
type = "filesystem"
path = "{base}/path_a/"
fileext = ".txt"
[storage my_b]
type = "filesystem"
path = "{base}/path_b/"
fileext = ".txt"
"""
)
assert expected_message in str(excinfo.value)
def test_implicit_create_only(read_config):
"""Test that implicit create works."""
errors, c = read_config(
"""
[general]
status_path = "/tmp/status/"
[pair my_pair]
a = "my_a"
b = "my_b"
collections = ["from a", "from b"]
implicit = "create"
[storage my_a]
type = "filesystem"
path = "{base}/path_a/"
fileext = ".txt"
[storage my_b]
type = "filesystem"
path = "{base}/path_b/"
fileext = ".txt"
"""
)
assert not errors
pair = c.pairs["my_pair"]
assert pair.implicit == "create"

View file

@ -1,6 +1,7 @@
from __future__ import annotations
import json import json
from textwrap import dedent from textwrap import dedent
from typing import List
import pytest import pytest
@ -152,7 +153,7 @@ def test_discover_direct_path(tmpdir, runner):
def test_null_collection_with_named_collection(tmpdir, runner): def test_null_collection_with_named_collection(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -160,15 +161,13 @@ def test_null_collection_with_named_collection(tmpdir, runner):
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{tmpdir!s}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "singlefile" type = "singlefile"
path = "{base}/bar.txt" path = "{tmpdir!s}/bar.txt"
""".format( """
base=str(tmpdir)
)
) )
) )
@ -192,7 +191,7 @@ def test_null_collection_with_named_collection(tmpdir, runner):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"a_requires,b_requires", ("a_requires", "b_requires"),
[ [
(True, True), (True, True),
(True, False), (True, False),
@ -207,13 +206,13 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
def __init__(self, require_collection, **kw): def __init__(self, require_collection, **kw):
if require_collection: if require_collection:
assert not kw.get("collection") assert not kw.get("collection")
raise exceptions.CollectionRequired() raise exceptions.CollectionRequired
async def get(self, href: str): async def get(self, href: str):
raise NotImplementedError() raise NotImplementedError
async def list(self) -> List[tuple]: async def list(self) -> list[tuple]:
raise NotImplementedError() raise NotImplementedError
from vdirsyncer.cli.utils import storage_names from vdirsyncer.cli.utils import storage_names
@ -221,7 +220,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -229,14 +228,12 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
[storage foo] [storage foo]
type = "test" type = "test"
require_collection = {a} require_collection = {json.dumps(a_requires)}
[storage bar] [storage bar]
type = "test" type = "test"
require_collection = {b} require_collection = {json.dumps(b_requires)}
""".format( """
a=json.dumps(a_requires), b=json.dumps(b_requires)
)
) )
) )

View file

@ -1,10 +1,12 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
def test_get_password_from_command(tmpdir, runner): def test_get_password_from_command(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -12,16 +14,14 @@ def test_get_password_from_command(tmpdir, runner):
[storage foo] [storage foo]
type.fetch = ["shell", "echo filesystem"] type.fetch = ["shell", "echo filesystem"]
path = "{base}/foo/" path = "{tmpdir!s}/foo/"
fileext.fetch = ["command", "echo", ".txt"] fileext.fetch = ["command", "echo", ".txt"]
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{tmpdir!s}/bar/"
fileext.fetch = ["prompt", "Fileext for bar"] fileext.fetch = ["prompt", "Fileext for bar"]
""".format( """
base=str(tmpdir)
)
) )
) )

View file

@ -1,3 +1,5 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
import pytest import pytest
@ -56,7 +58,7 @@ def test_repair_uids(storage, runner, repair_uids):
else: else:
opt = ["--no-repair-unsafe-uid"] opt = ["--no-repair-unsafe-uid"]
result = runner.invoke(["repair"] + opt + ["foo"], input="y") result = runner.invoke(["repair", *opt, "foo"], input="y")
assert not result.exception assert not result.exception
if repair_uids: if repair_uids:

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import json import json
import sys import sys
from textwrap import dedent from textwrap import dedent
@ -88,9 +90,7 @@ def test_empty_storage(tmpdir, runner):
result = runner.invoke(["sync"]) result = runner.invoke(["sync"])
lines = result.output.splitlines() lines = result.output.splitlines()
assert lines[0] == "Syncing my_pair" assert lines[0] == "Syncing my_pair"
assert lines[1].startswith( assert lines[1].startswith('error: my_pair: Storage "my_b" was completely emptied.')
"error: my_pair: " 'Storage "my_b" was completely emptied.'
)
assert result.exception assert result.exception
@ -278,27 +278,24 @@ def test_multiple_pairs(tmpdir, runner):
], ],
) )
def test_create_collections(collections, tmpdir, runner): def test_create_collections(collections, tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = {colls} collections = {json.dumps(list(collections))}
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{tmpdir!s}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{tmpdir!s}/bar/"
fileext = ".txt" fileext = ".txt"
""".format( """
base=str(tmpdir), colls=json.dumps(list(collections))
)
) )
) )
@ -316,7 +313,7 @@ def test_create_collections(collections, tmpdir, runner):
def test_ident_conflict(tmpdir, runner): def test_ident_conflict(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -324,16 +321,14 @@ def test_ident_conflict(tmpdir, runner):
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{tmpdir!s}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{tmpdir!s}/bar/"
fileext = ".txt" fileext = ".txt"
""".format( """
base=str(tmpdir)
)
) )
) )
@ -363,7 +358,7 @@ def test_ident_conflict(tmpdir, runner):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"existing,missing", ("existing", "missing"),
[ [
("foo", "bar"), ("foo", "bar"),
("bar", "foo"), ("bar", "foo"),
@ -372,7 +367,7 @@ def test_ident_conflict(tmpdir, runner):
def test_unknown_storage(tmpdir, runner, existing, missing): def test_unknown_storage(tmpdir, runner, existing, missing):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@ -380,11 +375,9 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
[storage {existing}] [storage {existing}]
type = "filesystem" type = "filesystem"
path = "{base}/{existing}/" path = "{tmpdir!s}/{existing}/"
fileext = ".txt" fileext = ".txt"
""".format( """
base=str(tmpdir), existing=existing
)
) )
) )
@ -394,10 +387,8 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
assert result.exception assert result.exception
assert ( assert (
"Storage '{missing}' not found. " f"Storage '{missing}' not found. "
"These are the configured storages: ['{existing}']".format( f"These are the configured storages: ['{existing}']"
missing=missing, existing=existing
)
) in result.output ) in result.output
@ -411,31 +402,29 @@ def test_no_configured_pairs(tmpdir, runner, cmd):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"resolution,expect_foo,expect_bar", ("resolution", "expect_foo", "expect_bar"),
[(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")], [(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
) )
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar): def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = null collections = null
conflict_resolution = {val} conflict_resolution = {json.dumps(resolution)}
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
path = "{base}/foo" path = "{tmpdir!s}/foo"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
path = "{base}/bar" path = "{tmpdir!s}/bar"
""".format( """
base=str(tmpdir), val=json.dumps(resolution)
)
) )
) )
@ -527,13 +516,11 @@ def test_fetch_only_necessary_params(tmpdir, runner):
fetch_script = tmpdir.join("fetch_script") fetch_script = tmpdir.join("fetch_script")
fetch_script.write( fetch_script.write(
dedent( dedent(
""" f"""
set -e set -e
touch "{}" touch "{fetched_file!s}"
echo ".txt" echo ".txt"
""".format( """
str(fetched_file)
)
) )
) )
@ -564,9 +551,7 @@ def test_fetch_only_necessary_params(tmpdir, runner):
type = "filesystem" type = "filesystem"
path = "{path}" path = "{path}"
fileext.fetch = ["command", "sh", "{script}"] fileext.fetch = ["command", "sh", "{script}"]
""".format( """.format(path=str(tmpdir.mkdir("bogus")), script=str(fetch_script))
path=str(tmpdir.mkdir("bogus")), script=str(fetch_script)
)
) )
) )

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import pytest import pytest
from vdirsyncer import exceptions from vdirsyncer import exceptions
@ -12,7 +14,7 @@ def test_handle_cli_error(capsys):
except BaseException: except BaseException:
handle_cli_error() handle_cli_error()
out, err = capsys.readouterr() _out, err = capsys.readouterr()
assert "returned something vdirsyncer doesn't understand" in err assert "returned something vdirsyncer doesn't understand" in err
assert "ayy lmao" in err assert "ayy lmao" in err

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import ssl import ssl
import pytest import pytest

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import logging import logging
import aiohttp import aiohttp
@ -20,30 +22,34 @@ def test_get_storage_init_args():
from vdirsyncer.storage.memory import MemoryStorage from vdirsyncer.storage.memory import MemoryStorage
all, required = utils.get_storage_init_args(MemoryStorage) all, required = utils.get_storage_init_args(MemoryStorage)
assert all == {"fileext", "collection", "read_only", "instance_name"} assert all == {"fileext", "collection", "read_only", "instance_name", "no_delete"}
assert not required assert not required
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_request_ssl(): async def test_request_ssl():
async with aiohttp.ClientSession() as session: async with aiohttp.ClientSession() as session:
with pytest.raises(aiohttp.ClientConnectorCertificateError) as excinfo: with pytest.raises(
aiohttp.ClientConnectorCertificateError,
match="certificate verify failed",
):
await http.request( await http.request(
"GET", "GET",
"https://self-signed.badssl.com/", "https://self-signed.badssl.com/",
session=session, session=session,
) )
assert "certificate verify failed" in str(excinfo.value)
# XXX FIXME
with pytest.raises(Exception): @pytest.mark.xfail(reason="feature not implemented")
await http.request( @pytest.mark.asyncio
"GET", async def test_request_unsafe_ssl():
"https://self-signed.badssl.com/", async with aiohttp.ClientSession() as session:
verify=False, await http.request(
session=session, "GET",
) "https://self-signed.badssl.com/",
verify=False,
session=session,
)
def fingerprint_of_cert(cert, hash=hashes.SHA256) -> str: def fingerprint_of_cert(cert, hash=hashes.SHA256) -> str:

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import os import os
from vdirsyncer.cli.config import _resolve_conflict_via_command from vdirsyncer.cli.config import _resolve_conflict_via_command

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import aiostream import aiostream
import pytest import pytest
@ -7,7 +9,7 @@ missing = object()
@pytest.mark.parametrize( @pytest.mark.parametrize(
"shortcuts,expected", ("shortcuts", "expected"),
[ [
( (
["from a"], ["from a"],

View file

@ -1,3 +1,5 @@
from __future__ import annotations
from contextlib import contextmanager from contextlib import contextmanager
from unittest.mock import patch from unittest.mock import patch
@ -106,7 +108,7 @@ def test_failed_strategy(monkeypatch, value_cache):
def strategy(x): def strategy(x):
calls.append(x) calls.append(x)
raise KeyboardInterrupt() raise KeyboardInterrupt
monkeypatch.setitem(STRATEGIES, "mystrategy", strategy) monkeypatch.setitem(STRATEGIES, "mystrategy", strategy)

View file

@ -1,3 +1,7 @@
from __future__ import annotations
import contextlib
import hypothesis.strategies as st import hypothesis.strategies as st
from hypothesis import assume from hypothesis import assume
from hypothesis import given from hypothesis import given
@ -22,13 +26,13 @@ def test_legacy_status(status_dict):
hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()} hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()}
hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()} hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()}
assume(len(hrefs_a) == len(status_dict) == len(hrefs_b)) assume(len(hrefs_a) == len(status_dict) == len(hrefs_b))
status = SqliteStatus() with contextlib.closing(SqliteStatus()) as status:
status.load_legacy_status(status_dict) status.load_legacy_status(status_dict)
assert dict(status.to_legacy_status()) == status_dict assert dict(status.to_legacy_status()) == status_dict
for ident, (meta_a, meta_b) in status_dict.items(): for ident, (meta_a, meta_b) in status_dict.items():
ident_a, meta2_a = status.get_by_href_a(meta_a["href"]) ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
ident_b, meta2_b = status.get_by_href_b(meta_b["href"]) ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
assert meta2_a.to_status() == meta_a assert meta2_a.to_status() == meta_a
assert meta2_b.to_status() == meta_b assert meta2_b.to_status() == meta_b
assert ident_a == ident_b == ident assert ident_a == ident_b == ident

View file

@ -1,4 +1,7 @@
from __future__ import annotations
import asyncio import asyncio
import contextlib
from copy import deepcopy from copy import deepcopy
import aiostream import aiostream
@ -23,13 +26,12 @@ from vdirsyncer.sync.status import SqliteStatus
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
async def sync(a, b, status, *args, **kwargs): async def sync(a, b, status, *args, **kwargs) -> None:
new_status = SqliteStatus(":memory:") with contextlib.closing(SqliteStatus(":memory:")) as new_status:
new_status.load_legacy_status(status) new_status.load_legacy_status(status)
rv = await _sync(a, b, new_status, *args, **kwargs) await _sync(a, b, new_status, *args, **kwargs)
status.clear() status.clear()
status.update(new_status.to_legacy_status()) status.update(new_status.to_legacy_status())
return rv
def empty_storage(x): def empty_storage(x):
@ -96,7 +98,8 @@ async def test_read_only_and_prefetch():
await sync(a, b, status, force_delete=True) await sync(a, b, status, force_delete=True)
await sync(a, b, status, force_delete=True) await sync(a, b, status, force_delete=True)
assert not items(a) and not items(b) assert not items(a)
assert not items(b)
@pytest.mark.asyncio @pytest.mark.asyncio
@ -224,7 +227,8 @@ async def test_insert_hash():
await a.update(href, Item("UID:1\nHAHA:YES"), etag) await a.update(href, Item("UID:1\nHAHA:YES"), etag)
await sync(a, b, status) await sync(a, b, status)
assert "hash" in status["1"][0] and "hash" in status["1"][1] assert "hash" in status["1"][0]
assert "hash" in status["1"][1]
@pytest.mark.asyncio @pytest.mark.asyncio
@ -344,7 +348,7 @@ async def test_uses_get_multi(monkeypatch):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
item = Item("UID:1") item = Item("UID:1")
expected_href, etag = await a.upload(item) expected_href, _etag = await a.upload(item)
await sync(a, b, {}) await sync(a, b, {})
assert get_multi_calls == [[expected_href]] assert get_multi_calls == [[expected_href]]
@ -381,7 +385,7 @@ async def test_changed_uids():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
href_a, etag_a = await a.upload(Item("UID:A-ONE")) href_a, etag_a = await a.upload(Item("UID:A-ONE"))
href_b, etag_b = await b.upload(Item("UID:B-ONE")) _href_b, _etag_b = await b.upload(Item("UID:B-ONE"))
status = {} status = {}
await sync(a, b, status) await sync(a, b, status)
@ -435,7 +439,7 @@ async def test_partial_sync_revert():
assert items(a) == {"UID:2"} assert items(a) == {"UID:2"}
@pytest.mark.parametrize("sync_inbetween", (True, False)) @pytest.mark.parametrize("sync_inbetween", [True, False])
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_ident_conflict(sync_inbetween): async def test_ident_conflict(sync_inbetween):
a = MemoryStorage() a = MemoryStorage()
@ -465,7 +469,7 @@ async def test_moved_href():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href, etag = await a.upload(Item("UID:haha")) _href, _etag = await a.upload(Item("UID:haha"))
await sync(a, b, status) await sync(a, b, status)
b.items["lol"] = b.items.pop("haha") b.items["lol"] = b.items.pop("haha")
@ -526,7 +530,7 @@ async def test_unicode_hrefs():
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
href, etag = await a.upload(Item("UID:äää")) _href, _etag = await a.upload(Item("UID:äää"))
await sync(a, b, status) await sync(a, b, status)
@ -535,7 +539,7 @@ class ActionIntentionallyFailed(Exception):
def action_failure(*a, **kw): def action_failure(*a, **kw):
raise ActionIntentionallyFailed() raise ActionIntentionallyFailed
class SyncMachine(RuleBasedStateMachine): class SyncMachine(RuleBasedStateMachine):
@ -549,7 +553,7 @@ class SyncMachine(RuleBasedStateMachine):
if flaky_etags: if flaky_etags:
async def get(href): async def get(href):
old_etag, item = s.items[href] _old_etag, item = s.items[href]
etag = _random_string() etag = _random_string()
s.items[href] = etag, item s.items[href] = etag, item
return item, etag return item, etag
@ -640,10 +644,7 @@ class SyncMachine(RuleBasedStateMachine):
errors = [] errors = []
if with_error_callback: error_callback = errors.append if with_error_callback else None
error_callback = errors.append
else:
error_callback = None
try: try:
# If one storage is read-only, double-sync because changes don't # If one storage is read-only, double-sync because changes don't
@ -666,7 +667,8 @@ class SyncMachine(RuleBasedStateMachine):
except ActionIntentionallyFailed: except ActionIntentionallyFailed:
pass pass
except BothReadOnly: except BothReadOnly:
assert a.read_only and b.read_only assert a.read_only
assert b.read_only
assume(False) assume(False)
except StorageEmpty: except StorageEmpty:
if force_delete: if force_delete:

View file

@ -1,3 +1,5 @@
from __future__ import annotations
from vdirsyncer import exceptions from vdirsyncer import exceptions

View file

@ -1,3 +1,7 @@
from __future__ import annotations
import asyncio
import hypothesis.strategies as st import hypothesis.strategies as st
import pytest import pytest
import pytest_asyncio import pytest_asyncio
@ -31,7 +35,8 @@ async def test_basic(monkeypatch):
await a.set_meta("foo", None) await a.set_meta("foo", None)
await metasync(a, b, status, keys=["foo"]) await metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") is None and await b.get_meta("foo") is None assert await a.get_meta("foo") is None
assert await b.get_meta("foo") is None
await a.set_meta("foo", "bar") await a.set_meta("foo", "bar")
await metasync(a, b, status, keys=["foo"]) await metasync(a, b, status, keys=["foo"])
@ -50,27 +55,24 @@ async def test_basic(monkeypatch):
await b.set_meta("foo", None) await b.set_meta("foo", None)
await metasync(a, b, status, keys=["foo"]) await metasync(a, b, status, keys=["foo"])
assert not await a.get_meta("foo") and not await b.get_meta("foo") assert not await a.get_meta("foo")
assert not await b.get_meta("foo")
@pytest_asyncio.fixture @pytest_asyncio.fixture
@pytest.mark.asyncio async def conflict_state(request):
async def conflict_state(request, event_loop):
a = MemoryStorage() a = MemoryStorage()
b = MemoryStorage() b = MemoryStorage()
status = {} status = {}
await a.set_meta("foo", "bar") await a.set_meta("foo", "bar")
await b.set_meta("foo", "baz") await b.set_meta("foo", "baz")
def cleanup(): async def do_cleanup():
async def do_cleanup(): assert await a.get_meta("foo") == "bar"
assert await a.get_meta("foo") == "bar" assert await b.get_meta("foo") == "baz"
assert await b.get_meta("foo") == "baz" assert not status
assert not status
event_loop.run_until_complete(do_cleanup()) request.addfinalizer(lambda: asyncio.run(do_cleanup()))
request.addfinalizer(cleanup)
return a, b, status return a, b, status

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import aiostream import aiostream
import pytest import pytest
from hypothesis import HealthCheck from hypothesis import HealthCheck
@ -15,7 +17,7 @@ from vdirsyncer.vobject import Item
@given(uid=uid_strategy) @given(uid=uid_strategy)
# Using the random module for UIDs: # Using the random module for UIDs:
@settings(suppress_health_check=HealthCheck.all()) @settings(suppress_health_check=list(HealthCheck))
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_repair_uids(uid): async def test_repair_uids(uid):
s = MemoryStorage() s = MemoryStorage()
@ -38,12 +40,12 @@ async def test_repair_uids(uid):
@given(uid=uid_strategy.filter(lambda x: not href_safe(x))) @given(uid=uid_strategy.filter(lambda x: not href_safe(x)))
# Using the random module for UIDs: # Using the random module for UIDs:
@settings(suppress_health_check=HealthCheck.all()) @settings(suppress_health_check=list(HealthCheck))
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_repair_unsafe_uids(uid): async def test_repair_unsafe_uids(uid):
s = MemoryStorage() s = MemoryStorage()
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
href, etag = await s.upload(item) href, _etag = await s.upload(item)
assert (await s.get(href))[0].uid == uid assert (await s.get(href))[0].uid == uid
assert not href_safe(uid) assert not href_safe(uid)
@ -56,7 +58,7 @@ async def test_repair_unsafe_uids(uid):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"uid,href", [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")] ("uid", "href"), [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
) )
def test_repair_unsafe_href(uid, href): def test_repair_unsafe_href(uid, href):
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")

136
tests/unit/test_retry.py Normal file
View file

@ -0,0 +1,136 @@
from __future__ import annotations
import json
from unittest.mock import AsyncMock
from unittest.mock import Mock
import aiohttp
import pytest
from vdirsyncer.http import UsageLimitReached
from vdirsyncer.http import request
async def _create_mock_response(status: int, body: str | dict):
raw_body = body
text_body = json.dumps(body) if isinstance(body, dict) else body
mock_response = AsyncMock()
mock_response.status = status
mock_response.ok = 200 <= status < 300
mock_response.reason = "OK" if mock_response.ok else "Forbidden"
mock_response.headers = (
{"Content-Type": "application/json"}
if isinstance(raw_body, dict)
else {"Content-Type": "text/plain"}
)
mock_response.text.return_value = text_body
if isinstance(raw_body, dict):
mock_response.json.return_value = raw_body
else:
mock_response.json.side_effect = ValueError("Not JSON")
mock_response.raise_for_status = Mock(
side_effect=(
aiohttp.ClientResponseError(
request_info=AsyncMock(),
history=(),
status=status,
message=mock_response.reason,
headers=mock_response.headers,
)
if not mock_response.ok
else None
)
)
return mock_response
@pytest.mark.asyncio
async def test_request_retry_on_usage_limit():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate (max_retries - 1) 403 errors and then a 200 OK
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries - 1)
] + [await _create_mock_response(200, "OK")]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
response = await request("GET", url, mock_session)
assert response.status == 200
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_retry_exceeds_max_attempts():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate max_retries 403 errors and then a 200 OK
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries)
]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
with pytest.raises(UsageLimitReached):
await request("GET", url, mock_session)
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_json():
url = "http://example.com/api"
mock_session = AsyncMock()
# Generic non-Google 403 error payload (e.g., GitHub-style)
mock_session.request.side_effect = [
await _create_mock_response(403, {"message": "API rate limit exceeded"})
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because it's not the Google quotaExceeded shape
assert mock_session.request.call_count == 1
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_text():
url = "http://example.com/api"
mock_session = AsyncMock()
# Plain-text 403 body mentioning rate limits, but not structured as Google error
mock_session.request.side_effect = [
await _create_mock_response(403, "Rate limit exceeded")
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because the JSON shape is not Google quotaExceeded
assert mock_session.request.call_count == 1

View file

@ -1,3 +1,5 @@
from __future__ import annotations
from textwrap import dedent from textwrap import dedent
import hypothesis.strategies as st import hypothesis.strategies as st
@ -23,7 +25,7 @@ _simple_split = [
] ]
_simple_joined = "\r\n".join( _simple_joined = "\r\n".join(
["BEGIN:VADDRESSBOOK"] + _simple_split + ["END:VADDRESSBOOK\r\n"] ["BEGIN:VADDRESSBOOK", *_simple_split, "END:VADDRESSBOOK\r\n"]
) )
@ -122,7 +124,7 @@ def test_split_collection_timezones():
"END:VTIMEZONE" "END:VTIMEZONE"
) )
full = "\r\n".join(["BEGIN:VCALENDAR"] + items + [timezone, "END:VCALENDAR"]) full = "\r\n".join(["BEGIN:VCALENDAR", *items, timezone, "END:VCALENDAR"])
given = {normalize_item(item) for item in vobject.split_collection(full)} given = {normalize_item(item) for item in vobject.split_collection(full)}
expected = { expected = {
@ -152,7 +154,7 @@ def test_hash_item():
def test_multiline_uid(benchmark): def test_multiline_uid(benchmark):
a = "BEGIN:FOO\r\n" "UID:123456789abcd\r\n" " efgh\r\n" "END:FOO\r\n" a = "BEGIN:FOO\r\nUID:123456789abcd\r\n efgh\r\nEND:FOO\r\n"
assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh" assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh"
@ -235,6 +237,31 @@ def test_broken_item():
assert item.parsed is None assert item.parsed is None
def test_mismatched_end():
with pytest.raises(ValueError) as excinfo:
vobject._Component.parse(
[
"BEGIN:FOO",
"END:BAR",
]
)
assert "Got END:BAR, expected END:FOO at line 2" in str(excinfo.value)
def test_missing_end():
with pytest.raises(ValueError) as excinfo:
vobject._Component.parse(
[
"BEGIN:FOO",
"BEGIN:BAR",
"END:BAR",
]
)
assert "Missing END for component(s): FOO" in str(excinfo.value)
def test_multiple_items(): def test_multiple_items():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
vobject._Component.parse( vobject._Component.parse(
@ -272,7 +299,7 @@ def test_input_types():
value_strategy = st.text( value_strategy = st.text(
st.characters( st.characters(
blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), blacklist_characters=":=" exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), exclude_characters=":="
), ),
min_size=1, min_size=1,
).filter(lambda x: x.strip() == x) ).filter(lambda x: x.strip() == x)
@ -308,7 +335,8 @@ class VobjectMachine(RuleBasedStateMachine):
assert key in c assert key in c
assert c.get(key) == value assert c.get(key) == value
dump = "\r\n".join(c.dump_lines()) dump = "\r\n".join(c.dump_lines())
assert key in dump and value in dump assert key in dump
assert value in dump
@rule( @rule(
c=Parsed, c=Parsed,
@ -338,6 +366,16 @@ class VobjectMachine(RuleBasedStateMachine):
TestVobjectMachine = VobjectMachine.TestCase TestVobjectMachine = VobjectMachine.TestCase
def test_dupe_consecutive_keys():
state = VobjectMachine()
unparsed_0 = state.get_unparsed_lines(encoded=False, joined=False)
parsed_0 = state.parse(unparsed=unparsed_0)
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop(c=parsed_0, key="0", value="1")
state.teardown()
def test_component_contains(): def test_component_contains():
item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"]) item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"])
@ -345,4 +383,4 @@ def test_component_contains():
assert "BAZ" not in item assert "BAZ" not in item
with pytest.raises(ValueError): with pytest.raises(ValueError):
42 in item # noqa: B015 42 in item # noqa: B015, this check raises.

View file

@ -2,13 +2,14 @@
Vdirsyncer synchronizes calendars and contacts. Vdirsyncer synchronizes calendars and contacts.
""" """
from __future__ import annotations
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer" PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
BUGTRACKER_HOME = PROJECT_HOME + "/issues" BUGTRACKER_HOME = PROJECT_HOME + "/issues"
DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable" DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable"
try: try:
from .version import version as __version__ # noqa from .version import version as __version__
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
raise ImportError( raise ImportError(
"Failed to find (autogenerated) version.py. " "Failed to find (autogenerated) version.py. "
@ -16,12 +17,14 @@ except ImportError: # pragma: no cover
"use the PyPI ones." "use the PyPI ones."
) )
__all__ = ["__version__"]
def _check_python_version(): # pragma: no cover
def _check_python_version():
import sys import sys
if sys.version_info < (3, 7, 0): if sys.version_info < (3, 9, 0): # noqa: UP036
print("vdirsyncer requires at least Python 3.7.") print("vdirsyncer requires at least Python 3.9.")
sys.exit(1) sys.exit(1)

View file

@ -1,3 +1,5 @@
from __future__ import annotations
if __name__ == "__main__": if __name__ == "__main__":
from vdirsyncer.cli import app from vdirsyncer.cli import app

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import asyncio import asyncio
import functools import functools
import json import json
@ -8,12 +10,15 @@ import aiohttp
import click import click
import click_log import click_log
from .. import BUGTRACKER_HOME from vdirsyncer import BUGTRACKER_HOME
from .. import __version__ from vdirsyncer import __version__
cli_logger = logging.getLogger(__name__) cli_logger = logging.getLogger(__name__)
click_log.basic_config("vdirsyncer") click_log.basic_config("vdirsyncer")
# add short option for the help option
click_context_settings = {"help_option_names": ["-h", "--help"]}
class AppContext: class AppContext:
def __init__(self): def __init__(self):
@ -39,13 +44,13 @@ def catch_errors(f):
return inner return inner
@click.group() @click.group(context_settings=click_context_settings)
@click_log.simple_verbosity_option("vdirsyncer") @click_log.simple_verbosity_option("vdirsyncer")
@click.version_option(version=__version__) @click.version_option(version=__version__)
@click.option("--config", "-c", metavar="FILE", help="Config file to use.") @click.option("--config", "-c", metavar="FILE", help="Config file to use.")
@pass_context @pass_context
@catch_errors @catch_errors
def app(ctx, config): def app(ctx, config: str):
""" """
Synchronize calendars and contacts Synchronize calendars and contacts
""" """
@ -54,7 +59,7 @@ def app(ctx, config):
cli_logger.warning( cli_logger.warning(
"Vdirsyncer currently does not support Windows. " "Vdirsyncer currently does not support Windows. "
"You will likely encounter bugs. " "You will likely encounter bugs. "
"See {}/535 for more information.".format(BUGTRACKER_HOME) f"See {BUGTRACKER_HOME}/535 for more information."
) )
if not ctx.config: if not ctx.config:
@ -63,9 +68,6 @@ def app(ctx, config):
ctx.config = load_config(config) ctx.config = load_config(config)
main = app
def collections_arg_callback(ctx, param, value): def collections_arg_callback(ctx, param, value):
""" """
Expand the various CLI shortforms ("pair, pair/collection") to an iterable Expand the various CLI shortforms ("pair, pair/collection") to an iterable
@ -145,7 +147,14 @@ def sync(ctx, collections, force_delete):
) )
) )
await asyncio.gather(*tasks) # `return_exceptions=True` ensures that the event loop lives long enough for
# backoffs to be able to finish
gathered = await asyncio.gather(*tasks, return_exceptions=True)
# but now we need to manually check for and propogate a single failure after
# allowing all tasks to finish in order to keep exit status non-zero
failures = [e for e in gathered if isinstance(e, BaseException)]
if failures:
raise failures[0]
asyncio.run(main(collections)) asyncio.run(main(collections))
@ -165,7 +174,6 @@ def metasync(ctx, collections):
async def main(collection_names): async def main(collection_names):
async with aiohttp.TCPConnector(limit_per_host=16) as conn: async with aiohttp.TCPConnector(limit_per_host=16) as conn:
for pair_name, collections in collection_names: for pair_name, collections in collection_names:
collections = prepare_pair( collections = prepare_pair(
pair_name=pair_name, pair_name=pair_name,

View file

@ -3,13 +3,18 @@ from __future__ import annotations
import json import json
import os import os
import string import string
from collections.abc import Generator
from configparser import RawConfigParser from configparser import RawConfigParser
from functools import cached_property
from itertools import chain from itertools import chain
from typing import IO
from typing import Any
from vdirsyncer import PROJECT_HOME
from vdirsyncer import exceptions
from vdirsyncer.utils import expand_path
from vdirsyncer.vobject import Item
from .. import PROJECT_HOME
from .. import exceptions
from ..utils import cached_property
from ..utils import expand_path
from .fetchparams import expand_fetch_params from .fetchparams import expand_fetch_params
from .utils import storage_class_from_config from .utils import storage_class_from_config
@ -23,16 +28,16 @@ def validate_section_name(name, section_type):
if invalid: if invalid:
chars_display = "".join(sorted(SECTION_NAME_CHARS)) chars_display = "".join(sorted(SECTION_NAME_CHARS))
raise exceptions.UserError( raise exceptions.UserError(
'The {}-section "{}" contains invalid characters. Only ' f'The {section_type}-section "{name}" contains invalid characters. Only '
"the following characters are allowed for storage and " "the following characters are allowed for storage and "
"pair names:\n{}".format(section_type, name, chars_display) f"pair names:\n{chars_display}"
) )
def _validate_general_section(general_config): def _validate_general_section(general_config: dict[str, str]):
invalid = set(general_config) - GENERAL_ALL invalid = set(general_config) - GENERAL_ALL
missing = GENERAL_REQUIRED - set(general_config) missing = GENERAL_REQUIRED - set(general_config)
problems = [] problems: list[str] = []
if invalid: if invalid:
problems.append( problems.append(
@ -47,7 +52,7 @@ def _validate_general_section(general_config):
if problems: if problems:
raise exceptions.UserError( raise exceptions.UserError(
"Invalid general section. Copy the example " "Invalid general section. Copy the example "
"config from the repository and edit it: {}".format(PROJECT_HOME), f"config from the repository and edit it: {PROJECT_HOME}",
problems=problems, problems=problems,
) )
@ -88,21 +93,31 @@ def _validate_collections_param(collections):
raise ValueError("Duplicate value.") raise ValueError("Duplicate value.")
collection_names.add(collection_name) collection_names.add(collection_name)
except ValueError as e: except ValueError as e:
raise ValueError(f"`collections` parameter, position {i}: {str(e)}") raise ValueError(f"`collections` parameter, position {i}: {e!s}")
def _validate_implicit_param(implicit):
if implicit is None:
return
if implicit != "create":
raise ValueError("`implicit` parameter must be 'create' or absent.")
class _ConfigReader: class _ConfigReader:
def __init__(self, f): def __init__(self, f: IO[Any]):
self._file = f self._file: IO[Any] = f
self._parser = c = RawConfigParser() self._parser = c = RawConfigParser()
c.read_file(f) c.read_file(f)
self._seen_names = set() self._seen_names: set = set()
self._general = {} self._general: dict[str, str] = {}
self._pairs = {} self._pairs: dict[str, dict[str, str]] = {}
self._storages = {} self._storages: dict[str, dict[str, str]] = {}
def _parse_section(self, section_type, name, options): def _parse_section(
self, section_type: str, name: str, options: dict[str, Any]
) -> None:
validate_section_name(name, section_type) validate_section_name(name, section_type)
if name in self._seen_names: if name in self._seen_names:
raise ValueError(f'Name "{name}" already used.') raise ValueError(f'Name "{name}" already used.')
@ -119,7 +134,9 @@ class _ConfigReader:
else: else:
raise ValueError("Unknown section type.") raise ValueError("Unknown section type.")
def parse(self): def parse(
self,
) -> tuple[dict[str, str], dict[str, dict[str, str]], dict[str, dict[str, str]]]:
for section in self._parser.sections(): for section in self._parser.sections():
if " " in section: if " " in section:
section_type, name = section.split(" ", 1) section_type, name = section.split(" ", 1)
@ -133,7 +150,7 @@ class _ConfigReader:
dict(_parse_options(self._parser.items(section), section=section)), dict(_parse_options(self._parser.items(section), section=section)),
) )
except ValueError as e: except ValueError as e:
raise exceptions.UserError(f'Section "{section}": {str(e)}') raise exceptions.UserError(f'Section "{section}": {e!s}')
_validate_general_section(self._general) _validate_general_section(self._general)
if getattr(self._file, "name", None): if getattr(self._file, "name", None):
@ -145,7 +162,9 @@ class _ConfigReader:
return self._general, self._pairs, self._storages return self._general, self._pairs, self._storages
def _parse_options(items, section=None): def _parse_options(
items: list[tuple[str, str]], section: str | None = None
) -> Generator[tuple[str, dict[str, str]], None, None]:
for key, value in items: for key, value in items:
try: try:
yield key, json.loads(value) yield key, json.loads(value)
@ -154,13 +173,18 @@ def _parse_options(items, section=None):
class Config: class Config:
def __init__(self, general, pairs, storages): def __init__(
self,
general: dict[str, str],
pairs: dict[str, dict[str, str]],
storages: dict[str, dict[str, str]],
) -> None:
self.general = general self.general = general
self.storages = storages self.storages = storages
for name, options in storages.items(): for name, options in storages.items():
options["instance_name"] = name options["instance_name"] = name
self.pairs = {} self.pairs: dict[str, PairConfig] = {}
for name, options in pairs.items(): for name, options in pairs.items():
try: try:
self.pairs[name] = PairConfig(self, name, options) self.pairs[name] = PairConfig(self, name, options)
@ -168,12 +192,12 @@ class Config:
raise exceptions.UserError(f"Pair {name}: {e}") raise exceptions.UserError(f"Pair {name}: {e}")
@classmethod @classmethod
def from_fileobject(cls, f): def from_fileobject(cls, f: IO[Any]):
reader = _ConfigReader(f) reader = _ConfigReader(f)
return cls(*reader.parse()) return cls(*reader.parse())
@classmethod @classmethod
def from_filename_or_environment(cls, fname=None): def from_filename_or_environment(cls, fname: str | None = None):
if fname is None: if fname is None:
fname = os.environ.get("VDIRSYNCER_CONFIG", None) fname = os.environ.get("VDIRSYNCER_CONFIG", None)
if fname is None: if fname is None:
@ -190,15 +214,13 @@ class Config:
except Exception as e: except Exception as e:
raise exceptions.UserError(f"Error during reading config {fname}: {e}") raise exceptions.UserError(f"Error during reading config {fname}: {e}")
def get_storage_args(self, storage_name): def get_storage_args(self, storage_name: str):
try: try:
args = self.storages[storage_name] args = self.storages[storage_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
"Storage {!r} not found. " f"Storage {storage_name!r} not found. "
"These are the configured storages: {}".format( f"These are the configured storages: {list(self.storages)}"
storage_name, list(self.storages)
)
) )
else: else:
return expand_fetch_params(args) return expand_fetch_params(args)
@ -211,14 +233,15 @@ class Config:
class PairConfig: class PairConfig:
def __init__(self, full_config, name, options): def __init__(self, full_config: Config, name: str, options: dict[str, str]):
self._config = full_config self._config: Config = full_config
self.name = name self.name: str = name
self.name_a = options.pop("a") self.name_a: str = options.pop("a")
self.name_b = options.pop("b") self.name_b: str = options.pop("b")
self.implicit = options.pop("implicit", None)
self._partial_sync = options.pop("partial_sync", None) self._partial_sync: str | None = options.pop("partial_sync", None)
self.metadata = options.pop("metadata", None) or () self.metadata: str | tuple[()] = options.pop("metadata", ())
self.conflict_resolution = self._process_conflict_resolution_param( self.conflict_resolution = self._process_conflict_resolution_param(
options.pop("conflict_resolution", None) options.pop("conflict_resolution", None)
@ -234,14 +257,17 @@ class PairConfig:
) )
else: else:
_validate_collections_param(self.collections) _validate_collections_param(self.collections)
_validate_implicit_param(self.implicit)
if options: if options:
raise ValueError("Unknown options: {}".format(", ".join(options))) raise ValueError("Unknown options: {}".format(", ".join(options)))
def _process_conflict_resolution_param(self, conflict_resolution): def _process_conflict_resolution_param(
self, conflict_resolution: str | list[str] | None
):
if conflict_resolution in (None, "a wins", "b wins"): if conflict_resolution in (None, "a wins", "b wins"):
return conflict_resolution return conflict_resolution
elif ( if (
isinstance(conflict_resolution, list) isinstance(conflict_resolution, list)
and len(conflict_resolution) > 1 and len(conflict_resolution) > 1
and conflict_resolution[0] == "command" and conflict_resolution[0] == "command"
@ -255,8 +281,7 @@ class PairConfig:
return _resolve_conflict_via_command(a, b, command, a_name, b_name) return _resolve_conflict_via_command(a, b, command, a_name, b_name)
return resolve return resolve
else: raise ValueError("Invalid value for `conflict_resolution`.")
raise ValueError("Invalid value for `conflict_resolution`.")
# The following parameters are lazily evaluated because evaluating # The following parameters are lazily evaluated because evaluating
# self.config_a would expand all `x.fetch` parameters. This is costly and # self.config_a would expand all `x.fetch` parameters. This is costly and
@ -302,10 +327,10 @@ class PairConfig:
class CollectionConfig: class CollectionConfig:
def __init__(self, pair, name, config_a, config_b): def __init__(self, pair, name: str, config_a, config_b):
self.pair = pair self.pair = pair
self._config = pair._config self._config = pair._config
self.name = name self.name: str = name
self.config_a = config_a self.config_a = config_a
self.config_b = config_b self.config_b = config_b
@ -314,14 +339,16 @@ class CollectionConfig:
load_config = Config.from_filename_or_environment load_config = Config.from_filename_or_environment
def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=None): def _resolve_conflict_via_command(
a, b, command, a_name, b_name, _check_call=None
) -> Item:
import shutil import shutil
import tempfile import tempfile
if _check_call is None: if _check_call is None:
from subprocess import check_call as _check_call from subprocess import check_call as _check_call
from ..vobject import Item from vdirsyncer.vobject import Item
dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.") dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.")
try: try:
@ -334,7 +361,7 @@ def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=Non
f.write(b.raw) f.write(b.raw)
command[0] = expand_path(command[0]) command[0] = expand_path(command[0])
_check_call(command + [a_tmp, b_tmp]) _check_call([*command, a_tmp, b_tmp])
with open(a_tmp) as f: with open(a_tmp) as f:
new_a = f.read() new_a = f.read()
@ -342,7 +369,7 @@ def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=Non
new_b = f.read() new_b = f.read()
if new_a != new_b: if new_a != new_b:
raise exceptions.UserError("The two files are not completely " "equal.") raise exceptions.UserError("The two files are not completely equal.")
return Item(new_a) return Item(new_a)
finally: finally:
shutil.rmtree(dir) shutil.rmtree(dir)

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import asyncio import asyncio
import hashlib import hashlib
import json import json
@ -7,7 +9,8 @@ import sys
import aiohttp import aiohttp
import aiostream import aiostream
from .. import exceptions from vdirsyncer import exceptions
from .utils import handle_collection_not_found from .utils import handle_collection_not_found
from .utils import handle_storage_init_error from .utils import handle_storage_init_error
from .utils import load_status from .utils import load_status
@ -63,21 +66,19 @@ async def collections_for_pair(
rv["collections"], pair.config_a, pair.config_b rv["collections"], pair.config_a, pair.config_b
) )
) )
elif rv: if rv:
raise exceptions.UserError( raise exceptions.UserError(
"Detected change in config file, " "Detected change in config file, "
"please run `vdirsyncer discover {}`.".format(pair.name) f"please run `vdirsyncer discover {pair.name}`."
)
else:
raise exceptions.UserError(
"Please run `vdirsyncer discover {}` "
" before synchronization.".format(pair.name)
) )
raise exceptions.UserError(
f"Please run `vdirsyncer discover {pair.name}` before synchronization."
)
logger.info(f"Discovering collections for pair {pair.name}") logger.info(f"Discovering collections for pair {pair.name}")
a_discovered = _DiscoverResult(pair.config_a, connector=connector) a_discovered = DiscoverResult(pair.config_a, connector=connector)
b_discovered = _DiscoverResult(pair.config_b, connector=connector) b_discovered = DiscoverResult(pair.config_b, connector=connector)
if list_collections: if list_collections:
# TODO: We should gather data and THEN print, so it can be async. # TODO: We should gather data and THEN print, so it can be async.
@ -92,24 +93,31 @@ async def collections_for_pair(
connector=connector, connector=connector,
) )
async def _handle_collection_not_found(
config, collection, e=None, implicit_create=False
):
return await handle_collection_not_found(
config, collection, e=e, implicit_create=pair.implicit == "create"
)
# We have to use a list here because the special None/null value would get # We have to use a list here because the special None/null value would get
# mangled to string (because JSON objects always have string keys). # mangled to string (because JSON objects always have string keys).
rv = await aiostream.stream.list( rv = await aiostream.stream.list( # type: ignore[assignment]
expand_collections( expand_collections(
shortcuts=pair.collections, shortcuts=pair.collections,
config_a=pair.config_a, config_a=pair.config_a,
config_b=pair.config_b, config_b=pair.config_b,
get_a_discovered=a_discovered.get_self, get_a_discovered=a_discovered.get_self,
get_b_discovered=b_discovered.get_self, get_b_discovered=b_discovered.get_self,
_handle_collection_not_found=handle_collection_not_found, _handle_collection_not_found=_handle_collection_not_found,
) )
) )
await _sanity_check_collections(rv, connector=connector) await _sanity_check_collections(rv, connector=connector)
save_status( save_status(
status_path, base_path=status_path,
pair.name, pair=pair.name,
data_type="collections", data_type="collections",
data={ data={
"collections": list( "collections": list(
@ -155,7 +163,7 @@ def _expand_collections_cache(collections, config_a, config_b):
yield name, (a, b) yield name, (a, b)
class _DiscoverResult: class DiscoverResult:
def __init__(self, config, *, connector): def __init__(self, config, *, connector):
self._cls, _ = storage_class_from_config(config) self._cls, _ = storage_class_from_config(config)
@ -163,6 +171,7 @@ class _DiscoverResult:
"CardDAVStorage", "CardDAVStorage",
"CalDAVStorage", "CalDAVStorage",
"GoogleCalendarStorage", "GoogleCalendarStorage",
"GoogleContactsStorage",
]: ]:
assert connector is not None assert connector is not None
config["connector"] = connector config["connector"] = connector
@ -270,8 +279,8 @@ async def _print_collections(
logger.debug("".join(traceback.format_tb(sys.exc_info()[2]))) logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
logger.warning( logger.warning(
"Failed to discover collections for {}, use `-vdebug` " f"Failed to discover collections for {instance_name}, use `-vdebug` "
"to see the full traceback.".format(instance_name) "to see the full traceback."
) )
return return
logger.info(f"{instance_name}:") logger.info(f"{instance_name}:")

View file

@ -1,10 +1,13 @@
from __future__ import annotations
import logging import logging
import click import click
from .. import exceptions from vdirsyncer import exceptions
from ..utils import expand_path from vdirsyncer.utils import expand_path
from ..utils import synchronized from vdirsyncer.utils import synchronized
from . import AppContext from . import AppContext
SUFFIX = ".fetch" SUFFIX = ".fetch"
@ -37,7 +40,7 @@ def _fetch_value(opts, key):
try: try:
ctx = click.get_current_context().find_object(AppContext) ctx = click.get_current_context().find_object(AppContext)
if ctx is None: if ctx is None:
raise RuntimeError() raise RuntimeError
password_cache = ctx.fetched_params password_cache = ctx.fetched_params
except RuntimeError: except RuntimeError:
password_cache = {} password_cache = {}
@ -65,8 +68,7 @@ def _fetch_value(opts, key):
else: else:
if not rv: if not rv:
raise exceptions.UserError( raise exceptions.UserError(
"Empty value for {}, this most likely " f"Empty value for {key}, this most likely indicates an error."
"indicates an error.".format(key)
) )
password_cache[cache_key] = rv password_cache[cache_key] = rv
return rv return rv
@ -86,7 +88,7 @@ def _strategy_command(*command: str, shell: bool = False):
return stdout.strip("\n") return stdout.strip("\n")
except OSError as e: except OSError as e:
cmd = " ".join(expanded_command) cmd = " ".join(expanded_command)
raise exceptions.UserError(f"Failed to execute command: {cmd}\n{str(e)}") raise exceptions.UserError(f"Failed to execute command: {cmd}\n{e!s}")
def _strategy_shell(*command: str): def _strategy_shell(*command: str):

View file

@ -1,12 +1,15 @@
from __future__ import annotations
import json import json
import aiohttp import aiohttp
from .. import exceptions from vdirsyncer import exceptions
from .. import sync from vdirsyncer import sync
from .config import CollectionConfig from .config import CollectionConfig
from .discover import DiscoverResult
from .discover import collections_for_pair from .discover import collections_for_pair
from .discover import storage_class_from_config
from .discover import storage_instance_from_config from .discover import storage_instance_from_config
from .utils import JobFailed from .utils import JobFailed
from .utils import cli_logger from .utils import cli_logger
@ -33,10 +36,8 @@ async def prepare_pair(pair_name, collections, config, *, connector):
config_a, config_b = all_collections[collection_name] config_a, config_b = all_collections[collection_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
"Pair {}: Collection {} not found. These are the " f"Pair {pair_name}: Collection {json.dumps(collection_name)} not found."
"configured collections:\n{}".format( f"These are the configured collections:\n{list(all_collections)}"
pair_name, json.dumps(collection_name), list(all_collections)
)
) )
collection = CollectionConfig(pair, collection_name, config_a, config_b) collection = CollectionConfig(pair, collection_name, config_a, config_b)
@ -80,12 +81,12 @@ async def sync_collection(
) )
if sync_failed: if sync_failed:
raise JobFailed() raise JobFailed
except JobFailed: except JobFailed:
raise raise
except BaseException: except BaseException:
handle_cli_error(status_name) handle_cli_error(status_name)
raise JobFailed() raise JobFailed
async def discover_collections(pair, **kwargs): async def discover_collections(pair, **kwargs):
@ -103,26 +104,26 @@ async def repair_collection(
*, *,
connector: aiohttp.TCPConnector, connector: aiohttp.TCPConnector,
): ):
from ..repair import repair_storage from vdirsyncer.repair import repair_storage
storage_name, collection = collection, None storage_name, collection = collection, None
if "/" in storage_name: if "/" in storage_name:
storage_name, collection = storage_name.split("/") storage_name, collection = storage_name.split("/")
config = config.get_storage_args(storage_name) config = config.get_storage_args(storage_name)
storage_type = config["type"] # If storage type has a slash, ignore it and anything after it.
storage_type = config["type"].split("/")[0]
if collection is not None: if collection is not None:
cli_logger.info("Discovering collections (skipping cache).") cli_logger.info("Discovering collections (skipping cache).")
cls, config = storage_class_from_config(config) get_discovered = DiscoverResult(config, connector=connector)
async for config in cls.discover(**config): discovered = await get_discovered.get_self()
for config in discovered.values():
if config["collection"] == collection: if config["collection"] == collection:
break break
else: else:
raise exceptions.UserError( raise exceptions.UserError(
"Couldn't find collection {} for storage {}.".format( f"Couldn't find collection {collection} for storage {storage_name}."
collection, storage_name
)
) )
config["type"] = storage_type config["type"] = storage_type
@ -134,7 +135,7 @@ async def repair_collection(
async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector): async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector):
from ..metasync import metasync from vdirsyncer.metasync import metasync
pair = collection.pair pair = collection.pair
status_name = get_status_name(pair.name, collection.name) status_name = get_status_name(pair.name, collection.name)
@ -142,11 +143,11 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
try: try:
cli_logger.info(f"Metasyncing {status_name}") cli_logger.info(f"Metasyncing {status_name}")
status = ( status = load_status(
load_status( general["status_path"],
general["status_path"], pair.name, collection.name, data_type="metadata" pair.name,
) collection.name,
or {} data_type="metadata",
) )
a = await storage_instance_from_config(collection.config_a, connector=connector) a = await storage_instance_from_config(collection.config_a, connector=connector)
@ -161,12 +162,12 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
) )
except BaseException: except BaseException:
handle_cli_error(status_name) handle_cli_error(status_name)
raise JobFailed() raise JobFailed
save_status( save_status(
general["status_path"], base_path=general["status_path"],
pair.name, pair=pair.name,
collection.name,
data_type="metadata", data_type="metadata",
data=status, data=status,
collection=collection.name,
) )

View file

@ -1,24 +1,29 @@
from __future__ import annotations
import contextlib import contextlib
import errno import errno
import importlib import importlib
import json import json
import os import os
import sys import sys
from typing import Any
import aiohttp import aiohttp
import click import click
from atomicwrites import atomic_write
from .. import BUGTRACKER_HOME from vdirsyncer import BUGTRACKER_HOME
from .. import DOCS_HOME from vdirsyncer import DOCS_HOME
from .. import exceptions from vdirsyncer import exceptions
from ..sync.exceptions import IdentConflict from vdirsyncer.storage.base import Storage
from ..sync.exceptions import PartialSync from vdirsyncer.sync.exceptions import IdentConflict
from ..sync.exceptions import StorageEmpty from vdirsyncer.sync.exceptions import PartialSync
from ..sync.exceptions import SyncConflict from vdirsyncer.sync.exceptions import StorageEmpty
from ..sync.status import SqliteStatus from vdirsyncer.sync.exceptions import SyncConflict
from ..utils import expand_path from vdirsyncer.sync.status import SqliteStatus
from ..utils import get_storage_init_args from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import get_storage_init_args
from . import cli_logger from . import cli_logger
STATUS_PERMISSIONS = 0o600 STATUS_PERMISSIONS = 0o600
@ -26,18 +31,18 @@ STATUS_DIR_PERMISSIONS = 0o700
class _StorageIndex: class _StorageIndex:
def __init__(self): def __init__(self) -> None:
self._storages = dict( self._storages: dict[str, str] = {
caldav="vdirsyncer.storage.dav.CalDAVStorage", "caldav": "vdirsyncer.storage.dav.CalDAVStorage",
carddav="vdirsyncer.storage.dav.CardDAVStorage", "carddav": "vdirsyncer.storage.dav.CardDAVStorage",
filesystem="vdirsyncer.storage.filesystem.FilesystemStorage", "filesystem": "vdirsyncer.storage.filesystem.FilesystemStorage",
http="vdirsyncer.storage.http.HttpStorage", "http": "vdirsyncer.storage.http.HttpStorage",
singlefile="vdirsyncer.storage.singlefile.SingleFileStorage", "singlefile": "vdirsyncer.storage.singlefile.SingleFileStorage",
google_calendar="vdirsyncer.storage.google.GoogleCalendarStorage", "google_calendar": "vdirsyncer.storage.google.GoogleCalendarStorage",
google_contacts="vdirsyncer.storage.google.GoogleContactsStorage", "google_contacts": "vdirsyncer.storage.google.GoogleContactsStorage",
) }
def __getitem__(self, name): def __getitem__(self, name: str) -> Storage:
item = self._storages[name] item = self._storages[name]
if not isinstance(item, str): if not isinstance(item, str):
return item return item
@ -74,33 +79,27 @@ def handle_cli_error(status_name=None, e=None):
cli_logger.critical(e) cli_logger.critical(e)
except StorageEmpty as e: except StorageEmpty as e:
cli_logger.error( cli_logger.error(
'{status_name}: Storage "{name}" was completely emptied. If you ' f'{status_name}: Storage "{e.empty_storage.instance_name}" was '
"want to delete ALL entries on BOTH sides, then use " "completely emptied. If you want to delete ALL entries on BOTH sides,"
"`vdirsyncer sync --force-delete {status_name}`. " f"then use `vdirsyncer sync --force-delete {status_name}`. "
"Otherwise delete the files for {status_name} in your status " f"Otherwise delete the files for {status_name} in your status "
"directory.".format( "directory."
name=e.empty_storage.instance_name, status_name=status_name
)
) )
except PartialSync as e: except PartialSync as e:
cli_logger.error( cli_logger.error(
"{status_name}: Attempted change on {storage}, which is read-only" f"{status_name}: Attempted change on {e.storage}, which is read-only"
". Set `partial_sync` in your pair section to `ignore` to ignore " ". Set `partial_sync` in your pair section to `ignore` to ignore "
"those changes, or `revert` to revert them on the other side.".format( "those changes, or `revert` to revert them on the other side."
status_name=status_name, storage=e.storage
)
) )
except SyncConflict as e: except SyncConflict as e:
cli_logger.error( cli_logger.error(
"{status_name}: One item changed on both sides. Resolve this " f"{status_name}: One item changed on both sides. Resolve this "
"conflict manually, or by setting the `conflict_resolution` " "conflict manually, or by setting the `conflict_resolution` "
"parameter in your config file.\n" "parameter in your config file.\n"
"See also {docs}/config.html#pair-section\n" f"See also {DOCS_HOME}/config.html#pair-section\n"
"Item ID: {e.ident}\n" f"Item ID: {e.ident}\n"
"Item href on side A: {e.href_a}\n" f"Item href on side A: {e.href_a}\n"
"Item href on side B: {e.href_b}\n".format( f"Item href on side B: {e.href_b}\n"
status_name=status_name, e=e, docs=DOCS_HOME
)
) )
except IdentConflict as e: except IdentConflict as e:
cli_logger.error( cli_logger.error(
@ -121,17 +120,17 @@ def handle_cli_error(status_name=None, e=None):
pass pass
except exceptions.PairNotFound as e: except exceptions.PairNotFound as e:
cli_logger.error( cli_logger.error(
"Pair {pair_name} does not exist. Please check your " f"Pair {e.pair_name} does not exist. Please check your "
"configuration file and make sure you've typed the pair name " "configuration file and make sure you've typed the pair name "
"correctly".format(pair_name=e.pair_name) "correctly"
) )
except exceptions.InvalidResponse as e: except exceptions.InvalidResponse as e:
cli_logger.error( cli_logger.error(
"The server returned something vdirsyncer doesn't understand. " "The server returned something vdirsyncer doesn't understand. "
"Error message: {!r}\n" f"Error message: {e!r}\n"
"While this is most likely a serverside problem, the vdirsyncer " "While this is most likely a serverside problem, the vdirsyncer "
"devs are generally interested in such bugs. Please report it in " "devs are generally interested in such bugs. Please report it in "
"the issue tracker at {}".format(e, BUGTRACKER_HOME) f"the issue tracker at {BUGTRACKER_HOME}"
) )
except exceptions.CollectionRequired: except exceptions.CollectionRequired:
cli_logger.error( cli_logger.error(
@ -154,13 +153,18 @@ def handle_cli_error(status_name=None, e=None):
cli_logger.debug("".join(tb)) cli_logger.debug("".join(tb))
def get_status_name(pair, collection): def get_status_name(pair: str, collection: str | None) -> str:
if collection is None: if collection is None:
return pair return pair
return pair + "/" + collection return pair + "/" + collection
def get_status_path(base_path, pair, collection=None, data_type=None): def get_status_path(
base_path: str,
pair: str,
collection: str | None = None,
data_type: str | None = None,
) -> str:
assert data_type is not None assert data_type is not None
status_name = get_status_name(pair, collection) status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name)) path = expand_path(os.path.join(base_path, status_name))
@ -174,10 +178,15 @@ def get_status_path(base_path, pair, collection=None, data_type=None):
return path return path
def load_status(base_path, pair, collection=None, data_type=None): def load_status(
base_path: str,
pair: str,
collection: str | None = None,
data_type: str | None = None,
) -> dict[str, Any]:
path = get_status_path(base_path, pair, collection, data_type) path = get_status_path(base_path, pair, collection, data_type)
if not os.path.exists(path): if not os.path.exists(path):
return None return {}
assert_permissions(path, STATUS_PERMISSIONS) assert_permissions(path, STATUS_PERMISSIONS)
with open(path) as f: with open(path) as f:
@ -189,7 +198,7 @@ def load_status(base_path, pair, collection=None, data_type=None):
return {} return {}
def prepare_status_path(path): def prepare_status_path(path: str) -> None:
dirname = os.path.dirname(path) dirname = os.path.dirname(path)
try: try:
@ -200,7 +209,7 @@ def prepare_status_path(path):
@contextlib.contextmanager @contextlib.contextmanager
def manage_sync_status(base_path, pair_name, collection_name): def manage_sync_status(base_path: str, pair_name: str, collection_name: str):
path = get_status_path(base_path, pair_name, collection_name, "items") path = get_status_path(base_path, pair_name, collection_name, "items")
status = None status = None
legacy_status = None legacy_status = None
@ -222,12 +231,17 @@ def manage_sync_status(base_path, pair_name, collection_name):
prepare_status_path(path) prepare_status_path(path)
status = SqliteStatus(path) status = SqliteStatus(path)
yield status with contextlib.closing(status):
yield status
def save_status(base_path, pair, collection=None, data_type=None, data=None): def save_status(
assert data_type is not None base_path: str,
assert data is not None pair: str,
data_type: str,
data: dict[str, Any],
collection: str | None = None,
) -> None:
status_name = get_status_name(pair, collection) status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name)) + "." + data_type path = expand_path(os.path.join(base_path, status_name)) + "." + data_type
prepare_status_path(path) prepare_status_path(path)
@ -272,15 +286,14 @@ async def storage_instance_from_config(
except exceptions.CollectionNotFound as e: except exceptions.CollectionNotFound as e:
if create: if create:
config = await handle_collection_not_found( config = await handle_collection_not_found(
config, config.get("collection", None), e=str(e) config, config.get("collection", None), e=str(e), implicit_create=True
) )
return await storage_instance_from_config( return await storage_instance_from_config(
config, config,
create=False, create=False,
connector=connector, connector=connector,
) )
else: raise
raise
except Exception: except Exception:
return handle_storage_init_error(cls, new_config) return handle_storage_init_error(cls, new_config)
@ -319,18 +332,18 @@ def handle_storage_init_error(cls, config):
) )
def assert_permissions(path, wanted): def assert_permissions(path: str, wanted: int) -> None:
permissions = os.stat(path).st_mode & 0o777 permissions = os.stat(path).st_mode & 0o777
if permissions > wanted: if permissions > wanted:
cli_logger.warning( cli_logger.warning(
"Correcting permissions of {} from {:o} to {:o}".format( f"Correcting permissions of {path} from {permissions:o} to {wanted:o}"
path, permissions, wanted
)
) )
os.chmod(path, wanted) os.chmod(path, wanted)
async def handle_collection_not_found(config, collection, e=None): async def handle_collection_not_found(
config, collection, e=None, implicit_create=False
):
storage_name = config.get("instance_name", None) storage_name = config.get("instance_name", None)
cli_logger.warning( cli_logger.warning(
@ -339,7 +352,7 @@ async def handle_collection_not_found(config, collection, e=None):
) )
) )
if click.confirm("Should vdirsyncer attempt to create it?"): if implicit_create or click.confirm("Should vdirsyncer attempt to create it?"):
storage_type = config["type"] storage_type = config["type"]
cls, config = storage_class_from_config(config) cls, config = storage_class_from_config(config)
config["collection"] = collection config["collection"] = collection
@ -351,7 +364,7 @@ async def handle_collection_not_found(config, collection, e=None):
cli_logger.error(e) cli_logger.error(e)
raise exceptions.UserError( raise exceptions.UserError(
'Unable to find or create collection "{collection}" for ' f'Unable to find or create collection "{collection}" for '
'storage "{storage}". Please create the collection ' f'storage "{storage_name}". Please create the collection '
"yourself.".format(collection=collection, storage=storage_name) "yourself."
) )

View file

@ -3,6 +3,8 @@ Contains exception classes used by vdirsyncer. Not all exceptions are here,
only the most commonly used ones. only the most commonly used ones.
""" """
from __future__ import annotations
class Error(Exception): class Error(Exception):
"""Baseclass for all errors.""" """Baseclass for all errors."""

View file

@ -1,9 +1,25 @@
from __future__ import annotations
import asyncio
import logging import logging
import os
import platform
import re
from abc import ABC
from abc import abstractmethod
from base64 import b64encode
from ssl import create_default_context from ssl import create_default_context
import aiohttp import aiohttp
import requests.auth
from aiohttp import ServerDisconnectedError
from aiohttp import ServerTimeoutError
from requests.utils import parse_dict_header
from tenacity import retry
from tenacity import retry_if_exception_type
from tenacity import stop_after_attempt
from tenacity import wait_exponential
from . import DOCS_HOME
from . import __version__ from . import __version__
from . import exceptions from . import exceptions
from .utils import expand_path from .utils import expand_path
@ -11,54 +27,101 @@ from .utils import expand_path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
USERAGENT = f"vdirsyncer/{__version__}" USERAGENT = f"vdirsyncer/{__version__}"
# 'hack' to prevent aiohttp from loading the netrc config,
def _detect_faulty_requests(): # pragma: no cover # but still allow it to read PROXY_* env vars.
text = ( # Otherwise, if our host is defined in the netrc config,
"Error during import: {e}\n\n" # aiohttp will overwrite our Authorization header.
"If you have installed vdirsyncer from a distro package, please file " # https://github.com/pimutils/vdirsyncer/issues/1138
"a bug against that package, not vdirsyncer.\n\n" os.environ["NETRC"] = "NUL" if platform.system() == "Windows" else "/dev/null"
"Consult {d}/problems.html#requests-related-importerrors"
"-based-distributions on how to work around this."
)
try:
from requests_toolbelt.auth.guess import GuessAuth # noqa
except ImportError as e:
import sys
print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr)
sys.exit(1)
_detect_faulty_requests() class AuthMethod(ABC):
del _detect_faulty_requests def __init__(self, username, password):
self.username = username
self.password = password
@abstractmethod
def handle_401(self, response):
raise NotImplementedError
@abstractmethod
def get_auth_header(self, method, url):
raise NotImplementedError
def __eq__(self, other):
if not isinstance(other, AuthMethod):
return False
return (
self.__class__ == other.__class__
and self.username == other.username
and self.password == other.password
)
class BasicAuthMethod(AuthMethod):
def handle_401(self, _response):
pass
def get_auth_header(self, _method, _url):
auth_str = f"{self.username}:{self.password}"
return "Basic " + b64encode(auth_str.encode("utf-8")).decode("utf-8")
class DigestAuthMethod(AuthMethod):
# make class var to 'cache' the state, which is more efficient because otherwise
# each request would first require another 'initialization' request.
_auth_helpers: dict[tuple[str, str], requests.auth.HTTPDigestAuth] = {}
def __init__(self, username: str, password: str):
super().__init__(username, password)
self._auth_helper = self._auth_helpers.get(
(username, password), requests.auth.HTTPDigestAuth(username, password)
)
self._auth_helpers[(username, password)] = self._auth_helper
@property
def auth_helper_vars(self):
return self._auth_helper._thread_local
def handle_401(self, response):
s_auth = response.headers.get("www-authenticate", "")
if "digest" in s_auth.lower():
# Original source:
# https://github.com/psf/requests/blob/f12ccbef6d6b95564da8d22e280d28c39d53f0e9/src/requests/auth.py#L262-L263
pat = re.compile(r"digest ", flags=re.IGNORECASE)
self.auth_helper_vars.chal = parse_dict_header(pat.sub("", s_auth, count=1))
def get_auth_header(self, method, url):
self._auth_helper.init_per_thread_state()
if not self.auth_helper_vars.chal:
# Need to do init request first
return ""
return self._auth_helper.build_digest_header(method, url)
def prepare_auth(auth, username, password): def prepare_auth(auth, username, password):
if username and password: if username and password:
if auth == "basic" or auth is None: if auth == "basic" or auth is None:
return aiohttp.BasicAuth(username, password) return BasicAuthMethod(username, password)
elif auth == "digest": if auth == "digest":
from requests.auth import HTTPDigestAuth return DigestAuthMethod(username, password)
if auth == "guess":
return HTTPDigestAuth(username, password) raise exceptions.UserError(
elif auth == "guess": "'Guess' authentication is not supported in this version of "
try: "vdirsyncer.\n"
from requests_toolbelt.auth.guess import GuessAuth "Please explicitly specify either 'basic' or 'digest' auth instead. \n"
except ImportError: "See the following issue for more information: "
raise exceptions.UserError( "https://github.com/pimutils/vdirsyncer/issues/1015"
"Your version of requests_toolbelt is too " )
"old for `guess` authentication. At least "
"version 0.4.0 is required."
)
else:
return GuessAuth(username, password)
else: else:
raise exceptions.UserError(f"Unknown authentication method: {auth}") raise exceptions.UserError(f"Unknown authentication method: {auth}")
elif auth: elif auth:
raise exceptions.UserError( raise exceptions.UserError(
"You need to specify username and password " f"You need to specify username and password for {auth} authentication."
"for {} authentication.".format(auth)
) )
return None return None
@ -92,18 +155,79 @@ def prepare_client_cert(cert):
return cert return cert
class TransientNetworkError(exceptions.Error):
"""Transient network condition that should be retried."""
def _is_safe_to_retry_method(method: str) -> bool:
"""Returns True if the HTTP method is safe/idempotent to retry.
We consider these safe for our WebDAV usage:
- GET, HEAD, OPTIONS: standard safe methods
- PROPFIND, REPORT: read-only DAV queries used for listing/fetching
"""
return method.upper() in {"GET", "HEAD", "OPTIONS", "PROPFIND", "REPORT"}
class UsageLimitReached(exceptions.Error):
pass
async def _is_quota_exceeded_google(response: aiohttp.ClientResponse) -> bool:
"""Return True if the response JSON indicates Google-style `usageLimits` exceeded.
Expected shape:
{"error": {"errors": [{"domain": "usageLimits", ...}], ...}}
See https://developers.google.com/workspace/calendar/api/guides/errors#403_usage_limits_exceeded
"""
try:
data = await response.json(content_type=None)
except Exception:
return False
if not isinstance(data, dict):
return False
error = data.get("error")
if not isinstance(error, dict):
return False
errors = error.get("errors")
if not isinstance(errors, list):
return False
for entry in errors:
if isinstance(entry, dict) and entry.get("domain") == "usageLimits":
return True
return False
@retry(
stop=stop_after_attempt(5),
wait=wait_exponential(multiplier=1, min=4, max=10),
retry=(
retry_if_exception_type(UsageLimitReached)
| retry_if_exception_type(TransientNetworkError)
),
reraise=True,
)
async def request( async def request(
method, method,
url, url,
session, session,
auth=None,
latin1_fallback=True, latin1_fallback=True,
**kwargs, **kwargs,
): ):
"""Wrapper method for requests, to ease logging and mocking. """Wrapper method for requests, to ease logging and mocking as well as to
support auth methods currently unsupported by aiohttp.
Parameters should be the same as for ``aiohttp.request``, as well as: Parameters should be the same as for ``aiohttp.request``, except:
:param session: A requests session object to use. :param session: A requests session object to use.
:param auth: The HTTP ``AuthMethod`` to use for authentication.
:param verify_fingerprint: Optional. SHA256 of the expected server certificate. :param verify_fingerprint: Optional. SHA256 of the expected server certificate.
:param latin1_fallback: RFC-2616 specifies the default Content-Type of :param latin1_fallback: RFC-2616 specifies the default Content-Type of
text/* to be latin1, which is not always correct, but exactly what text/* to be latin1, which is not always correct, but exactly what
@ -122,14 +246,54 @@ async def request(
logger.debug("=" * 20) logger.debug("=" * 20)
logger.debug(f"{method} {url}") logger.debug(f"{method} {url}")
logger.debug(kwargs.get("headers", {})) logger.debug(kwargs.get("headers", {}))
logger.debug(kwargs.get("data", None)) logger.debug(kwargs.get("data"))
logger.debug("Sending request...") logger.debug("Sending request...")
assert isinstance(kwargs.get("data", b""), bytes) assert isinstance(kwargs.get("data", b""), bytes)
kwargs.pop("cert", None) # TODO XXX FIXME! cert = kwargs.pop("cert", None)
if cert is not None:
ssl_context = kwargs.pop("ssl", create_default_context())
ssl_context.load_cert_chain(*cert)
kwargs["ssl"] = ssl_context
response = await session.request(method, url, **kwargs) headers = kwargs.pop("headers", {})
response: aiohttp.ClientResponse | None = None
for _attempt in range(2):
if auth:
headers["Authorization"] = auth.get_auth_header(method, url)
try:
response = await session.request(method, url, headers=headers, **kwargs)
except (
ServerDisconnectedError,
ServerTimeoutError,
asyncio.TimeoutError,
) as e:
# Retry only if the method is safe/idempotent for our DAV use
if _is_safe_to_retry_method(method):
logger.debug(
f"Transient network error on {method} {url}: {e}. Will retry."
)
raise TransientNetworkError(str(e)) from e
raise e from None
if response is None:
raise RuntimeError("No HTTP response obtained")
if response.ok or not auth:
# we don't need to do the 401-loop if we don't do auth in the first place
break
if response.status == 401:
auth.handle_401(response)
# retry once more after handling the 401 challenge
continue
else:
# some other error, will be handled later on
break
if response is None:
raise RuntimeError("No HTTP response obtained")
# See https://github.com/kennethreitz/requests/issues/2042 # See https://github.com/kennethreitz/requests/issues/2042
content_type = response.headers.get("Content-Type", "") content_type = response.headers.get("Content-Type", "")
@ -145,10 +309,18 @@ async def request(
logger.debug(response.headers) logger.debug(response.headers)
logger.debug(response.content) logger.debug(response.content)
if logger.getEffectiveLevel() <= logging.DEBUG and response.status >= 400:
# https://github.com/pimutils/vdirsyncer/issues/1186
logger.debug(await response.text())
if response.status == 403 and await _is_quota_exceeded_google(response):
raise UsageLimitReached(response.reason)
if response.status == 412: if response.status == 412:
raise exceptions.PreconditionFailed(response.reason) raise exceptions.PreconditionFailed(response.reason)
if response.status in (404, 410): if response.status in (404, 410):
raise exceptions.NotFoundError(response.reason) raise exceptions.NotFoundError(response.reason)
if response.status == 429:
raise UsageLimitReached(response.reason)
response.raise_for_status() response.raise_for_status()
return response return response

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import logging import logging
from . import exceptions from . import exceptions
@ -55,7 +57,7 @@ async def metasync(storage_a, storage_b, status, keys, conflict_resolution=None)
logger.debug(f"B: {b}") logger.debug(f"B: {b}")
logger.debug(f"S: {s}") logger.debug(f"S: {s}")
if a != s and b != s or storage_a.read_only or storage_b.read_only: if (a != s and b != s) or storage_a.read_only or storage_b.read_only:
await _resolve_conflict() await _resolve_conflict()
elif a != s and b == s: elif a != s and b == s:
await _a_to_b() await _a_to_b()

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import logging import logging
from os.path import basename from os.path import basename
@ -24,9 +26,9 @@ async def repair_storage(storage, repair_unsafe_uid):
new_item = repair_item(href, item, seen_uids, repair_unsafe_uid) new_item = repair_item(href, item, seen_uids, repair_unsafe_uid)
except IrreparableItem: except IrreparableItem:
logger.error( logger.error(
"Item {!r} is malformed beyond repair. " f"Item {href!r} is malformed beyond repair. "
"The PRODID property may indicate which software " "The PRODID property may indicate which software "
"created this item.".format(href) "created this item."
) )
logger.error(f"Item content: {item.raw!r}") logger.error(f"Item content: {item.raw!r}")
continue continue
@ -42,7 +44,7 @@ async def repair_storage(storage, repair_unsafe_uid):
def repair_item(href, item, seen_uids, repair_unsafe_uid): def repair_item(href, item, seen_uids, repair_unsafe_uid):
if item.parsed is None: if item.parsed is None:
raise IrreparableItem() raise IrreparableItem
new_item = item new_item = item
@ -54,14 +56,12 @@ def repair_item(href, item, seen_uids, repair_unsafe_uid):
new_item = item.with_uid(generate_href()) new_item = item.with_uid(generate_href())
elif not href_safe(item.uid) or not href_safe(basename(href)): elif not href_safe(item.uid) or not href_safe(basename(href)):
if not repair_unsafe_uid: if not repair_unsafe_uid:
logger.warning( logger.warning("UID may cause problems, add --repair-unsafe-uid to repair.")
"UID may cause problems, add " "--repair-unsafe-uid to repair."
)
else: else:
logger.warning("UID or href is unsafe, assigning random UID.") logger.warning("UID or href is unsafe, assigning random UID.")
new_item = item.with_uid(generate_href()) new_item = item.with_uid(generate_href())
if not new_item.uid: if not new_item.uid:
raise IrreparableItem() raise IrreparableItem
return new_item return new_item

View file

@ -1,16 +1,15 @@
from __future__ import annotations
import contextlib import contextlib
import functools import functools
from abc import ABCMeta from abc import ABCMeta
from abc import abstractmethod from abc import abstractmethod
from typing import Iterable from collections.abc import Iterable
from typing import List
from typing import Optional
from vdirsyncer import exceptions
from vdirsyncer.utils import uniq
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
from .. import exceptions
from ..utils import uniq
def mutating_storage_method(f): def mutating_storage_method(f):
"""Wrap a method and fail if the instance is readonly.""" """Wrap a method and fail if the instance is readonly."""
@ -34,7 +33,6 @@ class StorageMeta(ABCMeta):
class Storage(metaclass=StorageMeta): class Storage(metaclass=StorageMeta):
"""Superclass of all storages, interface that all storages have to """Superclass of all storages, interface that all storages have to
implement. implement.
@ -67,21 +65,37 @@ class Storage(metaclass=StorageMeta):
# The machine-readable name of this collection. # The machine-readable name of this collection.
collection = None collection = None
# A value of False means storage does not support delete requests. A
# value of True mean the storage supports it.
no_delete = False
# A value of True means the storage does not support write-methods such as # A value of True means the storage does not support write-methods such as
# upload, update and delete. A value of False means the storage does # upload, update and delete. A value of False means the storage does
# support those methods. # support those methods.
read_only = False read_only = False
# The attribute values to show in the representation of the storage. # The attribute values to show in the representation of the storage.
_repr_attributes: List[str] = [] _repr_attributes: tuple[str, ...] = ()
def __init__(self, instance_name=None, read_only=None, collection=None): def __init__(
self,
instance_name=None,
read_only=None,
no_delete=None,
collection=None,
):
if read_only is None: if read_only is None:
read_only = self.read_only read_only = self.read_only
if self.read_only and not read_only: if self.read_only and not read_only:
raise exceptions.UserError("This storage can only be read-only.") raise exceptions.UserError("This storage can only be read-only.")
self.read_only = bool(read_only) self.read_only = bool(read_only)
if no_delete is None:
no_delete = self.no_delete
if self.no_delete and not no_delete:
raise exceptions.UserError("Nothing can be deleted in this storage.")
self.no_delete = bool(no_delete)
if collection and instance_name: if collection and instance_name:
instance_name = f"{instance_name}/{collection}" instance_name = f"{instance_name}/{collection}"
self.instance_name = instance_name self.instance_name = instance_name
@ -105,7 +119,7 @@ class Storage(metaclass=StorageMeta):
""" """
if False: if False:
yield # Needs to be an async generator yield # Needs to be an async generator
raise NotImplementedError() raise NotImplementedError
@classmethod @classmethod
async def create_collection(cls, collection, **kwargs): async def create_collection(cls, collection, **kwargs):
@ -117,7 +131,7 @@ class Storage(metaclass=StorageMeta):
The returned args should contain the collection name, for UI purposes. The returned args should contain the collection name, for UI purposes.
""" """
raise NotImplementedError() raise NotImplementedError
def __repr__(self): def __repr__(self):
try: try:
@ -126,19 +140,17 @@ class Storage(metaclass=StorageMeta):
except ValueError: except ValueError:
pass pass
return "<{}(**{})>".format( attrs = {x: getattr(self, x) for x in self._repr_attributes}
self.__class__.__name__, return f"<{self.__class__.__name__}(**{attrs})>"
{x: getattr(self, x) for x in self._repr_attributes},
)
@abstractmethod @abstractmethod
async def list(self) -> List[tuple]: async def list(self) -> list[tuple]:
""" """
:returns: list of (href, etag) :returns: list of (href, etag)
""" """
@abstractmethod @abstractmethod
async def get(self, href: str): async def get(self, href: str) -> tuple[Item, str]:
"""Fetch a single item. """Fetch a single item.
:param href: href to fetch :param href: href to fetch
@ -184,7 +196,7 @@ class Storage(metaclass=StorageMeta):
:returns: (href, etag) :returns: (href, etag)
""" """
raise NotImplementedError() raise NotImplementedError
async def update(self, href: str, item: Item, etag): async def update(self, href: str, item: Item, etag):
"""Update an item. """Update an item.
@ -197,7 +209,7 @@ class Storage(metaclass=StorageMeta):
:returns: etag :returns: etag
""" """
raise NotImplementedError() raise NotImplementedError
async def delete(self, href: str, etag: str): async def delete(self, href: str, etag: str):
"""Delete an item by href. """Delete an item by href.
@ -205,7 +217,7 @@ class Storage(metaclass=StorageMeta):
:raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` when item has :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` when item has
a different etag or doesn't exist. a different etag or doesn't exist.
""" """
raise NotImplementedError() raise NotImplementedError
@contextlib.asynccontextmanager @contextlib.asynccontextmanager
async def at_once(self): async def at_once(self):
@ -227,7 +239,7 @@ class Storage(metaclass=StorageMeta):
""" """
yield yield
async def get_meta(self, key: str) -> Optional[str]: async def get_meta(self, key: str) -> str | None:
"""Get metadata value for collection/storage. """Get metadata value for collection/storage.
See the vdir specification for the keys that *have* to be accepted. See the vdir specification for the keys that *have* to be accepted.
@ -237,7 +249,7 @@ class Storage(metaclass=StorageMeta):
""" """
raise NotImplementedError("This storage does not support metadata.") raise NotImplementedError("This storage does not support metadata.")
async def set_meta(self, key: str, value: Optional[str]): async def set_meta(self, key: str, value: str | None):
"""Set metadata value for collection/storage. """Set metadata value for collection/storage.
:param key: The metadata key. :param key: The metadata key.
@ -246,7 +258,7 @@ class Storage(metaclass=StorageMeta):
raise NotImplementedError("This storage does not support metadata.") raise NotImplementedError("This storage does not support metadata.")
def normalize_meta_value(value) -> Optional[str]: def normalize_meta_value(value) -> str | None:
# `None` is returned by iCloud for empty properties. # `None` is returned by iCloud for empty properties.
if value is None or value == "None": if value is None or value == "None":
return None return None

View file

@ -1,26 +1,28 @@
from __future__ import annotations
import contextlib
import datetime import datetime
import logging import logging
import urllib.parse as urlparse import urllib.parse as urlparse
import xml.etree.ElementTree as etree import xml.etree.ElementTree as etree
from abc import abstractmethod from abc import abstractmethod
from functools import cached_property
from inspect import getfullargspec from inspect import getfullargspec
from inspect import signature from inspect import signature
from typing import Optional
from typing import Type
import aiohttp import aiohttp
import aiostream import aiostream
from vdirsyncer import exceptions
from vdirsyncer import http
from vdirsyncer import utils
from vdirsyncer.exceptions import Error from vdirsyncer.exceptions import Error
from vdirsyncer.http import USERAGENT
from vdirsyncer.http import prepare_auth
from vdirsyncer.http import prepare_client_cert
from vdirsyncer.http import prepare_verify
from vdirsyncer.vobject import Item from vdirsyncer.vobject import Item
from .. import exceptions
from .. import http
from .. import utils
from ..http import USERAGENT
from ..http import prepare_auth
from ..http import prepare_client_cert
from ..http import prepare_verify
from .base import Storage from .base import Storage
from .base import normalize_meta_value from .base import normalize_meta_value
@ -92,8 +94,7 @@ def _parse_xml(content):
return etree.XML(_clean_body(content)) return etree.XML(_clean_body(content))
except etree.ParseError as e: except etree.ParseError as e:
raise InvalidXMLResponse( raise InvalidXMLResponse(
"Invalid XML encountered: {}\n" f"Invalid XML encountered: {e}\nDouble-check the URLs in your config."
"Double-check the URLs in your config.".format(e)
) )
@ -114,10 +115,8 @@ def _fuzzy_matches_mimetype(strict, weak):
if strict is None or weak is None: if strict is None or weak is None:
return True return True
mediatype, subtype = strict.split("/") _mediatype, subtype = strict.split("/")
if subtype in weak: return subtype in weak
return True
return False
class Discover: class Discover:
@ -128,7 +127,7 @@ class Discover:
@property @property
@abstractmethod @abstractmethod
def _resourcetype(self) -> Optional[str]: def _resourcetype(self) -> str | None:
pass pass
@property @property
@ -173,7 +172,7 @@ class Discover:
dav_logger.debug("Trying out well-known URI") dav_logger.debug("Trying out well-known URI")
return await self._find_principal_impl(self._well_known_uri) return await self._find_principal_impl(self._well_known_uri)
async def _find_principal_impl(self, url): async def _find_principal_impl(self, url) -> str:
headers = self.session.get_default_headers() headers = self.session.get_default_headers()
headers["Depth"] = "0" headers["Depth"] = "0"
body = b""" body = b"""
@ -198,11 +197,9 @@ class Discover:
# E.g. Synology NAS # E.g. Synology NAS
# See https://github.com/pimutils/vdirsyncer/issues/498 # See https://github.com/pimutils/vdirsyncer/issues/498
dav_logger.debug( dav_logger.debug(
"No current-user-principal returned, re-using URL {}".format( f"No current-user-principal returned, re-using URL {response.url}"
response.url
)
) )
return response.url return response.url.human_repr()
return urlparse.urljoin(str(response.url), rv.text).rstrip("/") + "/" return urlparse.urljoin(str(response.url), rv.text).rstrip("/") + "/"
async def find_home(self): async def find_home(self):
@ -222,10 +219,8 @@ class Discover:
async def find_collections(self): async def find_collections(self):
rv = None rv = None
try: with contextlib.suppress(aiohttp.ClientResponseError, exceptions.Error):
rv = await aiostream.stream.list(self._find_collections_impl("")) rv = await aiostream.stream.list(self._find_collections_impl(""))
except (aiohttp.ClientResponseError, exceptions.Error):
pass
if rv: if rv:
return rv return rv
@ -240,7 +235,7 @@ class Discover:
return True return True
props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop")) props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
if props is None or not len(props): if props is None or not props:
dav_logger.debug("Skipping, missing <prop>: %s", response) dav_logger.debug("Skipping, missing <prop>: %s", response)
return False return False
if props.find("{DAV:}resourcetype/" + self._resourcetype) is None: if props.find("{DAV:}resourcetype/" + self._resourcetype) is None:
@ -264,7 +259,7 @@ class Discover:
href = response.find("{DAV:}href") href = response.find("{DAV:}href")
if href is None: if href is None:
raise InvalidXMLResponse("Missing href tag for collection " "props.") raise InvalidXMLResponse("Missing href tag for collection props.")
href = urlparse.urljoin(str(r.url), href.text) href = urlparse.urljoin(str(r.url), href.text)
if href not in done: if href not in done:
done.add(href) done.add(href)
@ -313,9 +308,7 @@ class Discover:
</mkcol> </mkcol>
""".format( """.format(
etree.tostring(etree.Element(self._resourcetype), encoding="unicode") etree.tostring(etree.Element(self._resourcetype), encoding="unicode")
).encode( ).encode("utf-8")
"utf-8"
)
response = await self.session.request( response = await self.session.request(
"MKCOL", "MKCOL",
@ -328,7 +321,7 @@ class Discover:
class CalDiscover(Discover): class CalDiscover(Discover):
_namespace = "urn:ietf:params:xml:ns:caldav" _namespace = "urn:ietf:params:xml:ns:caldav"
_resourcetype = "{%s}calendar" % _namespace _resourcetype = f"{{{_namespace}}}calendar"
_homeset_xml = b""" _homeset_xml = b"""
<propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav"> <propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav">
<prop> <prop>
@ -336,13 +329,13 @@ class CalDiscover(Discover):
</prop> </prop>
</propfind> </propfind>
""" """
_homeset_tag = "{%s}calendar-home-set" % _namespace _homeset_tag = f"{{{_namespace}}}calendar-home-set"
_well_known_uri = "/.well-known/caldav" _well_known_uri = "/.well-known/caldav"
class CardDiscover(Discover): class CardDiscover(Discover):
_namespace = "urn:ietf:params:xml:ns:carddav" _namespace = "urn:ietf:params:xml:ns:carddav"
_resourcetype: Optional[str] = "{%s}addressbook" % _namespace _resourcetype: str | None = f"{{{_namespace}}}addressbook"
_homeset_xml = b""" _homeset_xml = b"""
<propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav"> <propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav">
<prop> <prop>
@ -350,7 +343,7 @@ class CardDiscover(Discover):
</prop> </prop>
</propfind> </propfind>
""" """
_homeset_tag = "{%s}addressbook-home-set" % _namespace _homeset_tag = f"{{{_namespace}}}addressbook-home-set"
_well_known_uri = "/.well-known/carddav" _well_known_uri = "/.well-known/carddav"
@ -398,7 +391,7 @@ class DAVSession:
self.url = url.rstrip("/") + "/" self.url = url.rstrip("/") + "/"
self.connector = connector self.connector = connector
@utils.cached_property @cached_property
def parsed_url(self): def parsed_url(self):
return urlparse.urlparse(self.url) return urlparse.urlparse(self.url)
@ -422,6 +415,7 @@ class DAVSession:
return aiohttp.ClientSession( return aiohttp.ClientSession(
connector=self.connector, connector=self.connector,
connector_owner=False, connector_owner=False,
trust_env=True,
# TODO use `raise_for_status=true`, though this needs traces first, # TODO use `raise_for_status=true`, though this needs traces first,
) )
@ -450,7 +444,7 @@ class DAVStorage(Storage):
@property @property
@abstractmethod @abstractmethod
def discovery_class(self) -> Type[Discover]: def discovery_class(self) -> type[Discover]:
"""Discover subclass to use.""" """Discover subclass to use."""
# The DAVSession class to use # The DAVSession class to use
@ -458,7 +452,7 @@ class DAVStorage(Storage):
connector: aiohttp.TCPConnector connector: aiohttp.TCPConnector
_repr_attributes = ["username", "url"] _repr_attributes = ("username", "url")
_property_table = { _property_table = {
"displayname": ("displayname", "DAV:"), "displayname": ("displayname", "DAV:"),
@ -503,8 +497,12 @@ class DAVStorage(Storage):
def _is_item_mimetype(self, mimetype): def _is_item_mimetype(self, mimetype):
return _fuzzy_matches_mimetype(self.item_mimetype, mimetype) return _fuzzy_matches_mimetype(self.item_mimetype, mimetype)
async def get(self, href: str): async def get(self, href: str) -> tuple[Item, str]:
((actual_href, item, etag),) = await aiostream.stream.list( actual_href: str
item: Item
etag: str
((actual_href, item, etag),) = await aiostream.stream.list( # type: ignore[misc]
self.get_multi([href]) self.get_multi([href])
) )
assert href == actual_href assert href == actual_href
@ -630,7 +628,7 @@ class DAVStorage(Storage):
continue continue
props = response.findall("{DAV:}propstat/{DAV:}prop") props = response.findall("{DAV:}propstat/{DAV:}prop")
if props is None or not len(props): if props is None or not props:
dav_logger.debug(f"Skipping {href!r}, properties are missing.") dav_logger.debug(f"Skipping {href!r}, properties are missing.")
continue continue
else: else:
@ -648,9 +646,7 @@ class DAVStorage(Storage):
contenttype = getattr(props.find("{DAV:}getcontenttype"), "text", None) contenttype = getattr(props.find("{DAV:}getcontenttype"), "text", None)
if not self._is_item_mimetype(contenttype): if not self._is_item_mimetype(contenttype):
dav_logger.debug( dav_logger.debug(
"Skipping {!r}, {!r} != {!r}.".format( f"Skipping {href!r}, {contenttype!r} != {self.item_mimetype!r}."
href, contenttype, self.item_mimetype
)
) )
continue continue
@ -685,11 +681,11 @@ class DAVStorage(Storage):
for href, etag, _prop in rv: for href, etag, _prop in rv:
yield href, etag yield href, etag
async def get_meta(self, key) -> Optional[str]: async def get_meta(self, key) -> str | None:
try: try:
tagname, namespace = self._property_table[key] tagname, namespace = self._property_table[key]
except KeyError: except KeyError:
raise exceptions.UnsupportedMetadataError() raise exceptions.UnsupportedMetadataError
xpath = f"{{{namespace}}}{tagname}" xpath = f"{{{namespace}}}{tagname}"
body = f"""<?xml version="1.0" encoding="utf-8" ?> body = f"""<?xml version="1.0" encoding="utf-8" ?>
@ -723,7 +719,7 @@ class DAVStorage(Storage):
try: try:
tagname, namespace = self._property_table[key] tagname, namespace = self._property_table[key]
except KeyError: except KeyError:
raise exceptions.UnsupportedMetadataError() raise exceptions.UnsupportedMetadataError
lxml_selector = f"{{{namespace}}}{tagname}" lxml_selector = f"{{{namespace}}}{tagname}"
element = etree.Element(lxml_selector) element = etree.Element(lxml_selector)
@ -744,9 +740,7 @@ class DAVStorage(Storage):
""".format( """.format(
etree.tostring(element, encoding="unicode"), etree.tostring(element, encoding="unicode"),
action=action, action=action,
).encode( ).encode("utf-8")
"utf-8"
)
await self.session.request( await self.session.request(
"PROPPATCH", "PROPPATCH",
@ -800,7 +794,7 @@ class CalDAVStorage(DAVStorage):
self.item_types = tuple(item_types) self.item_types = tuple(item_types)
if (start_date is None) != (end_date is None): if (start_date is None) != (end_date is None):
raise exceptions.UserError( raise exceptions.UserError(
"If start_date is given, " "end_date has to be given too." "If start_date is given, end_date has to be given too."
) )
elif start_date is not None and end_date is not None: elif start_date is not None and end_date is not None:
namespace = dict(datetime.__dict__) namespace = dict(datetime.__dict__)
@ -830,9 +824,7 @@ class CalDAVStorage(DAVStorage):
start = start.strftime(CALDAV_DT_FORMAT) start = start.strftime(CALDAV_DT_FORMAT)
end = end.strftime(CALDAV_DT_FORMAT) end = end.strftime(CALDAV_DT_FORMAT)
timefilter = '<C:time-range start="{start}" end="{end}"/>'.format( timefilter = f'<C:time-range start="{start}" end="{end}"/>'
start=start, end=end
)
else: else:
timefilter = "" timefilter = ""
@ -900,14 +892,21 @@ class CardDAVStorage(DAVStorage):
item_mimetype = "text/vcard" item_mimetype = "text/vcard"
discovery_class = CardDiscover discovery_class = CardDiscover
get_multi_template = """<?xml version="1.0" encoding="utf-8" ?> def __init__(self, *args, use_vcard_4=False, **kwargs):
self.use_vcard_4 = use_vcard_4
super().__init__(*args, **kwargs)
@property
def get_multi_template(self):
ct = 'Content-Type="text/vcard" version="4.0"' if self.use_vcard_4 else ""
return f"""<?xml version="1.0" encoding="utf-8" ?>
<C:addressbook-multiget xmlns="DAV:" <C:addressbook-multiget xmlns="DAV:"
xmlns:C="urn:ietf:params:xml:ns:carddav"> xmlns:C="urn:ietf:params:xml:ns:carddav">
<prop> <prop>
<getetag/> <getetag/>
<C:address-data/> <C:address-data {ct}/>
</prop> </prop>
{hrefs} {{hrefs}}
</C:addressbook-multiget>""" </C:addressbook-multiget>"""
get_multi_data_query = "{urn:ietf:params:xml:ns:carddav}address-data" get_multi_data_query = "{urn:ietf:params:xml:ns:carddav}address-data"

View file

@ -1,16 +1,19 @@
from __future__ import annotations
import contextlib
import errno import errno
import logging import logging
import os import os
import subprocess import subprocess
from atomicwrites import atomic_write from vdirsyncer import exceptions
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import checkdir
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import generate_href
from vdirsyncer.utils import get_etag_from_file
from vdirsyncer.vobject import Item
from .. import exceptions
from ..utils import checkdir
from ..utils import expand_path
from ..utils import generate_href
from ..utils import get_etag_from_file
from ..vobject import Item
from .base import Storage from .base import Storage
from .base import normalize_meta_value from .base import normalize_meta_value
@ -18,9 +21,8 @@ logger = logging.getLogger(__name__)
class FilesystemStorage(Storage): class FilesystemStorage(Storage):
storage_name = "filesystem" storage_name = "filesystem"
_repr_attributes = ["path"] _repr_attributes = ("path",)
def __init__( def __init__(
self, self,
@ -28,6 +30,7 @@ class FilesystemStorage(Storage):
fileext, fileext,
encoding="utf-8", encoding="utf-8",
post_hook=None, post_hook=None,
pre_deletion_hook=None,
fileignoreext=".tmp", fileignoreext=".tmp",
**kwargs, **kwargs,
): ):
@ -39,6 +42,7 @@ class FilesystemStorage(Storage):
self.fileext = fileext self.fileext = fileext
self.fileignoreext = fileignoreext self.fileignoreext = fileignoreext
self.post_hook = post_hook self.post_hook = post_hook
self.pre_deletion_hook = pre_deletion_hook
@classmethod @classmethod
async def discover(cls, path, **kwargs): async def discover(cls, path, **kwargs):
@ -62,9 +66,7 @@ class FilesystemStorage(Storage):
def _validate_collection(cls, path): def _validate_collection(cls, path):
if not os.path.isdir(path): if not os.path.isdir(path):
return False return False
if os.path.basename(path).startswith("."): return not os.path.basename(path).startswith(".")
return False
return True
@classmethod @classmethod
async def create_collection(cls, collection, **kwargs): async def create_collection(cls, collection, **kwargs):
@ -96,7 +98,7 @@ class FilesystemStorage(Storage):
): ):
yield fname, get_etag_from_file(fpath) yield fname, get_etag_from_file(fpath)
async def get(self, href): async def get(self, href) -> tuple[Item, str]:
fpath = self._get_filepath(href) fpath = self._get_filepath(href)
try: try:
with open(fpath, "rb") as f: with open(fpath, "rb") as f:
@ -165,6 +167,9 @@ class FilesystemStorage(Storage):
actual_etag = get_etag_from_file(fpath) actual_etag = get_etag_from_file(fpath)
if etag != actual_etag: if etag != actual_etag:
raise exceptions.WrongEtagError(etag, actual_etag) raise exceptions.WrongEtagError(etag, actual_etag)
if self.pre_deletion_hook:
self._run_pre_deletion_hook(fpath)
os.remove(fpath) os.remove(fpath)
def _run_post_hook(self, fpath): def _run_post_hook(self, fpath):
@ -172,7 +177,16 @@ class FilesystemStorage(Storage):
try: try:
subprocess.call([self.post_hook, fpath]) subprocess.call([self.post_hook, fpath])
except OSError as e: except OSError as e:
logger.warning(f"Error executing external hook: {str(e)}") logger.warning(f"Error executing external hook: {e!s}")
def _run_pre_deletion_hook(self, fpath):
logger.info(
f"Calling pre_deletion_hook={self.pre_deletion_hook} with argument={fpath}"
)
try:
subprocess.call([self.pre_deletion_hook, fpath])
except OSError as e:
logger.warning(f"Error executing external hook: {e!s}")
async def get_meta(self, key): async def get_meta(self, key):
fpath = os.path.join(self.path, key) fpath = os.path.join(self.path, key)
@ -190,10 +204,8 @@ class FilesystemStorage(Storage):
fpath = os.path.join(self.path, key) fpath = os.path.join(self.path, key)
if value is None: if value is None:
try: with contextlib.suppress(OSError):
os.remove(fpath) os.remove(fpath)
except OSError:
pass
else: else:
with atomic_write(fpath, mode="wb", overwrite=True) as f: with atomic_write(fpath, mode="wb", overwrite=True) as f:
f.write(value.encode(self.encoding)) f.write(value.encode(self.encoding))

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import json import json
import logging import logging
import os import os
@ -9,12 +11,13 @@ from threading import Thread
import aiohttp import aiohttp
import click import click
from atomicwrites import atomic_write
from .. import exceptions from vdirsyncer import exceptions
from ..utils import checkdir from vdirsyncer.utils import atomic_write
from ..utils import expand_path from vdirsyncer.utils import checkdir
from ..utils import open_graphical_browser from vdirsyncer.utils import expand_path
from vdirsyncer.utils import open_graphical_browser
from . import base from . import base
from . import dav from . import dav
from .google_helpers import _RedirectWSGIApp from .google_helpers import _RedirectWSGIApp
@ -96,6 +99,7 @@ class GoogleSession(dav.DAVSession):
token_updater=self._save_token, token_updater=self._save_token,
connector=self.connector, connector=self.connector,
connector_owner=False, connector_owner=False,
trust_env=True,
) )
async def _init_token(self): async def _init_token(self):
@ -106,8 +110,8 @@ class GoogleSession(dav.DAVSession):
pass pass
except ValueError as e: except ValueError as e:
raise exceptions.UserError( raise exceptions.UserError(
"Failed to load token file {}, try deleting it. " f"Failed to load token file {self._token_file}, try deleting it. "
"Original error: {}".format(self._token_file, e) f"Original error: {e}"
) )
if not self._token: if not self._token:
@ -126,7 +130,7 @@ class GoogleSession(dav.DAVSession):
async with self._session as session: async with self._session as session:
# Fail fast if the address is occupied # Fail fast if the address is occupied
authorization_url, state = session.authorization_url( authorization_url, _state = session.authorization_url(
TOKEN_URL, TOKEN_URL,
# access_type and approval_prompt are Google specific # access_type and approval_prompt are Google specific
# extra parameters. # extra parameters.
@ -190,7 +194,7 @@ class GoogleCalendarStorage(dav.CalDAVStorage):
**kwargs, **kwargs,
): ):
if not kwargs.get("collection"): if not kwargs.get("collection"):
raise exceptions.CollectionRequired() raise exceptions.CollectionRequired
super().__init__( super().__init__(
token_file=token_file, token_file=token_file,
@ -228,7 +232,7 @@ class GoogleContactsStorage(dav.CardDAVStorage):
def __init__(self, token_file, client_id, client_secret, **kwargs): def __init__(self, token_file, client_id, client_secret, **kwargs):
if not kwargs.get("collection"): if not kwargs.get("collection"):
raise exceptions.CollectionRequired() raise exceptions.CollectionRequired
super().__init__( super().__init__(
token_file=token_file, token_file=token_file,

View file

@ -2,15 +2,14 @@
# #
# Based on: # Based on:
# https://github.com/googleapis/google-auth-library-python-oauthlib/blob/1fb16be1bad9050ee29293541be44e41e82defd7/google_auth_oauthlib/flow.py#L513 # https://github.com/googleapis/google-auth-library-python-oauthlib/blob/1fb16be1bad9050ee29293541be44e41e82defd7/google_auth_oauthlib/flow.py#L513
from __future__ import annotations
import logging import logging
import wsgiref.simple_server import wsgiref.simple_server
import wsgiref.util import wsgiref.util
from collections.abc import Iterable
from typing import Any from typing import Any
from typing import Callable from typing import Callable
from typing import Dict
from typing import Iterable
from typing import Optional
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -29,7 +28,7 @@ class _RedirectWSGIApp:
Stores the request URI and displays the given success message. Stores the request URI and displays the given success message.
""" """
last_request_uri: Optional[str] last_request_uri: str | None
def __init__(self, success_message: str): def __init__(self, success_message: str):
""" """
@ -41,7 +40,7 @@ class _RedirectWSGIApp:
def __call__( def __call__(
self, self,
environ: Dict[str, Any], environ: dict[str, Any],
start_response: Callable[[str, list], None], start_response: Callable[[str, list], None],
) -> Iterable[bytes]: ) -> Iterable[bytes]:
"""WSGI Callable. """WSGI Callable.

View file

@ -1,22 +1,29 @@
from __future__ import annotations
import logging
import subprocess
import urllib.parse as urlparse import urllib.parse as urlparse
import aiohttp import aiohttp
from .. import exceptions from vdirsyncer import exceptions
from ..http import USERAGENT from vdirsyncer.http import USERAGENT
from ..http import prepare_auth from vdirsyncer.http import prepare_auth
from ..http import prepare_client_cert from vdirsyncer.http import prepare_client_cert
from ..http import prepare_verify from vdirsyncer.http import prepare_verify
from ..http import request from vdirsyncer.http import request
from ..vobject import Item from vdirsyncer.vobject import Item
from ..vobject import split_collection from vdirsyncer.vobject import split_collection
from .base import Storage from .base import Storage
logger = logging.getLogger(__name__)
class HttpStorage(Storage): class HttpStorage(Storage):
storage_name = "http" storage_name = "http"
read_only = True read_only = True
_repr_attributes = ["username", "url"] _repr_attributes = ("username", "url")
_items = None _items = None
# Required for tests. # Required for tests.
@ -32,9 +39,10 @@ class HttpStorage(Storage):
useragent=USERAGENT, useragent=USERAGENT,
verify_fingerprint=None, verify_fingerprint=None,
auth_cert=None, auth_cert=None,
filter_hook=None,
*, *,
connector, connector,
**kwargs **kwargs,
) -> None: ) -> None:
super().__init__(**kwargs) super().__init__(**kwargs)
@ -54,6 +62,7 @@ class HttpStorage(Storage):
self.useragent = useragent self.useragent = useragent
assert connector is not None assert connector is not None
self.connector = connector self.connector = connector
self._filter_hook = filter_hook
collection = kwargs.get("collection") collection = kwargs.get("collection")
if collection is not None: if collection is not None:
@ -64,10 +73,24 @@ class HttpStorage(Storage):
def _default_headers(self): def _default_headers(self):
return {"User-Agent": self.useragent} return {"User-Agent": self.useragent}
def _run_filter_hook(self, raw_item):
try:
result = subprocess.run(
[self._filter_hook],
input=raw_item,
capture_output=True,
encoding="utf-8",
)
return result.stdout
except OSError as e:
logger.warning(f"Error executing external command: {e!s}")
return raw_item
async def list(self): async def list(self):
async with aiohttp.ClientSession( async with aiohttp.ClientSession(
connector=self.connector, connector=self.connector,
connector_owner=False, connector_owner=False,
trust_env=True,
# TODO use `raise_for_status=true`, though this needs traces first, # TODO use `raise_for_status=true`, though this needs traces first,
) as session: ) as session:
r = await request( r = await request(
@ -79,8 +102,13 @@ class HttpStorage(Storage):
) )
self._items = {} self._items = {}
for item in split_collection((await r.read()).decode("utf-8")): for raw_item in split_collection((await r.read()).decode("utf-8")):
item = Item(item) if self._filter_hook:
raw_item = self._run_filter_hook(raw_item)
if not raw_item:
continue
item = Item(raw_item)
if self._ignore_uids: if self._ignore_uids:
item = item.with_uid(item.hash) item = item.with_uid(item.hash)
@ -89,11 +117,12 @@ class HttpStorage(Storage):
for href, (_, etag) in self._items.items(): for href, (_, etag) in self._items.items():
yield href, etag yield href, etag
async def get(self, href): async def get(self, href) -> tuple[Item, str]:
if self._items is None: if self._items is None:
async for _ in self.list(): async for _ in self.list():
pass pass
assert self._items is not None # type assertion
try: try:
return self._items[href] return self._items[href]
except KeyError: except KeyError:

View file

@ -1,6 +1,10 @@
from __future__ import annotations
import random import random
from .. import exceptions from vdirsyncer import exceptions
from vdirsyncer.vobject import Item
from .base import Storage from .base import Storage
from .base import normalize_meta_value from .base import normalize_meta_value
@ -10,7 +14,6 @@ def _random_string():
class MemoryStorage(Storage): class MemoryStorage(Storage):
storage_name = "memory" storage_name = "memory"
""" """
@ -19,7 +22,7 @@ class MemoryStorage(Storage):
def __init__(self, fileext="", **kwargs): def __init__(self, fileext="", **kwargs):
if kwargs.get("collection") is not None: if kwargs.get("collection") is not None:
raise exceptions.UserError("MemoryStorage does not support " "collections.") raise exceptions.UserError("MemoryStorage does not support collections.")
self.items = {} # href => (etag, item) self.items = {} # href => (etag, item)
self.metadata = {} self.metadata = {}
self.fileext = fileext self.fileext = fileext
@ -32,7 +35,7 @@ class MemoryStorage(Storage):
for href, (etag, _item) in self.items.items(): for href, (etag, _item) in self.items.items():
yield href, etag yield href, etag
async def get(self, href): async def get(self, href) -> tuple[Item, str]:
etag, item = self.items[href] etag, item = self.items[href]
return item, etag return item, etag

View file

@ -1,25 +1,36 @@
from __future__ import annotations
import collections import collections
import contextlib import contextlib
import functools import functools
import glob import glob
import logging import logging
import os import os
from collections.abc import Iterable
from atomicwrites import atomic_write from vdirsyncer import exceptions
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import checkfile
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import get_etag_from_file
from vdirsyncer.utils import uniq
from vdirsyncer.vobject import Item
from vdirsyncer.vobject import join_collection
from vdirsyncer.vobject import split_collection
from .. import exceptions
from ..utils import checkfile
from ..utils import expand_path
from ..utils import get_etag_from_file
from ..vobject import Item
from ..vobject import join_collection
from ..vobject import split_collection
from .base import Storage from .base import Storage
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _writing_op(f): def _writing_op(f):
"""Implement at_once for write operations.
Wrap an operation which writes to the storage, implementing `at_once` if it has been
requested. Changes are stored in-memory until the at_once block finishes, at which
time they are all written at once.
"""
@functools.wraps(f) @functools.wraps(f)
async def inner(self, *args, **kwargs): async def inner(self, *args, **kwargs):
if self._items is None or not self._at_once: if self._items is None or not self._at_once:
@ -36,7 +47,7 @@ def _writing_op(f):
class SingleFileStorage(Storage): class SingleFileStorage(Storage):
storage_name = "singlefile" storage_name = "singlefile"
_repr_attributes = ["path"] _repr_attributes = ("path",)
_write_mode = "wb" _write_mode = "wb"
_append_mode = "ab" _append_mode = "ab"
@ -65,7 +76,7 @@ class SingleFileStorage(Storage):
except TypeError: except TypeError:
# If not exactly one '%s' is present, we cannot discover # If not exactly one '%s' is present, we cannot discover
# collections because we wouldn't know which name to assign. # collections because we wouldn't know which name to assign.
raise NotImplementedError() raise NotImplementedError
placeholder_pos = path.index("%s") placeholder_pos = path.index("%s")
@ -91,7 +102,7 @@ class SingleFileStorage(Storage):
path = path % (collection,) path = path % (collection,)
except TypeError: except TypeError:
raise ValueError( raise ValueError(
"Exactly one %s required in path " "if collection is not null." "Exactly one %s required in path if collection is not null."
) )
checkfile(path, create=True) checkfile(path, create=True)
@ -122,16 +133,23 @@ class SingleFileStorage(Storage):
yield href, etag yield href, etag
async def get(self, href): async def get(self, href) -> tuple[Item, str]:
if self._items is None or not self._at_once: if self._items is None or not self._at_once:
async for _ in self.list(): async for _ in self.list():
pass pass
assert self._items is not None # type assertion
try: try:
return self._items[href] return self._items[href]
except KeyError: except KeyError:
raise exceptions.NotFoundError(href) raise exceptions.NotFoundError(href)
async def get_multi(self, hrefs: Iterable[str]):
async with self.at_once():
for href in uniq(hrefs):
item, etag = await self.get(href)
yield href, item, etag
@_writing_op @_writing_op
async def upload(self, item): async def upload(self, item):
href = item.ident href = item.ident
@ -169,11 +187,9 @@ class SingleFileStorage(Storage):
self.path self.path
): ):
raise exceptions.PreconditionFailed( raise exceptions.PreconditionFailed(
( f"Some other program modified the file {self.path!r}. Re-run the "
"Some other program modified the file {!r}. Re-run the " "synchronization and make sure absolutely no other program is "
"synchronization and make sure absolutely no other program is " "writing into the same file."
"writing into the same file."
).format(self.path)
) )
text = join_collection(item.raw for item, etag in self._items.values()) text = join_collection(item.raw for item, etag in self._items.values())
try: try:
@ -185,7 +201,8 @@ class SingleFileStorage(Storage):
@contextlib.asynccontextmanager @contextlib.asynccontextmanager
async def at_once(self): async def at_once(self):
self.list() async for _ in self.list():
pass
self._at_once = True self._at_once = True
try: try:
yield self yield self

View file

@ -9,18 +9,25 @@ Yang: http://blog.ezyang.com/2012/08/how-offlineimap-works/
Some modifications to it are explained in Some modifications to it are explained in
https://unterwaditzer.net/2016/sync-algorithm.html https://unterwaditzer.net/2016/sync-algorithm.html
""" """
from __future__ import annotations
import contextlib import contextlib
import itertools import itertools
import logging import logging
from ..exceptions import UserError from vdirsyncer.exceptions import UserError
from ..utils import uniq from vdirsyncer.storage.base import Storage
from vdirsyncer.utils import uniq
from vdirsyncer.vobject import Item
from .exceptions import BothReadOnly from .exceptions import BothReadOnly
from .exceptions import IdentAlreadyExists from .exceptions import IdentAlreadyExists
from .exceptions import PartialSync from .exceptions import PartialSync
from .exceptions import StorageEmpty from .exceptions import StorageEmpty
from .exceptions import SyncConflict from .exceptions import SyncConflict
from .status import ItemMetadata from .status import ItemMetadata
from .status import SqliteStatus
from .status import SubStatus from .status import SubStatus
sync_logger = logging.getLogger(__name__) sync_logger = logging.getLogger(__name__)
@ -30,22 +37,22 @@ class _StorageInfo:
"""A wrapper class that holds prefetched items, the status and other """A wrapper class that holds prefetched items, the status and other
things.""" things."""
def __init__(self, storage, status): def __init__(self, storage: Storage, status: SubStatus):
self.storage = storage self.storage = storage
self.status = status self.status = status
self._item_cache = {} self._item_cache = {} # type: ignore[var-annotated]
async def prepare_new_status(self): async def prepare_new_status(self) -> bool:
storage_nonempty = False storage_nonempty = False
prefetch = [] prefetch = []
def _store_props(ident, props): def _store_props(ident: str, props: ItemMetadata) -> None:
try: try:
self.status.insert_ident(ident, props) self.status.insert_ident(ident, props)
except IdentAlreadyExists as e: except IdentAlreadyExists as e:
raise e.to_ident_conflict(self.storage) raise e.to_ident_conflict(self.storage)
async for href, etag in self.storage.list(): async for href, etag in self.storage.list(): # type: ignore[attr-defined]
storage_nonempty = True storage_nonempty = True
ident, meta = self.status.get_by_href(href) ident, meta = self.status.get_by_href(href)
@ -68,7 +75,7 @@ class _StorageInfo:
return storage_nonempty return storage_nonempty
def is_changed(self, ident): def is_changed(self, ident: str) -> bool:
old_meta = self.status.get(ident) old_meta = self.status.get(ident)
if old_meta is None: # new item if old_meta is None: # new item
return True return True
@ -81,30 +88,28 @@ class _StorageInfo:
and (old_meta.hash is None or new_meta.hash != old_meta.hash) and (old_meta.hash is None or new_meta.hash != old_meta.hash)
) )
def set_item_cache(self, ident, item): def set_item_cache(self, ident, item) -> None:
actual_hash = self.status.get_new(ident).hash actual_hash = self.status.get_new(ident).hash
assert actual_hash == item.hash assert actual_hash == item.hash
self._item_cache[ident] = item self._item_cache[ident] = item
def get_item_cache(self, ident): def get_item_cache(self, ident: str) -> Item:
return self._item_cache[ident] return self._item_cache[ident]
async def sync( async def sync(
storage_a, storage_a: Storage,
storage_b, storage_b: Storage,
status, status: SqliteStatus,
conflict_resolution=None, conflict_resolution=None,
force_delete=False, force_delete=False,
error_callback=None, error_callback=None,
partial_sync="revert", partial_sync="revert",
): ) -> None:
"""Synchronizes two storages. """Synchronizes two storages.
:param storage_a: The first storage :param storage_a: The first storage
:type storage_a: :class:`vdirsyncer.storage.base.Storage`
:param storage_b: The second storage :param storage_b: The second storage
:type storage_b: :class:`vdirsyncer.storage.base.Storage`
:param status: {ident: (href_a, etag_a, href_b, etag_b)} :param status: {ident: (href_a, etag_a, href_b, etag_b)}
metadata about the two storages for detection of changes. Will be metadata about the two storages for detection of changes. Will be
modified by the function and should be passed to it at the next sync. modified by the function and should be passed to it at the next sync.
@ -128,12 +133,16 @@ async def sync(
- ``revert`` (default): Revert changes on other side. - ``revert`` (default): Revert changes on other side.
""" """
if storage_a.read_only and storage_b.read_only: if storage_a.read_only and storage_b.read_only:
raise BothReadOnly() raise BothReadOnly
if conflict_resolution == "a wins": if conflict_resolution == "a wins":
conflict_resolution = lambda a, b: a # noqa: E731
def conflict_resolution(a, b):
return a
elif conflict_resolution == "b wins": elif conflict_resolution == "b wins":
conflict_resolution = lambda a, b: b # noqa: E731
def conflict_resolution(a, b):
return b
status_nonempty = bool(next(status.iter_old(), None)) status_nonempty = bool(next(status.iter_old(), None))
@ -165,7 +174,7 @@ async def sync(
class Action: class Action:
async def _run_impl(self, a, b): # pragma: no cover async def _run_impl(self, a, b): # pragma: no cover
raise NotImplementedError() raise NotImplementedError
async def run(self, a, b, conflict_resolution, partial_sync): async def run(self, a, b, conflict_resolution, partial_sync):
with self.auto_rollback(a, b): with self.auto_rollback(a, b):
@ -199,14 +208,11 @@ class Upload(Action):
self.dest = dest self.dest = dest
async def _run_impl(self, a, b): async def _run_impl(self, a, b):
if self.dest.storage.read_only: if self.dest.storage.read_only:
href = etag = None href = etag = None
else: else:
sync_logger.info( sync_logger.info(
"Copying (uploading) item {} to {}".format( f"Copying (uploading) item {self.ident} to {self.dest.storage}"
self.ident, self.dest.storage
)
) )
href, etag = await self.dest.storage.upload(self.item) href, etag = await self.dest.storage.upload(self.item)
assert href is not None assert href is not None
@ -242,7 +248,11 @@ class Delete(Action):
async def _run_impl(self, a, b): async def _run_impl(self, a, b):
meta = self.dest.status.get_new(self.ident) meta = self.dest.status.get_new(self.ident)
if not self.dest.storage.read_only: if self.dest.storage.read_only or self.dest.storage.no_delete:
sync_logger.debug(
f"Skipping deletion of item {self.ident} from {self.dest.storage}"
)
else:
sync_logger.info(f"Deleting item {self.ident} from {self.dest.storage}") sync_logger.info(f"Deleting item {self.ident} from {self.dest.storage}")
await self.dest.storage.delete(meta.href, meta.etag) await self.dest.storage.delete(meta.href, meta.etag)
@ -290,7 +300,7 @@ class ResolveConflict(Action):
) )
def _get_actions(a_info, b_info): def _get_actions(a_info: _StorageInfo, b_info: _StorageInfo):
for ident in uniq( for ident in uniq(
itertools.chain( itertools.chain(
a_info.status.parent.iter_new(), a_info.status.parent.iter_old() a_info.status.parent.iter_new(), a_info.status.parent.iter_old()

View file

@ -1,4 +1,6 @@
from .. import exceptions from __future__ import annotations
from vdirsyncer import exceptions
class SyncError(exceptions.Error): class SyncError(exceptions.Error):

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import abc import abc
import contextlib import contextlib
import sqlite3 import sqlite3
@ -47,63 +49,63 @@ class _StatusBase(metaclass=abc.ABCMeta):
@abc.abstractmethod @abc.abstractmethod
def transaction(self): def transaction(self):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def insert_ident_a(self, ident, props): def insert_ident_a(self, ident, props):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def insert_ident_b(self, ident, props): def insert_ident_b(self, ident, props):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def update_ident_a(self, ident, props): def update_ident_a(self, ident, props):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def update_ident_b(self, ident, props): def update_ident_b(self, ident, props):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def remove_ident(self, ident): def remove_ident(self, ident):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def get_a(self, ident): def get_a(self, ident):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def get_b(self, ident): def get_b(self, ident):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def get_new_a(self, ident): def get_new_a(self, ident):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def get_new_b(self, ident): def get_new_b(self, ident):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def iter_old(self): def iter_old(self):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def iter_new(self): def iter_new(self):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def get_by_href_a(self, href, default=(None, None)): def get_by_href_a(self, href, default=(None, None)):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def get_by_href_b(self, href, default=(None, None)): def get_by_href_b(self, href, default=(None, None)):
raise NotImplementedError() raise NotImplementedError
@abc.abstractmethod @abc.abstractmethod
def rollback(self, ident): def rollback(self, ident):
raise NotImplementedError() raise NotImplementedError
class SqliteStatus(_StatusBase): class SqliteStatus(_StatusBase):
@ -167,6 +169,11 @@ class SqliteStatus(_StatusBase):
); """ ); """
) )
def close(self):
if self._c:
self._c.close()
self._c = None
def _is_latest_version(self): def _is_latest_version(self):
try: try:
return bool( return bool(
@ -185,7 +192,7 @@ class SqliteStatus(_StatusBase):
self._c = new_c self._c = new_c
yield yield
self._c.execute("DELETE FROM status") self._c.execute("DELETE FROM status")
self._c.execute("INSERT INTO status " "SELECT * FROM new_status") self._c.execute("INSERT INTO status SELECT * FROM new_status")
self._c.execute("DELETE FROM new_status") self._c.execute("DELETE FROM new_status")
finally: finally:
self._c = old_c self._c = old_c
@ -197,7 +204,7 @@ class SqliteStatus(_StatusBase):
raise IdentAlreadyExists(old_href=old_props.href, new_href=a_props.href) raise IdentAlreadyExists(old_href=old_props.href, new_href=a_props.href)
b_props = self.get_new_b(ident) or ItemMetadata() b_props = self.get_new_b(ident) or ItemMetadata()
self._c.execute( self._c.execute(
"INSERT OR REPLACE INTO new_status " "VALUES(?, ?, ?, ?, ?, ?, ?)", "INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
( (
ident, ident,
a_props.href, a_props.href,
@ -216,7 +223,7 @@ class SqliteStatus(_StatusBase):
raise IdentAlreadyExists(old_href=old_props.href, new_href=b_props.href) raise IdentAlreadyExists(old_href=old_props.href, new_href=b_props.href)
a_props = self.get_new_a(ident) or ItemMetadata() a_props = self.get_new_a(ident) or ItemMetadata()
self._c.execute( self._c.execute(
"INSERT OR REPLACE INTO new_status " "VALUES(?, ?, ?, ?, ?, ?, ?)", "INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
( (
ident, ident,
a_props.href, a_props.href,
@ -230,14 +237,14 @@ class SqliteStatus(_StatusBase):
def update_ident_a(self, ident, props): def update_ident_a(self, ident, props):
self._c.execute( self._c.execute(
"UPDATE new_status" " SET href_a=?, hash_a=?, etag_a=?" " WHERE ident=?", "UPDATE new_status SET href_a=?, hash_a=?, etag_a=? WHERE ident=?",
(props.href, props.hash, props.etag, ident), (props.href, props.hash, props.etag, ident),
) )
assert self._c.rowcount > 0 assert self._c.rowcount > 0
def update_ident_b(self, ident, props): def update_ident_b(self, ident, props):
self._c.execute( self._c.execute(
"UPDATE new_status" " SET href_b=?, hash_b=?, etag_b=?" " WHERE ident=?", "UPDATE new_status SET href_b=?, hash_b=?, etag_b=? WHERE ident=?",
(props.href, props.hash, props.etag, ident), (props.href, props.hash, props.etag, ident),
) )
assert self._c.rowcount > 0 assert self._c.rowcount > 0
@ -247,10 +254,10 @@ class SqliteStatus(_StatusBase):
def _get_impl(self, ident, side, table): def _get_impl(self, ident, side, table):
res = self._c.execute( res = self._c.execute(
"SELECT href_{side} AS href," f"SELECT href_{side} AS href,"
" hash_{side} AS hash," f" hash_{side} AS hash,"
" etag_{side} AS etag " f" etag_{side} AS etag "
"FROM {table} WHERE ident=?".format(side=side, table=table), f"FROM {table} WHERE ident=?",
(ident,), (ident,),
).fetchone() ).fetchone()
if res is None: if res is None:
@ -298,14 +305,14 @@ class SqliteStatus(_StatusBase):
return return
self._c.execute( self._c.execute(
"INSERT OR REPLACE INTO new_status" " VALUES (?, ?, ?, ?, ?, ?, ?)", "INSERT OR REPLACE INTO new_status VALUES (?, ?, ?, ?, ?, ?, ?)",
(ident, a.href, b.href, a.hash, b.hash, a.etag, b.etag), (ident, a.href, b.href, a.hash, b.hash, a.etag, b.etag),
) )
def _get_by_href_impl(self, href, default=(None, None), side=None): def _get_by_href_impl(self, href, default=(None, None), side=None):
res = self._c.execute( res = self._c.execute(
"SELECT ident, hash_{side} AS hash, etag_{side} AS etag " f"SELECT ident, hash_{side} AS hash, etag_{side} AS etag "
"FROM status WHERE href_{side}=?".format(side=side), f"FROM status WHERE href_{side}=?",
(href,), (href,),
).fetchone() ).fetchone()
if not res: if not res:
@ -326,7 +333,7 @@ class SqliteStatus(_StatusBase):
class SubStatus: class SubStatus:
def __init__(self, parent, side): def __init__(self, parent: SqliteStatus, side: str):
self.parent = parent self.parent = parent
assert side in "ab" assert side in "ab"

View file

@ -1,6 +1,10 @@
from __future__ import annotations
import contextlib
import functools import functools
import os import os
import sys import sys
import tempfile
import uuid import uuid
from inspect import getfullargspec from inspect import getfullargspec
from typing import Callable from typing import Callable
@ -20,8 +24,7 @@ _missing = object()
def expand_path(p: str) -> str: def expand_path(p: str) -> str:
"""Expand $HOME in a path and normalise slashes.""" """Expand $HOME in a path and normalise slashes."""
p = os.path.expanduser(p) p = os.path.expanduser(p)
p = os.path.normpath(p) return os.path.normpath(p)
return p
def split_dict(d: dict, f: Callable): def split_dict(d: dict, f: Callable):
@ -74,7 +77,7 @@ def get_storage_init_specs(cls, stop_at=object):
spec = getfullargspec(cls.__init__) spec = getfullargspec(cls.__init__)
traverse_superclass = getattr(cls.__init__, "_traverse_superclass", True) traverse_superclass = getattr(cls.__init__, "_traverse_superclass", True)
if traverse_superclass: if traverse_superclass:
if traverse_superclass is True: # noqa if traverse_superclass is True:
supercls = next( supercls = next(
getattr(x.__init__, "__objclass__", x) for x in cls.__mro__[1:] getattr(x.__init__, "__objclass__", x) for x in cls.__mro__[1:]
) )
@ -84,7 +87,7 @@ def get_storage_init_specs(cls, stop_at=object):
else: else:
superspecs = () superspecs = ()
return (spec,) + superspecs return (spec, *superspecs)
def get_storage_init_args(cls, stop_at=object): def get_storage_init_args(cls, stop_at=object):
@ -123,12 +126,13 @@ def checkdir(path: str, create: bool = False, mode: int = 0o750) -> None:
raise exceptions.CollectionNotFound(f"Directory {path} does not exist.") raise exceptions.CollectionNotFound(f"Directory {path} does not exist.")
def checkfile(path, create=False): def checkfile(path, create=False) -> None:
""" """Check whether ``path`` is a file.
Check whether ``path`` is a file.
:param create: Whether to create the file's parent directories if they do :param create: Whether to create the file's parent directories if they do
not exist. not exist.
:raises CollectionNotFound: if path does not exist.
:raises OSError: if path exists but is not a file.
""" """
checkdir(os.path.dirname(path), create=create) checkdir(os.path.dirname(path), create=create)
if not os.path.isfile(path): if not os.path.isfile(path):
@ -141,24 +145,6 @@ def checkfile(path, create=False):
raise exceptions.CollectionNotFound(f"File {path} does not exist.") raise exceptions.CollectionNotFound(f"File {path} does not exist.")
class cached_property:
"""A read-only @property that is only evaluated once. Only usable on class
instances' methods.
"""
def __init__(self, fget, doc=None):
self.__name__ = fget.__name__
self.__module__ = fget.__module__
self.__doc__ = doc or fget.__doc__
self.fget = fget
def __get__(self, obj, cls):
if obj is None: # pragma: no cover
return self
obj.__dict__[self.__name__] = result = self.fget(obj)
return result
def href_safe(ident, safe=SAFE_UID_CHARS): def href_safe(ident, safe=SAFE_UID_CHARS):
return not bool(set(ident) - set(safe)) return not bool(set(ident) - set(safe))
@ -172,8 +158,7 @@ def generate_href(ident=None, safe=SAFE_UID_CHARS):
""" """
if not ident or not href_safe(ident, safe): if not ident or not href_safe(ident, safe):
return str(uuid.uuid4()) return str(uuid.uuid4())
else: return ident
return ident
def synchronized(lock=None): def synchronized(lock=None):
@ -206,7 +191,7 @@ def open_graphical_browser(url, new=0, autoraise=True):
cli_names = {"www-browser", "links", "links2", "elinks", "lynx", "w3m"} cli_names = {"www-browser", "links", "links2", "elinks", "lynx", "w3m"}
if webbrowser._tryorder is None: # Python 3.7 if webbrowser._tryorder is None: # Python 3.8
webbrowser.register_standard_browsers() webbrowser.register_standard_browsers()
for name in webbrowser._tryorder: for name in webbrowser._tryorder:
@ -217,4 +202,28 @@ def open_graphical_browser(url, new=0, autoraise=True):
if browser.open(url, new, autoraise): if browser.open(url, new, autoraise):
return return
raise RuntimeError("No graphical browser found. Please open the URL " "manually.") raise RuntimeError("No graphical browser found. Please open the URL manually.")
@contextlib.contextmanager
def atomic_write(dest, mode="wb", overwrite=False):
if "w" not in mode:
raise RuntimeError("`atomic_write` requires write access")
fd, src = tempfile.mkstemp(prefix=os.path.basename(dest), dir=os.path.dirname(dest))
file = os.fdopen(fd, mode=mode)
try:
yield file
except Exception:
os.unlink(src)
raise
else:
file.flush()
file.close()
if overwrite:
os.rename(src, dest)
else:
os.link(src, dest)
os.unlink(src)

View file

@ -1,8 +1,10 @@
from __future__ import annotations
import hashlib import hashlib
from functools import cached_property
from itertools import chain from itertools import chain
from itertools import tee from itertools import tee
from .utils import cached_property
from .utils import uniq from .utils import uniq
IGNORE_PROPS = ( IGNORE_PROPS = (
@ -34,7 +36,6 @@ IGNORE_PROPS = (
class Item: class Item:
"""Immutable wrapper class for VCALENDAR (VEVENT, VTODO) and """Immutable wrapper class for VCALENDAR (VEVENT, VTODO) and
VCARD""" VCARD"""
@ -187,7 +188,7 @@ def join_collection(items, wrappers=_default_join_wrappers):
""" """
items1, items2 = tee((_Component.parse(x) for x in items), 2) items1, items2 = tee((_Component.parse(x) for x in items), 2)
item_type, wrapper_type = _get_item_type(items1, wrappers) _item_type, wrapper_type = _get_item_type(items1, wrappers)
wrapper_props = [] wrapper_props = []
def _get_item_components(x): def _get_item_components(x):
@ -230,8 +231,7 @@ def _get_item_type(components, wrappers):
if not i: if not i:
return None, None return None, None
else: raise ValueError("Not sure how to join components.")
raise ValueError("Not sure how to join components.")
class _Component: class _Component:
@ -279,6 +279,12 @@ class _Component:
stack.append(cls(c_name, [], [])) stack.append(cls(c_name, [], []))
elif line.startswith("END:"): elif line.startswith("END:"):
component = stack.pop() component = stack.pop()
c_name = line[len("END:") :].strip().upper()
if c_name != component.name:
raise ValueError(
f"Got END:{c_name}, expected END:{component.name}"
+ f" at line {_i + 1}"
)
if stack: if stack:
stack[-1].subcomponents.append(component) stack[-1].subcomponents.append(component)
else: else:
@ -289,12 +295,16 @@ class _Component:
except IndexError: except IndexError:
raise ValueError(f"Parsing error at line {_i + 1}") raise ValueError(f"Parsing error at line {_i + 1}")
if len(stack) > 0:
raise ValueError(
f"Missing END for component(s): {', '.join(c.name for c in stack)}"
)
if multiple: if multiple:
return rv return rv
elif len(rv) != 1: if len(rv) != 1:
raise ValueError(f"Found {len(rv)} components, expected one.") raise ValueError(f"Found {len(rv)} components, expected one.")
else: return rv[0]
return rv[0]
def dump_lines(self): def dump_lines(self):
yield f"BEGIN:{self.name}" yield f"BEGIN:{self.name}"
@ -311,13 +321,12 @@ class _Component:
for line in lineiter: for line in lineiter:
if line.startswith(prefix): if line.startswith(prefix):
break break
else: new_lines.append(line)
new_lines.append(line)
else: else:
break break
for line in lineiter: for line in lineiter:
if not line.startswith((" ", "\t")): if not line.startswith((" ", "\t", *prefix)):
new_lines.append(line) new_lines.append(line)
break break
@ -335,10 +344,9 @@ class _Component:
return obj not in self.subcomponents and not any( return obj not in self.subcomponents and not any(
obj in x for x in self.subcomponents obj in x for x in self.subcomponents
) )
elif isinstance(obj, str): if isinstance(obj, str):
return self.get(obj, None) is not None return self.get(obj, None) is not None
else: raise ValueError(obj)
raise ValueError(obj)
def __getitem__(self, key): def __getitem__(self, key):
prefix_without_params = f"{key}:" prefix_without_params = f"{key}:"
@ -348,11 +356,11 @@ class _Component:
if line.startswith(prefix_without_params): if line.startswith(prefix_without_params):
rv = line[len(prefix_without_params) :] rv = line[len(prefix_without_params) :]
break break
elif line.startswith(prefix_with_params): if line.startswith(prefix_with_params):
rv = line[len(prefix_with_params) :].split(":", 1)[-1] rv = line[len(prefix_with_params) :].split(":", 1)[-1]
break break
else: else:
raise KeyError() raise KeyError
for line in iterlines: for line in iterlines:
if line.startswith((" ", "\t")): if line.startswith((" ", "\t")):