Compare commits

..

No commits in common. "main" and "v0.19.2" have entirely different histories.

93 changed files with 765 additions and 1523 deletions

View file

@ -10,12 +10,13 @@ packages:
- python-installer
- python-setuptools-scm
# Runtime dependencies:
- python-atomicwrites
- python-click
- python-click-log
- python-click-threading
- python-requests
- python-requests-toolbelt
- python-aiohttp-oauthlib
- python-tenacity
# Test dependencies:
- python-hypothesis
- python-pytest-cov
@ -35,8 +36,6 @@ environment:
REQUIREMENTS: release
# TODO: ETESYNC_TESTS
tasks:
- check-python:
python --version | grep 'Python 3.13'
- docker: |
sudo systemctl start docker
- setup: |

View file

@ -3,13 +3,12 @@
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
# here, and consider that our "oldest supported environment".
image: alpine/3.19 # python 3.11
image: alpine/3.15
packages:
- docker
- docker-cli
- docker-compose
- py3-pip
- python3-dev
sources:
- https://github.com/pimutils/vdirsyncer
environment:
@ -18,6 +17,7 @@ environment:
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
DAV_SERVER: radicale xandikos
REQUIREMENTS: minimal
# TODO: ETESYNC_TESTS
tasks:
- venv: |
python3 -m venv $HOME/venv
@ -27,8 +27,6 @@ tasks:
sudo service docker start
- setup: |
cd vdirsyncer
# Hack, no idea why it's needed
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
make -e install-dev
- test: |
cd vdirsyncer

View file

@ -29,9 +29,6 @@ tasks:
cd vdirsyncer
make -e ci-test
make -e ci-test-storage
- check: |
cd vdirsyncer
make check
- check-secrets: |
# Stop here if this is a PR. PRs can't run with the below secrets.
[ -f ~/fastmail-secrets ] || complete-build

1
.envrc
View file

@ -1 +0,0 @@
layout python3

6
.gitlab-ci.yml Normal file
View file

@ -0,0 +1,6 @@
python37:
image: python:3.7
before_script:
- make -e install-dev
script:
- make -e ci-test

View file

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v4.4.0
hooks:
- id: trailing-whitespace
args: [--markdown-linebreak-ext=md]
@ -8,8 +8,12 @@ repos:
- id: check-toml
- id: check-added-large-files
- id: debug-statements
- repo: https://github.com/psf/black
rev: "23.3.0"
hooks:
- id: black
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.15.0"
rev: "v1.2.0"
hooks:
- id: mypy
files: vdirsyncer/.*
@ -17,12 +21,11 @@ repos:
- types-setuptools
- types-docutils
- types-requests
- types-atomicwrites
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.11.4'
rev: 'v0.0.265'
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
- repo: local
hooks:
- id: typos-syncroniz

View file

@ -1,16 +0,0 @@
version: 2
sphinx:
configuration: docs/conf.py
build:
os: "ubuntu-22.04"
tools:
python: "3.9"
python:
install:
- method: pip
path: .
extra_requirements:
- docs

View file

@ -4,13 +4,10 @@ Contributors
In alphabetical order:
- Ben Boeckel
- Bleala
- Christian Geier
- Clément Mondon
- Corey Hinshaw
- Kai Herlemann
- Hugo Osvaldo Barrera
- Jason Cox
- Julian Mehne
- Malte Kiefer
- Marek Marczykowski-Górecki
@ -19,7 +16,6 @@ In alphabetical order:
- rEnr3n
- Thomas Weißschuh
- Witcher01
- samm81
Special thanks goes to:

View file

@ -9,36 +9,6 @@ Package maintainers and users who have to manually update their installation
may want to subscribe to `GitHub's tag feed
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
Version 0.21.0
==============
- Implement retrying for ``google`` storage type when a rate limit is reached.
- ``tenacity`` is now a required dependency.
- Drop support for Python 3.8.
- Retry transient network errors for nullipotent requests.
Version 0.20.0
==============
- Remove dependency on abandoned ``atomicwrites`` library.
- Implement ``filter_hook`` for the HTTP storage.
- Drop support for Python 3.7.
- Add support for Python 3.12 and Python 3.13.
- Properly close the status database after using. This especially affects tests,
where we were leaking a large amount of file descriptors.
- Extend supported versions of ``aiostream`` to include 0.7.x.
Version 0.19.3
==============
- Added a no_delete option to the storage configuration. :gh:`1090`
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
- Add an option to request vCard v4.0. :gh:`1066`
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
- Implement digest auth. :gh:`1137`
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
Version 0.19.2
==============
@ -73,10 +43,6 @@ Version 0.19.0
- Add a new ``showconfig`` status. This prints *some* configuration values as
JSON. This is intended to be used by external tools and helpers that interact
with ``vdirsyncer``, and considered experimental.
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
"create", it implicitly creates missing collections during sync without user
prompts. This simplifies workflows where collections should be automatically
created on both sides.
- Update TLS-related tests that were failing due to weak MDs. :gh:`903`
- ``pytest-httpserver`` and ``trustme`` are now required for tests.
- ``pytest-localserver`` is no longer required for tests.

View file

@ -40,11 +40,6 @@ ci-test-storage:
done
bash $(CODECOV_PATH) -c
check:
ruff check
ruff format --diff
#mypy vdirsyncer
release-deb:
sh scripts/release-deb.sh debian jessie
sh scripts/release-deb.sh debian stretch
@ -54,10 +49,10 @@ release-deb:
install-dev:
pip install -U pip setuptools wheel
pip install -e '.[test,check,docs]'
pip install -e .
pip install -Ur test-requirements.txt -r docs-requirements.txt pre-commit
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
pip install pyproject-dependencies && \
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
fi
.PHONY: docs

View file

@ -40,7 +40,7 @@ servers. It can also be used to synchronize calendars and/or addressbooks
between two servers directly.
It aims to be for calendars and contacts what `OfflineIMAP
<https://www.offlineimap.org/>`_ is for emails.
<http://offlineimap.org/>`_ is for emails.
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
@ -59,15 +59,6 @@ Links of interest
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
Dockerized
=================
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
Docker environment, you can check out the following GitHub Repository:
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
License
=======

View file

@ -16,9 +16,6 @@ SPDX-License-Identifier: BSD-3-Clause
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
Author: <bernhard.reiter@intevation.de>
"""
from __future__ import annotations
import re
import subprocess
import sys
@ -54,8 +51,8 @@ def main(ical1_filename, ical2_filename):
f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
"or the second entry:\n"
f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
*additional_args,
]
+ additional_args
)
if r.returncode == 2:

4
docs-requirements.txt Normal file
View file

@ -0,0 +1,4 @@
# This file is used by readthedocs.org
sphinx != 1.4.7
sphinx_rtd_theme
setuptools_scm

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import datetime
import os
@ -20,7 +18,7 @@ copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
release = get_distribution("vdirsyncer").version
version = ".".join(release.split(".")[:2]) # The short X.Y version.
rst_epilog = f".. |vdirsyncer_version| replace:: {release}"
rst_epilog = ".. |vdirsyncer_version| replace:: %s" % release
exclude_patterns = ["_build"]
@ -37,7 +35,9 @@ except ImportError:
html_theme = "default"
if not on_rtd:
print("-" * 74)
print("Warning: sphinx-rtd-theme not installed, building with default theme.")
print(
"Warning: sphinx-rtd-theme not installed, building with default " "theme."
)
print("-" * 74)
html_static_path = ["_static"]

View file

@ -61,8 +61,7 @@ Pair Section
sync`` is executed. See also :ref:`collections_tutorial`.
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
autodiscovery on a specific storage. It means all the collections on side A /
side B.
autodiscovery on a specific storage.
If the collection you want to sync doesn't have the same name on each side,
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
@ -72,8 +71,8 @@ Pair Section
Examples:
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
the collections from side B, and also the collections named "foo" and "bar".
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the
collections from side B, and also the collections named "foo" and "bar".
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
existing collections on either side.
@ -128,16 +127,6 @@ Pair Section
The ``conflict_resolution`` parameter applies for these properties too.
.. _implicit_def:
- ``implicit``: Opt into implicitly creating collections. Example::
implicit = "create"
When set to "create", missing collections are automatically created on both
sides during sync without prompting the user. This simplifies workflows where
all collections should be synchronized bidirectionally.
.. _storage_config:
Storage Section
@ -249,7 +238,6 @@ CalDAV and CardDAV
#useragent = "vdirsyncer/0.16.4"
#verify_fingerprint = null
#auth_cert = null
#use_vcard_4 = false
:param url: Base URL or an URL to an addressbook.
:param username: Username for authentication.
@ -267,7 +255,6 @@ CalDAV and CardDAV
certificate and the key or a list of paths to the files
with them.
:param useragent: Default ``vdirsyncer``.
:param use_vcard_4: Whether the server uses vCard 4.0.
Google
++++++
@ -389,7 +376,6 @@ Local
fileext = "..."
#encoding = "utf-8"
#post_hook = null
#pre_deletion_hook = null
#fileignoreext = ".tmp"
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
@ -411,8 +397,6 @@ Local
:param post_hook: A command to call for each item creation and
modification. The command will be called with the path of the
new/updated file.
:param pre_deletion_hook: A command to call for each item deletion.
The command will be called with the path of the deleted file.
:param fileignoreext: The file extension to ignore. It is only useful
if fileext is set to the empty string. The default is ``.tmp``.
@ -494,7 +478,6 @@ leads to an error.
[storage holidays_remote]
type = "http"
url = https://example.com/holidays_from_hicksville.ics
#filter_hook = null
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
@ -519,8 +502,3 @@ leads to an error.
:param auth_cert: Optional. Either a path to a certificate with a client
certificate and the key or a list of paths to the files with them.
:param useragent: Default ``vdirsyncer``.
:param filter_hook: Optional. A filter command to call for each fetched
item, passed in raw form to stdin and returned via stdout.
If nothing is returned by the filter command, the item is skipped.
This can be used to alter fields as needed when dealing with providers
generating malformed events.

View file

@ -9,4 +9,7 @@ Support and Contact
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
concrete bug reports and feature requests.
* For security issues, contact ``contact@pimutils.org``.
* Lastly, you can also `contact the author directly
<https://unterwaditzer.net/contact.html>`_. Do this for security issues. If
that doesn't work out (i.e. if I don't respond within one week), use
``contact@pimutils.org``.

View file

@ -81,7 +81,7 @@ virtualenv_ and run this inside of it::
# Install development dependencies, including:
# - vdirsyncer from the repo into the virtualenv
# - style checks and formatting (ruff)
# - stylecheckers (ruff) and code formatters (black)
make install-dev
# Install git commit hook for some extra linting and checking

View file

@ -10,7 +10,7 @@ OS/distro packages
The following packages are community-contributed and were up-to-date at the
time of writing:
- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
- `ArchLinux <https://www.archlinux.org/packages/community/any/vdirsyncer/>`_
- `Ubuntu and Debian, x86_64-only
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
in the official repositories but may be out of date)
@ -42,7 +42,7 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
use Python's package manager "pip". First, you'll have to check that the
following things are installed:
- Python 3.9 to 3.13 and pip.
- Python 3.7 to 3.11 and pip.
- ``libxml`` and ``libxslt``
- ``zlib``
- Linux or macOS. **Windows is not supported**, see :gh:`535`.
@ -59,32 +59,28 @@ pipx: The clean, easy way
~~~~~~~~~~~~~~~~~~~~~~~~~
pipx_ is a new package manager for Python-based software that automatically
sets up a virtual environment for each program it installs. Please note that
installing via pipx will not include manual pages nor systemd services.
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
Assuming that pipx is installed, vdirsyncer can be installed with::
sets up a virtual environment for each program you install. Assuming you have
it installed on your operating system, you can do::
pipx install vdirsyncer
It can later be updated to the latest version with::
and ``~/.local/pipx/venvs/vdirsyncer`` will be your new vdirsyncer installation. To
update vdirsyncer to the latest version::
pipx upgrade vdirsyncer
And can be uninstalled with::
If you're done with vdirsyncer, you can do::
pipx uninstall vdirsyncer
This last command will remove vdirsyncer and any dependencies installed into
the above location.
and vdirsyncer will be uninstalled, including its dependencies.
.. _pipx: https://github.com/pipxproject/pipx
The dirty, easy way
~~~~~~~~~~~~~~~~~~~
If pipx is not available on your distribution, the easiest way to install
If pipx is not available on your distribution, the easiest way to install
vdirsyncer at this point would be to run::
pip install --ignore-installed vdirsyncer

View file

@ -78,19 +78,3 @@ You can also simply prompt for the password::
type = "caldav"
username = "myusername"
password.fetch = ["prompt", "Password for CalDAV"]
Environment variable
====================
To read the password from an environment variable::
[storage foo]
type = "caldav"
username = "myusername"
password.fetch = ["command", "printenv", "DAV_PW"]
This is especially handy if you use the same password multiple times
(say, for a CardDAV and a CalDAV storage).
On bash, you can read and export the password without printing::
read -s DAV_PW "DAV Password: " && export DAV_PW

View file

@ -46,9 +46,8 @@ You can install the all development dependencies with::
make install-dev
You probably don't want this since it will use pip to download the
dependencies. Alternatively test dependencies are listed as ``test`` optional
dependencies in ``pyproject.toml``, again with lower-bound version
requirements.
dependencies. Alternatively you can find the testing dependencies in
``test-requirements.txt``, again with lower-bound version requirements.
You also have to have vdirsyncer fully installed at this point. Merely
``cd``-ing into the tarball will not be sufficient.
@ -74,11 +73,10 @@ Using Sphinx_ you can generate the documentation you're reading right now in a
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
take care of the HTML docs' formatting.
You can find a list of dependencies in ``pyproject.toml``, in the
``project.optional-dependencies`` section as ``docs``. Again, you can install
those using pip with::
You can find a list of dependencies in ``docs-requirements.txt``. Again, you
can install those using pip with::
pip install '.[docs]'
pip install -r docs-requirements.txt
Then change into the ``docs/`` directory and build whatever format you want
using the ``Makefile`` in there (run ``make`` for the formats you can build).

View file

@ -14,14 +14,14 @@ To pin the certificate by fingerprint::
[storage foo]
type = "caldav"
...
verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA"
SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported.
CA validation is disabled when pinning a fingerprint.
SHA256-Fingerprints can be used. CA validation is disabled when pinning a
fingerprint.
You can use the following command for obtaining a SHA256 fingerprint::
You can use the following command for obtaining a SHA-1 fingerprint::
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
that you can forget about all of that. It is easier to deploy a free

View file

@ -176,11 +176,8 @@ as a file called ``color`` within the calendar folder.
More information about collections
----------------------------------
"Collection" is a collective term for addressbooks and calendars. A Cardav or
Caldav server can contains several "collections" which correspond to several
addressbooks or calendar.
Each collection from a storage has a "collection name", a unique identifier for each
"Collection" is a collective term for addressbooks and calendars. Each
collection from a storage has a "collection name", a unique identifier for each
collection. In the case of :storage:`filesystem`-storage, this is the name of the
directory that represents the collection, in the case of the DAV-storages this
is the last segment of the URL. We use this identifier in the ``collections``

View file

@ -48,9 +48,10 @@ instance to subfolders of ``~/.calendar/``.
Setting up todoman
==================
Write this to ``~/.config/todoman/config.py``::
Write this to ``~/.config/todoman/todoman.conf``::
path = "~/.calendars/*"
[main]
path = ~/.calendars/*
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
which are exactly the task lists we want. Now you can use ``todoman`` as

View file

@ -50,6 +50,7 @@ program chosen:
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
e.g. DAVx⁵_ or other apps bundled with smartphones.
e.g. DAVx⁵_ or the apps by dmfs_.
.. _DAVx⁵: https://www.davx5.com/
.. _dmfs: https://dmfs.org/

View file

@ -4,10 +4,8 @@
image: alpine/edge
packages:
- py3-build
- py3-pip
- py3-setuptools
- py3-setuptools_scm
- py3-wheel
- twine
sources:
@ -25,5 +23,5 @@ tasks:
git describe --exact-match --tags || complete-build
- publish: |
cd vdirsyncer
python -m build --no-isolation
twine upload --non-interactive dist/*
python setup.py sdist bdist_wheel
twine upload dist/*

View file

@ -1,90 +1,19 @@
# Vdirsyncer synchronizes calendars and contacts.
#
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
# how to package vdirsyncer.
[build-system]
requires = ["setuptools>=64", "setuptools_scm>=8"]
build-backend = "setuptools.build_meta"
[project]
name = "vdirsyncer"
authors = [
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
]
description = "Synchronize calendars and contacts"
readme = "README.rst"
requires-python = ">=3.9"
keywords = ["todo", "task", "icalendar", "cli"]
license = "BSD-3-Clause"
license-files = ["LICENSE"]
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Console",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.9",
"Topic :: Internet",
"Topic :: Office/Business :: Scheduling",
"Topic :: Utilities",
]
dependencies = [
"click>=5.0,<9.0",
"click-log>=0.3.0,<0.5.0",
"requests>=2.20.0",
"aiohttp>=3.8.2,<4.0.0",
"aiostream>=0.4.3,<0.8.0",
"tenacity>=9.0.0",
]
dynamic = ["version"]
[project.optional-dependencies]
google = ["aiohttp-oauthlib"]
test = [
"hypothesis>=6.72.0,<7.0.0",
"pytest",
"pytest-cov",
"pytest-httpserver",
"trustme",
"pytest-asyncio",
"aioresponses",
]
docs = [
"sphinx!=1.4.7",
"sphinx_rtd_theme",
"setuptools_scm",
]
check = [
"mypy",
"ruff",
"types-docutils",
"types-requests",
"types-setuptools",
]
[project.scripts]
vdirsyncer = "vdirsyncer.cli:app"
[tool.ruff.lint]
extend-select = [
"B0",
"C4",
[tool.ruff]
select = [
"E",
"I",
"RSE",
"SIM",
"TID",
"UP",
"F",
"W",
"B0",
"I",
"UP",
"C4",
# "TID",
"RSE"
]
target-version = "py37"
[tool.ruff.lint.isort]
[tool.ruff.isort]
force-single-line = true
required-imports = ["from __future__ import annotations"]
[tool.pytest.ini_options]
addopts = """
@ -96,19 +25,6 @@ addopts = """
--color=yes
"""
# filterwarnings=error
asyncio_default_fixture_loop_scope = "function"
[tool.mypy]
ignore_missing_imports = true
[tool.coverage.report]
exclude_lines = [
"if TYPE_CHECKING:",
]
[tool.setuptools.packages.find]
include = ["vdirsyncer*"]
[tool.setuptools_scm]
write_to = "vdirsyncer/version.py"
version_scheme = "no-guess-dev"

View file

@ -5,10 +5,8 @@ set -xeu
SCRIPT_PATH=$(realpath "$0")
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
# E.g.: debian, ubuntu
DISTRO=${DISTRO:1}
# E.g.: bullseye, bookworm
DISTROVER=${DISTROVER:2}
DISTRO=$1
DISTROVER=$2
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
CONTEXT="$(mktemp -d)"
@ -23,7 +21,7 @@ trap cleanup EXIT
cp scripts/_build_deb_in_container.bash "$CONTEXT"
python setup.py sdist -d "$CONTEXT"
docker run -it \
podman run -it \
--name "$CONTAINER_NAME" \
--volume "$CONTEXT:/source" \
"$DISTRO:$DISTROVER" \

82
setup.py Normal file
View file

@ -0,0 +1,82 @@
"""
Vdirsyncer synchronizes calendars and contacts.
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
how to package vdirsyncer.
"""
from setuptools import Command
from setuptools import find_packages
from setuptools import setup
requirements = [
# https://github.com/mitsuhiko/click/issues/200
"click>=5.0,<9.0",
"click-log>=0.3.0, <0.5.0",
"requests >=2.20.0",
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
"requests_toolbelt >=0.4.0",
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
"atomicwrites>=0.1.7",
"aiohttp>=3.8.0,<4.0.0",
"aiostream>=0.4.3,<0.5.0",
]
class PrintRequirements(Command):
description = "Prints minimal requirements"
user_options: list = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
for requirement in requirements:
print(requirement.replace(">", "=").replace(" ", ""))
with open("README.rst") as f:
long_description = f.read()
setup(
# General metadata
name="vdirsyncer",
author="Markus Unterwaditzer",
author_email="markus@unterwaditzer.net",
url="https://github.com/pimutils/vdirsyncer",
description="Synchronize calendars and contacts",
license="BSD",
long_description=long_description,
# Runtime dependencies
install_requires=requirements,
# Optional dependencies
extras_require={
"google": ["aiohttp-oauthlib"],
},
# Build dependencies
setup_requires=["setuptools_scm != 1.12.0"],
# Other
packages=find_packages(exclude=["tests.*", "tests"]),
include_package_data=True,
cmdclass={"minimal_requirements": PrintRequirements},
use_scm_version={"write_to": "vdirsyncer/version.py"},
entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:main"]},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Topic :: Internet",
"Topic :: Utilities",
],
)

7
test-requirements.txt Normal file
View file

@ -0,0 +1,7 @@
hypothesis>=5.0.0,<7.0.0
pytest
pytest-cov
pytest-httpserver
trustme
pytest-asyncio
aioresponses

View file

@ -1,9 +1,6 @@
"""
Test suite for vdirsyncer.
"""
from __future__ import annotations
import hypothesis.strategies as st
import urllib3.exceptions
@ -103,8 +100,10 @@ X-SOMETHING:{r}
HAHA:YES
END:FOO"""
printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))
printable_characters_strategy = st.text(
st.characters(blacklist_categories=("Cc", "Cs"))
)
uid_strategy = st.text(
st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
st.characters(blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
).filter(lambda x: x.strip() == x)

View file

@ -1,9 +1,6 @@
"""
General-purpose fixtures for vdirsyncer's testsuite.
"""
from __future__ import annotations
import logging
import os
@ -45,7 +42,7 @@ settings.register_profile(
"deterministic",
settings(
derandomize=True,
suppress_health_check=list(HealthCheck),
suppress_health_check=HealthCheck.all(),
),
)
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
@ -59,12 +56,12 @@ else:
@pytest_asyncio.fixture
async def aio_session():
async def aio_session(event_loop):
async with aiohttp.ClientSession() as session:
yield session
@pytest_asyncio.fixture
async def aio_connector():
async def aio_connector(event_loop):
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
yield conn

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import random
import textwrap
import uuid
@ -10,15 +8,16 @@ import aiostream
import pytest
import pytest_asyncio
from tests import EVENT_TEMPLATE
from tests import TASK_TEMPLATE
from tests import VCARD_TEMPLATE
from tests import assert_item_equals
from tests import normalize_item
from vdirsyncer import exceptions
from vdirsyncer.storage.base import normalize_meta_value
from vdirsyncer.vobject import Item
from .. import EVENT_TEMPLATE
from .. import TASK_TEMPLATE
from .. import VCARD_TEMPLATE
from .. import assert_item_equals
from .. import normalize_item
def get_server_mixin(server_name):
from . import __name__ as base
@ -104,7 +103,7 @@ class StorageTests:
href, etag = await s.upload(get_item())
if etag is None:
_, etag = await s.get(href)
((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
((href2, item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
assert href2 == href
assert etag2 == etag
@ -118,7 +117,7 @@ class StorageTests:
@pytest.mark.asyncio
async def test_upload(self, s, get_item):
item = get_item()
href, _etag = await s.upload(item)
href, etag = await s.upload(item)
assert_item_equals((await s.get(href))[0], item)
@pytest.mark.asyncio
@ -146,7 +145,7 @@ class StorageTests:
@pytest.mark.asyncio
async def test_wrong_etag(self, s, get_item):
item = get_item()
href, _etag = await s.upload(item)
href, etag = await s.upload(item)
with pytest.raises(exceptions.PreconditionFailed):
await s.update(href, item, '"lolnope"')
with pytest.raises(exceptions.PreconditionFailed):
@ -384,7 +383,7 @@ class StorageTests:
uid = str(uuid.uuid4())
item = Item(
textwrap.dedent(
f"""
"""
BEGIN:VCALENDAR
VERSION:2.0
BEGIN:VEVENT
@ -418,11 +417,13 @@ class StorageTests:
TRANSP:OPAQUE
END:VEVENT
END:VCALENDAR
"""
""".format(
uid=uid
)
).strip()
)
href, _etag = await s.upload(item)
href, etag = await s.upload(item)
item2, _etag2 = await s.get(href)
item2, etag2 = await s.get(href)
assert normalize_item(item) == normalize_item(item2)

View file

@ -1,10 +1,9 @@
from __future__ import annotations
import asyncio
import contextlib
import subprocess
import time
import uuid
from typing import Type
import aiostream
import pytest
@ -91,7 +90,7 @@ async def slow_create_collection(request, aio_connector):
# storage limits.
to_delete = []
async def inner(cls: type, args: dict, collection_name: str) -> dict:
async def inner(cls: Type, args: dict, collection_name: str) -> dict:
"""Create a collection
Returns args necessary to create a Storage instance pointing to it.

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os
import uuid
@ -8,11 +6,12 @@ import aiostream
import pytest
from tests import assert_item_equals
from tests.storage import StorageTests
from tests.storage import get_server_mixin
from vdirsyncer import exceptions
from vdirsyncer.vobject import Item
from .. import StorageTests
from .. import get_server_mixin
dav_server = os.environ.get("DAV_SERVER", "skip")
ServerMixin = get_server_mixin(dav_server)
@ -48,6 +47,6 @@ class DAVStorageTests(ServerMixin, StorageTests):
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
item = get_item(uid="град сатану" + str(uuid.uuid4()))
href, _etag = await s.upload(item)
item2, _etag2 = await s.get(href)
href, etag = await s.upload(item)
item2, etag2 = await s.get(href)
assert_item_equals(item, item2)

View file

@ -1,6 +1,3 @@
from __future__ import annotations
import contextlib
import datetime
from textwrap import dedent
@ -12,10 +9,10 @@ from aioresponses import aioresponses
from tests import EVENT_TEMPLATE
from tests import TASK_TEMPLATE
from tests import VCARD_TEMPLATE
from tests.storage import format_item
from vdirsyncer import exceptions
from vdirsyncer.storage.dav import CalDAVStorage
from .. import format_item
from . import DAVStorageTests
from . import dav_server
@ -31,16 +28,18 @@ class TestCalDAVStorage(DAVStorageTests):
async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
s = self.storage_class(item_types=(item_type,), **await get_storage_args())
# Most storages hard-fail, but xandikos doesn't.
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
try:
await s.upload(format_item(VCARD_TEMPLATE))
except (exceptions.Error, aiohttp.ClientResponseError):
# Most storages hard-fail, but xandikos doesn't.
pass
assert not await aiostream.stream.list(s.list())
# The `arg` param is not named `item_types` because that would hit
# https://bitbucket.org/pytest-dev/pytest/issue/745/
@pytest.mark.parametrize(
("arg", "calls_num"),
"arg,calls_num",
[
(("VTODO",), 1),
(("VEVENT",), 1),

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest
from vdirsyncer.storage.dav import CardDAVStorage

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest
from vdirsyncer.storage.dav import _BAD_XML_CHARS
@ -41,8 +39,8 @@ def test_xml_utilities():
def test_xml_specialchars(char):
x = _parse_xml(
'<?xml version="1.0" encoding="UTF-8" ?>'
f"<foo>ye{chr(char)}s\r\n"
"hello</foo>".encode("ascii")
"<foo>ye{}s\r\n"
"hello</foo>".format(chr(char)).encode("ascii")
)
if char in _BAD_XML_CHARS:
@ -52,7 +50,7 @@ def test_xml_specialchars(char):
@pytest.mark.parametrize(
"href",
[
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics", # noqa: E501
],
)
def test_normalize_href(href):

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os
import uuid
@ -13,7 +11,7 @@ try:
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
}
except KeyError as e:
pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")
pytestmark = pytest.mark.skip(f"Missing envkey: {str(e)}")
@pytest.mark.flaky(reruns=5)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os
import pytest
@ -8,13 +6,11 @@ import pytest
class ServerMixin:
@pytest.fixture
def get_storage_args(self, slow_create_collection, aio_connector, request):
if (
"item_type" in request.fixturenames
and request.getfixturevalue("item_type") == "VTODO"
):
# Fastmail has non-standard support for TODOs
# See https://github.com/pimutils/vdirsyncer/issues/824
pytest.skip("Fastmail has non-standard VTODO support.")
if "item_type" in request.fixturenames:
if request.getfixturevalue("item_type") == "VTODO":
# Fastmail has non-standard support for TODOs
# See https://github.com/pimutils/vdirsyncer/issues/824
pytest.skip("Fastmail has non-standard VTODO support.")
async def inner(collection="test"):
args = {

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os
import pytest
@ -10,7 +8,7 @@ class ServerMixin:
def get_storage_args(self, item_type, slow_create_collection):
if item_type != "VEVENT":
# iCloud collections can either be calendars or task lists.
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa
pytest.skip("iCloud doesn't support anything else than VEVENT")
async def inner(collection="test"):

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import subprocess
import aiostream
@ -48,8 +46,7 @@ class TestFilesystemStorage(StorageTests):
s = self.storage_class(str(tmpdir), ".txt")
await s.upload(Item("UID:a/b/c"))
(item_file,) = tmpdir.listdir()
assert "/" not in item_file.basename
assert item_file.isfile()
assert "/" not in item_file.basename and item_file.isfile()
@pytest.mark.asyncio
async def test_ignore_tmp_files(self, tmpdir):
@ -90,7 +87,7 @@ class TestFilesystemStorage(StorageTests):
storage = self.storage_class(str(tmpdir), ".txt")
item = Item("UID:" + "hue" * 600)
href, _etag = await storage.upload(item)
href, etag = await storage.upload(item)
assert item.uid not in href
@pytest.mark.asyncio

View file

@ -1,16 +1,10 @@
from __future__ import annotations
import aiohttp
import pytest
from aiohttp import BasicAuth
from aioresponses import CallbackResult
from aioresponses import aioresponses
from tests import normalize_item
from vdirsyncer.exceptions import UserError
from vdirsyncer.http import BasicAuthMethod
from vdirsyncer.http import DigestAuthMethod
from vdirsyncer.http import UsageLimitReached
from vdirsyncer.http import request
from vdirsyncer.storage.http import HttpStorage
from vdirsyncer.storage.http import prepare_auth
@ -41,7 +35,7 @@ async def test_list(aio_connector):
),
]
responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2
responses = ["\n".join(["BEGIN:VCALENDAR"] + items + ["END:VCALENDAR"])] * 2
def callback(url, headers, **kwargs):
assert headers["User-Agent"].startswith("vdirsyncer/")
@ -95,14 +89,16 @@ def test_readonly_param(aio_connector):
def test_prepare_auth():
assert prepare_auth(None, "", "") is None
assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd")
assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd")
assert prepare_auth(None, "user", "pwd") == BasicAuth("user", "pwd")
assert prepare_auth("basic", "user", "pwd") == BasicAuth("user", "pwd")
with pytest.raises(ValueError) as excinfo:
assert prepare_auth("basic", "", "pwd")
assert "you need to specify username and password" in str(excinfo.value).lower()
assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod)
from requests.auth import HTTPDigestAuth
assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)
with pytest.raises(ValueError) as excinfo:
prepare_auth("ladida", "user", "pwd")
@ -110,12 +106,20 @@ def test_prepare_auth():
assert "unknown authentication method" in str(excinfo.value).lower()
def test_prepare_auth_guess():
# guess auth is currently not supported
with pytest.raises(UserError) as excinfo:
prepare_auth("guess", "usr", "pwd")
def test_prepare_auth_guess(monkeypatch):
import requests_toolbelt.auth.guess
assert "not supported" in str(excinfo.value).lower()
assert isinstance(
prepare_auth("guess", "user", "pwd"),
requests_toolbelt.auth.guess.GuessAuth,
)
monkeypatch.delattr(requests_toolbelt.auth.guess, "GuessAuth")
with pytest.raises(UserError) as excinfo:
prepare_auth("guess", "user", "pwd")
assert "requests_toolbelt is too old" in str(excinfo.value).lower()
def test_verify_false_disallowed(aio_connector):
@ -123,41 +127,3 @@ def test_verify_false_disallowed(aio_connector):
HttpStorage(url="http://example.com", verify=False, connector=aio_connector)
assert "must be a path to a pem-file." in str(excinfo.value).lower()
@pytest.mark.asyncio
async def test_403_usage_limit_exceeded(aio_connector):
url = "http://127.0.0.1/test_403"
error_body = {
"error": {
"errors": [
{
"domain": "usageLimits",
"message": "Calendar usage limits exceeded.",
"reason": "quotaExceeded",
}
],
"code": 403,
"message": "Calendar usage limits exceeded.",
}
}
async with aiohttp.ClientSession(connector=aio_connector) as session:
with aioresponses() as m:
m.get(url, status=403, payload=error_body, repeat=True)
with pytest.raises(UsageLimitReached):
await request("GET", url, session)
@pytest.mark.asyncio
async def test_403_without_usage_limits_domain(aio_connector):
"""A 403 JSON error without the Google 'usageLimits' domain should not be
treated as UsageLimitReached and should surface as ClientResponseError.
"""
url = "http://127.0.0.1/test_403_no_usage_limits"
async with aiohttp.ClientSession(connector=aio_connector) as session:
with aioresponses() as m:
m.get(url, status=403, repeat=True)
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, session)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import aiostream
import pytest
from aioresponses import CallbackResult
@ -20,7 +18,7 @@ class CombinedStorage(Storage):
storage_name = "http_and_singlefile"
def __init__(self, url, path, *, connector, **kwargs):
if kwargs.get("collection") is not None:
if kwargs.get("collection", None) is not None:
raise ValueError
super().__init__(**kwargs)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest
from vdirsyncer.storage.memory import MemoryStorage

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest
from vdirsyncer.storage.singlefile import SingleFileStorage

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from textwrap import dedent
import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import io
from textwrap import dedent
@ -26,7 +24,7 @@ def read_config(tmpdir, monkeypatch):
def test_read_config(read_config):
_errors, c = read_config(
errors, c = read_config(
"""
[general]
status_path = "/tmp/status/"
@ -222,62 +220,3 @@ def test_validate_collections_param():
x([["c", None, "b"]])
x([["c", "a", None]])
x([["c", None, None]])
def test_invalid_implicit_value(read_config):
expected_message = "`implicit` parameter must be 'create' or absent"
with pytest.raises(exceptions.UserError) as excinfo:
read_config(
"""
[general]
status_path = "/tmp/status/"
[pair my_pair]
a = "my_a"
b = "my_b"
collections = null
implicit = "invalid"
[storage my_a]
type = "filesystem"
path = "{base}/path_a/"
fileext = ".txt"
[storage my_b]
type = "filesystem"
path = "{base}/path_b/"
fileext = ".txt"
"""
)
assert expected_message in str(excinfo.value)
def test_implicit_create_only(read_config):
"""Test that implicit create works."""
errors, c = read_config(
"""
[general]
status_path = "/tmp/status/"
[pair my_pair]
a = "my_a"
b = "my_b"
collections = ["from a", "from b"]
implicit = "create"
[storage my_a]
type = "filesystem"
path = "{base}/path_a/"
fileext = ".txt"
[storage my_b]
type = "filesystem"
path = "{base}/path_b/"
fileext = ".txt"
"""
)
assert not errors
pair = c.pairs["my_pair"]
assert pair.implicit == "create"

View file

@ -1,7 +1,6 @@
from __future__ import annotations
import json
from textwrap import dedent
from typing import List
import pytest
@ -153,7 +152,7 @@ def test_discover_direct_path(tmpdir, runner):
def test_null_collection_with_named_collection(tmpdir, runner):
runner.write_with_general(
dedent(
f"""
"""
[pair foobar]
a = "foo"
b = "bar"
@ -161,13 +160,15 @@ def test_null_collection_with_named_collection(tmpdir, runner):
[storage foo]
type = "filesystem"
path = "{tmpdir!s}/foo/"
path = "{base}/foo/"
fileext = ".txt"
[storage bar]
type = "singlefile"
path = "{tmpdir!s}/bar.txt"
"""
path = "{base}/bar.txt"
""".format(
base=str(tmpdir)
)
)
)
@ -191,7 +192,7 @@ def test_null_collection_with_named_collection(tmpdir, runner):
@pytest.mark.parametrize(
("a_requires", "b_requires"),
"a_requires,b_requires",
[
(True, True),
(True, False),
@ -211,7 +212,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
async def get(self, href: str):
raise NotImplementedError
async def list(self) -> list[tuple]:
async def list(self) -> List[tuple]:
raise NotImplementedError
from vdirsyncer.cli.utils import storage_names
@ -220,7 +221,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
runner.write_with_general(
dedent(
f"""
"""
[pair foobar]
a = "foo"
b = "bar"
@ -228,12 +229,14 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
[storage foo]
type = "test"
require_collection = {json.dumps(a_requires)}
require_collection = {a}
[storage bar]
type = "test"
require_collection = {json.dumps(b_requires)}
"""
require_collection = {b}
""".format(
a=json.dumps(a_requires), b=json.dumps(b_requires)
)
)
)

View file

@ -1,12 +1,10 @@
from __future__ import annotations
from textwrap import dedent
def test_get_password_from_command(tmpdir, runner):
runner.write_with_general(
dedent(
f"""
"""
[pair foobar]
a = "foo"
b = "bar"
@ -14,14 +12,16 @@ def test_get_password_from_command(tmpdir, runner):
[storage foo]
type.fetch = ["shell", "echo filesystem"]
path = "{tmpdir!s}/foo/"
path = "{base}/foo/"
fileext.fetch = ["command", "echo", ".txt"]
[storage bar]
type = "filesystem"
path = "{tmpdir!s}/bar/"
path = "{base}/bar/"
fileext.fetch = ["prompt", "Fileext for bar"]
"""
""".format(
base=str(tmpdir)
)
)
)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from textwrap import dedent
import pytest
@ -58,7 +56,7 @@ def test_repair_uids(storage, runner, repair_uids):
else:
opt = ["--no-repair-unsafe-uid"]
result = runner.invoke(["repair", *opt, "foo"], input="y")
result = runner.invoke(["repair"] + opt + ["foo"], input="y")
assert not result.exception
if repair_uids:

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import json
import sys
from textwrap import dedent
@ -90,7 +88,9 @@ def test_empty_storage(tmpdir, runner):
result = runner.invoke(["sync"])
lines = result.output.splitlines()
assert lines[0] == "Syncing my_pair"
assert lines[1].startswith('error: my_pair: Storage "my_b" was completely emptied.')
assert lines[1].startswith(
"error: my_pair: " 'Storage "my_b" was completely emptied.'
)
assert result.exception
@ -280,22 +280,24 @@ def test_multiple_pairs(tmpdir, runner):
def test_create_collections(collections, tmpdir, runner):
runner.write_with_general(
dedent(
f"""
"""
[pair foobar]
a = "foo"
b = "bar"
collections = {json.dumps(list(collections))}
collections = {colls}
[storage foo]
type = "filesystem"
path = "{tmpdir!s}/foo/"
path = "{base}/foo/"
fileext = ".txt"
[storage bar]
type = "filesystem"
path = "{tmpdir!s}/bar/"
path = "{base}/bar/"
fileext = ".txt"
"""
""".format(
base=str(tmpdir), colls=json.dumps(list(collections))
)
)
)
@ -313,7 +315,7 @@ def test_create_collections(collections, tmpdir, runner):
def test_ident_conflict(tmpdir, runner):
runner.write_with_general(
dedent(
f"""
"""
[pair foobar]
a = "foo"
b = "bar"
@ -321,14 +323,16 @@ def test_ident_conflict(tmpdir, runner):
[storage foo]
type = "filesystem"
path = "{tmpdir!s}/foo/"
path = "{base}/foo/"
fileext = ".txt"
[storage bar]
type = "filesystem"
path = "{tmpdir!s}/bar/"
path = "{base}/bar/"
fileext = ".txt"
"""
""".format(
base=str(tmpdir)
)
)
)
@ -358,7 +362,7 @@ def test_ident_conflict(tmpdir, runner):
@pytest.mark.parametrize(
("existing", "missing"),
"existing,missing",
[
("foo", "bar"),
("bar", "foo"),
@ -367,7 +371,7 @@ def test_ident_conflict(tmpdir, runner):
def test_unknown_storage(tmpdir, runner, existing, missing):
runner.write_with_general(
dedent(
f"""
"""
[pair foobar]
a = "foo"
b = "bar"
@ -375,9 +379,11 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
[storage {existing}]
type = "filesystem"
path = "{tmpdir!s}/{existing}/"
path = "{base}/{existing}/"
fileext = ".txt"
"""
""".format(
base=str(tmpdir), existing=existing
)
)
)
@ -387,8 +393,10 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
assert result.exception
assert (
f"Storage '{missing}' not found. "
f"These are the configured storages: ['{existing}']"
"Storage '{missing}' not found. "
"These are the configured storages: ['{existing}']".format(
missing=missing, existing=existing
)
) in result.output
@ -402,29 +410,31 @@ def test_no_configured_pairs(tmpdir, runner, cmd):
@pytest.mark.parametrize(
("resolution", "expect_foo", "expect_bar"),
"resolution,expect_foo,expect_bar",
[(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
)
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
runner.write_with_general(
dedent(
f"""
"""
[pair foobar]
a = "foo"
b = "bar"
collections = null
conflict_resolution = {json.dumps(resolution)}
conflict_resolution = {val}
[storage foo]
type = "filesystem"
fileext = ".txt"
path = "{tmpdir!s}/foo"
path = "{base}/foo"
[storage bar]
type = "filesystem"
fileext = ".txt"
path = "{tmpdir!s}/bar"
"""
path = "{base}/bar"
""".format(
base=str(tmpdir), val=json.dumps(resolution)
)
)
)
@ -516,11 +526,13 @@ def test_fetch_only_necessary_params(tmpdir, runner):
fetch_script = tmpdir.join("fetch_script")
fetch_script.write(
dedent(
f"""
"""
set -e
touch "{fetched_file!s}"
touch "{}"
echo ".txt"
"""
""".format(
str(fetched_file)
)
)
)
@ -551,7 +563,9 @@ def test_fetch_only_necessary_params(tmpdir, runner):
type = "filesystem"
path = "{path}"
fileext.fetch = ["command", "sh", "{script}"]
""".format(path=str(tmpdir.mkdir("bogus")), script=str(fetch_script))
""".format(
path=str(tmpdir.mkdir("bogus")), script=str(fetch_script)
)
)
)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import pytest
from vdirsyncer import exceptions
@ -14,7 +12,7 @@ def test_handle_cli_error(capsys):
except BaseException:
handle_cli_error()
_out, err = capsys.readouterr()
out, err = capsys.readouterr()
assert "returned something vdirsyncer doesn't understand" in err
assert "ayy lmao" in err

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import ssl
import pytest

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import logging
import aiohttp
@ -22,7 +20,7 @@ def test_get_storage_init_args():
from vdirsyncer.storage.memory import MemoryStorage
all, required = utils.get_storage_init_args(MemoryStorage)
assert all == {"fileext", "collection", "read_only", "instance_name", "no_delete"}
assert all == {"fileext", "collection", "read_only", "instance_name"}
assert not required

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import os
from vdirsyncer.cli.config import _resolve_conflict_via_command

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import aiostream
import pytest
@ -9,7 +7,7 @@ missing = object()
@pytest.mark.parametrize(
("shortcuts", "expected"),
"shortcuts,expected",
[
(
["from a"],

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from contextlib import contextmanager
from unittest.mock import patch

View file

@ -1,7 +1,3 @@
from __future__ import annotations
import contextlib
import hypothesis.strategies as st
from hypothesis import assume
from hypothesis import given
@ -26,13 +22,13 @@ def test_legacy_status(status_dict):
hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()}
hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()}
assume(len(hrefs_a) == len(status_dict) == len(hrefs_b))
with contextlib.closing(SqliteStatus()) as status:
status.load_legacy_status(status_dict)
assert dict(status.to_legacy_status()) == status_dict
status = SqliteStatus()
status.load_legacy_status(status_dict)
assert dict(status.to_legacy_status()) == status_dict
for ident, (meta_a, meta_b) in status_dict.items():
ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
assert meta2_a.to_status() == meta_a
assert meta2_b.to_status() == meta_b
assert ident_a == ident_b == ident
for ident, (meta_a, meta_b) in status_dict.items():
ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
assert meta2_a.to_status() == meta_a
assert meta2_b.to_status() == meta_b
assert ident_a == ident_b == ident

View file

@ -1,7 +1,4 @@
from __future__ import annotations
import asyncio
import contextlib
from copy import deepcopy
import aiostream
@ -26,12 +23,13 @@ from vdirsyncer.sync.status import SqliteStatus
from vdirsyncer.vobject import Item
async def sync(a, b, status, *args, **kwargs) -> None:
with contextlib.closing(SqliteStatus(":memory:")) as new_status:
new_status.load_legacy_status(status)
await _sync(a, b, new_status, *args, **kwargs)
status.clear()
status.update(new_status.to_legacy_status())
async def sync(a, b, status, *args, **kwargs):
new_status = SqliteStatus(":memory:")
new_status.load_legacy_status(status)
rv = await _sync(a, b, new_status, *args, **kwargs)
status.clear()
status.update(new_status.to_legacy_status())
return rv
def empty_storage(x):
@ -98,8 +96,7 @@ async def test_read_only_and_prefetch():
await sync(a, b, status, force_delete=True)
await sync(a, b, status, force_delete=True)
assert not items(a)
assert not items(b)
assert not items(a) and not items(b)
@pytest.mark.asyncio
@ -227,8 +224,7 @@ async def test_insert_hash():
await a.update(href, Item("UID:1\nHAHA:YES"), etag)
await sync(a, b, status)
assert "hash" in status["1"][0]
assert "hash" in status["1"][1]
assert "hash" in status["1"][0] and "hash" in status["1"][1]
@pytest.mark.asyncio
@ -348,7 +344,7 @@ async def test_uses_get_multi(monkeypatch):
a = MemoryStorage()
b = MemoryStorage()
item = Item("UID:1")
expected_href, _etag = await a.upload(item)
expected_href, etag = await a.upload(item)
await sync(a, b, {})
assert get_multi_calls == [[expected_href]]
@ -385,7 +381,7 @@ async def test_changed_uids():
a = MemoryStorage()
b = MemoryStorage()
href_a, etag_a = await a.upload(Item("UID:A-ONE"))
_href_b, _etag_b = await b.upload(Item("UID:B-ONE"))
href_b, etag_b = await b.upload(Item("UID:B-ONE"))
status = {}
await sync(a, b, status)
@ -439,7 +435,7 @@ async def test_partial_sync_revert():
assert items(a) == {"UID:2"}
@pytest.mark.parametrize("sync_inbetween", [True, False])
@pytest.mark.parametrize("sync_inbetween", (True, False))
@pytest.mark.asyncio
async def test_ident_conflict(sync_inbetween):
a = MemoryStorage()
@ -469,7 +465,7 @@ async def test_moved_href():
a = MemoryStorage()
b = MemoryStorage()
status = {}
_href, _etag = await a.upload(Item("UID:haha"))
href, etag = await a.upload(Item("UID:haha"))
await sync(a, b, status)
b.items["lol"] = b.items.pop("haha")
@ -530,7 +526,7 @@ async def test_unicode_hrefs():
a = MemoryStorage()
b = MemoryStorage()
status = {}
_href, _etag = await a.upload(Item("UID:äää"))
href, etag = await a.upload(Item("UID:äää"))
await sync(a, b, status)
@ -553,7 +549,7 @@ class SyncMachine(RuleBasedStateMachine):
if flaky_etags:
async def get(href):
_old_etag, item = s.items[href]
old_etag, item = s.items[href]
etag = _random_string()
s.items[href] = etag, item
return item, etag
@ -644,7 +640,10 @@ class SyncMachine(RuleBasedStateMachine):
errors = []
error_callback = errors.append if with_error_callback else None
if with_error_callback:
error_callback = errors.append
else:
error_callback = None
try:
# If one storage is read-only, double-sync because changes don't
@ -667,8 +666,7 @@ class SyncMachine(RuleBasedStateMachine):
except ActionIntentionallyFailed:
pass
except BothReadOnly:
assert a.read_only
assert b.read_only
assert a.read_only and b.read_only
assume(False)
except StorageEmpty:
if force_delete:

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from vdirsyncer import exceptions

View file

@ -1,7 +1,3 @@
from __future__ import annotations
import asyncio
import hypothesis.strategies as st
import pytest
import pytest_asyncio
@ -35,8 +31,7 @@ async def test_basic(monkeypatch):
await a.set_meta("foo", None)
await metasync(a, b, status, keys=["foo"])
assert await a.get_meta("foo") is None
assert await b.get_meta("foo") is None
assert await a.get_meta("foo") is None and await b.get_meta("foo") is None
await a.set_meta("foo", "bar")
await metasync(a, b, status, keys=["foo"])
@ -55,24 +50,27 @@ async def test_basic(monkeypatch):
await b.set_meta("foo", None)
await metasync(a, b, status, keys=["foo"])
assert not await a.get_meta("foo")
assert not await b.get_meta("foo")
assert not await a.get_meta("foo") and not await b.get_meta("foo")
@pytest_asyncio.fixture
async def conflict_state(request):
@pytest.mark.asyncio
async def conflict_state(request, event_loop):
a = MemoryStorage()
b = MemoryStorage()
status = {}
await a.set_meta("foo", "bar")
await b.set_meta("foo", "baz")
async def do_cleanup():
assert await a.get_meta("foo") == "bar"
assert await b.get_meta("foo") == "baz"
assert not status
def cleanup():
async def do_cleanup():
assert await a.get_meta("foo") == "bar"
assert await b.get_meta("foo") == "baz"
assert not status
request.addfinalizer(lambda: asyncio.run(do_cleanup()))
event_loop.run_until_complete(do_cleanup())
request.addfinalizer(cleanup)
return a, b, status

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import aiostream
import pytest
from hypothesis import HealthCheck
@ -17,7 +15,7 @@ from vdirsyncer.vobject import Item
@given(uid=uid_strategy)
# Using the random module for UIDs:
@settings(suppress_health_check=list(HealthCheck))
@settings(suppress_health_check=HealthCheck.all())
@pytest.mark.asyncio
async def test_repair_uids(uid):
s = MemoryStorage()
@ -40,12 +38,12 @@ async def test_repair_uids(uid):
@given(uid=uid_strategy.filter(lambda x: not href_safe(x)))
# Using the random module for UIDs:
@settings(suppress_health_check=list(HealthCheck))
@settings(suppress_health_check=HealthCheck.all())
@pytest.mark.asyncio
async def test_repair_unsafe_uids(uid):
s = MemoryStorage()
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
href, _etag = await s.upload(item)
href, etag = await s.upload(item)
assert (await s.get(href))[0].uid == uid
assert not href_safe(uid)
@ -58,7 +56,7 @@ async def test_repair_unsafe_uids(uid):
@pytest.mark.parametrize(
("uid", "href"), [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
"uid,href", [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
)
def test_repair_unsafe_href(uid, href):
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")

View file

@ -1,136 +0,0 @@
from __future__ import annotations
import json
from unittest.mock import AsyncMock
from unittest.mock import Mock
import aiohttp
import pytest
from vdirsyncer.http import UsageLimitReached
from vdirsyncer.http import request
async def _create_mock_response(status: int, body: str | dict):
raw_body = body
text_body = json.dumps(body) if isinstance(body, dict) else body
mock_response = AsyncMock()
mock_response.status = status
mock_response.ok = 200 <= status < 300
mock_response.reason = "OK" if mock_response.ok else "Forbidden"
mock_response.headers = (
{"Content-Type": "application/json"}
if isinstance(raw_body, dict)
else {"Content-Type": "text/plain"}
)
mock_response.text.return_value = text_body
if isinstance(raw_body, dict):
mock_response.json.return_value = raw_body
else:
mock_response.json.side_effect = ValueError("Not JSON")
mock_response.raise_for_status = Mock(
side_effect=(
aiohttp.ClientResponseError(
request_info=AsyncMock(),
history=(),
status=status,
message=mock_response.reason,
headers=mock_response.headers,
)
if not mock_response.ok
else None
)
)
return mock_response
@pytest.mark.asyncio
async def test_request_retry_on_usage_limit():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate (max_retries - 1) 403 errors and then a 200 OK
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries - 1)
] + [await _create_mock_response(200, "OK")]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
response = await request("GET", url, mock_session)
assert response.status == 200
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_retry_exceeds_max_attempts():
url = "http://example.com/api"
max_retries = 5 # As configured in the @retry decorator
mock_session = AsyncMock()
# Simulate max_retries 403 errors and then a 200 OK
mock_session.request.side_effect = [
await _create_mock_response(
403,
{
"error": {
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
}
},
)
for _ in range(max_retries)
]
async with (
aiohttp.ClientSession()
): # Dummy session. Will be replaced by mock_session at call
with pytest.raises(UsageLimitReached):
await request("GET", url, mock_session)
assert mock_session.request.call_count == max_retries
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_json():
url = "http://example.com/api"
mock_session = AsyncMock()
# Generic non-Google 403 error payload (e.g., GitHub-style)
mock_session.request.side_effect = [
await _create_mock_response(403, {"message": "API rate limit exceeded"})
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because it's not the Google quotaExceeded shape
assert mock_session.request.call_count == 1
@pytest.mark.asyncio
async def test_request_no_retry_on_generic_403_text():
url = "http://example.com/api"
mock_session = AsyncMock()
# Plain-text 403 body mentioning rate limits, but not structured as Google error
mock_session.request.side_effect = [
await _create_mock_response(403, "Rate limit exceeded")
]
async with aiohttp.ClientSession():
with pytest.raises(aiohttp.ClientResponseError):
await request("GET", url, mock_session)
# Should not retry because the JSON shape is not Google quotaExceeded
assert mock_session.request.call_count == 1

View file

@ -1,5 +1,3 @@
from __future__ import annotations
from textwrap import dedent
import hypothesis.strategies as st
@ -25,7 +23,7 @@ _simple_split = [
]
_simple_joined = "\r\n".join(
["BEGIN:VADDRESSBOOK", *_simple_split, "END:VADDRESSBOOK\r\n"]
["BEGIN:VADDRESSBOOK"] + _simple_split + ["END:VADDRESSBOOK\r\n"]
)
@ -124,7 +122,7 @@ def test_split_collection_timezones():
"END:VTIMEZONE"
)
full = "\r\n".join(["BEGIN:VCALENDAR", *items, timezone, "END:VCALENDAR"])
full = "\r\n".join(["BEGIN:VCALENDAR"] + items + [timezone, "END:VCALENDAR"])
given = {normalize_item(item) for item in vobject.split_collection(full)}
expected = {
@ -154,7 +152,7 @@ def test_hash_item():
def test_multiline_uid(benchmark):
a = "BEGIN:FOO\r\nUID:123456789abcd\r\n efgh\r\nEND:FOO\r\n"
a = "BEGIN:FOO\r\n" "UID:123456789abcd\r\n" " efgh\r\n" "END:FOO\r\n"
assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh"
@ -237,31 +235,6 @@ def test_broken_item():
assert item.parsed is None
def test_mismatched_end():
with pytest.raises(ValueError) as excinfo:
vobject._Component.parse(
[
"BEGIN:FOO",
"END:BAR",
]
)
assert "Got END:BAR, expected END:FOO at line 2" in str(excinfo.value)
def test_missing_end():
with pytest.raises(ValueError) as excinfo:
vobject._Component.parse(
[
"BEGIN:FOO",
"BEGIN:BAR",
"END:BAR",
]
)
assert "Missing END for component(s): FOO" in str(excinfo.value)
def test_multiple_items():
with pytest.raises(ValueError) as excinfo:
vobject._Component.parse(
@ -299,7 +272,7 @@ def test_input_types():
value_strategy = st.text(
st.characters(
exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), exclude_characters=":="
blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), blacklist_characters=":="
),
min_size=1,
).filter(lambda x: x.strip() == x)
@ -335,8 +308,7 @@ class VobjectMachine(RuleBasedStateMachine):
assert key in c
assert c.get(key) == value
dump = "\r\n".join(c.dump_lines())
assert key in dump
assert value in dump
assert key in dump and value in dump
@rule(
c=Parsed,
@ -366,16 +338,6 @@ class VobjectMachine(RuleBasedStateMachine):
TestVobjectMachine = VobjectMachine.TestCase
def test_dupe_consecutive_keys():
state = VobjectMachine()
unparsed_0 = state.get_unparsed_lines(encoded=False, joined=False)
parsed_0 = state.parse(unparsed=unparsed_0)
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
state.add_prop(c=parsed_0, key="0", value="1")
state.teardown()
def test_component_contains():
item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"])
@ -383,4 +345,4 @@ def test_component_contains():
assert "BAZ" not in item
with pytest.raises(ValueError):
42 in item # noqa: B015, this check raises.
42 in item # noqa: B015

View file

@ -2,14 +2,13 @@
Vdirsyncer synchronizes calendars and contacts.
"""
from __future__ import annotations
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
BUGTRACKER_HOME = PROJECT_HOME + "/issues"
DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable"
try:
from .version import version as __version__
from .version import version as __version__ # noqa
except ImportError: # pragma: no cover
raise ImportError(
"Failed to find (autogenerated) version.py. "
@ -17,14 +16,12 @@ except ImportError: # pragma: no cover
"use the PyPI ones."
)
__all__ = ["__version__"]
def _check_python_version():
def _check_python_version(): # pragma: no cover
import sys
if sys.version_info < (3, 9, 0): # noqa: UP036
print("vdirsyncer requires at least Python 3.9.")
if sys.version_info < (3, 7, 0):
print("vdirsyncer requires at least Python 3.7.")
sys.exit(1)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
if __name__ == "__main__":
from vdirsyncer.cli import app

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import asyncio
import functools
import json
@ -10,15 +8,12 @@ import aiohttp
import click
import click_log
from vdirsyncer import BUGTRACKER_HOME
from vdirsyncer import __version__
from .. import BUGTRACKER_HOME
from .. import __version__
cli_logger = logging.getLogger(__name__)
click_log.basic_config("vdirsyncer")
# add short option for the help option
click_context_settings = {"help_option_names": ["-h", "--help"]}
class AppContext:
def __init__(self):
@ -44,13 +39,13 @@ def catch_errors(f):
return inner
@click.group(context_settings=click_context_settings)
@click.group()
@click_log.simple_verbosity_option("vdirsyncer")
@click.version_option(version=__version__)
@click.option("--config", "-c", metavar="FILE", help="Config file to use.")
@pass_context
@catch_errors
def app(ctx, config: str):
def app(ctx, config):
"""
Synchronize calendars and contacts
"""
@ -59,7 +54,7 @@ def app(ctx, config: str):
cli_logger.warning(
"Vdirsyncer currently does not support Windows. "
"You will likely encounter bugs. "
f"See {BUGTRACKER_HOME}/535 for more information."
"See {}/535 for more information.".format(BUGTRACKER_HOME)
)
if not ctx.config:
@ -68,6 +63,9 @@ def app(ctx, config: str):
ctx.config = load_config(config)
main = app
def collections_arg_callback(ctx, param, value):
"""
Expand the various CLI shortforms ("pair, pair/collection") to an iterable
@ -147,14 +145,7 @@ def sync(ctx, collections, force_delete):
)
)
# `return_exceptions=True` ensures that the event loop lives long enough for
# backoffs to be able to finish
gathered = await asyncio.gather(*tasks, return_exceptions=True)
# but now we need to manually check for and propogate a single failure after
# allowing all tasks to finish in order to keep exit status non-zero
failures = [e for e in gathered if isinstance(e, BaseException)]
if failures:
raise failures[0]
await asyncio.gather(*tasks)
asyncio.run(main(collections))

View file

@ -3,18 +3,13 @@ from __future__ import annotations
import json
import os
import string
from collections.abc import Generator
from configparser import RawConfigParser
from functools import cached_property
from itertools import chain
from typing import IO
from typing import Any
from vdirsyncer import PROJECT_HOME
from vdirsyncer import exceptions
from vdirsyncer.utils import expand_path
from vdirsyncer.vobject import Item
from .. import PROJECT_HOME
from .. import exceptions
from ..utils import cached_property
from ..utils import expand_path
from .fetchparams import expand_fetch_params
from .utils import storage_class_from_config
@ -28,16 +23,16 @@ def validate_section_name(name, section_type):
if invalid:
chars_display = "".join(sorted(SECTION_NAME_CHARS))
raise exceptions.UserError(
f'The {section_type}-section "{name}" contains invalid characters. Only '
'The {}-section "{}" contains invalid characters. Only '
"the following characters are allowed for storage and "
f"pair names:\n{chars_display}"
"pair names:\n{}".format(section_type, name, chars_display)
)
def _validate_general_section(general_config: dict[str, str]):
def _validate_general_section(general_config):
invalid = set(general_config) - GENERAL_ALL
missing = GENERAL_REQUIRED - set(general_config)
problems: list[str] = []
problems = []
if invalid:
problems.append(
@ -52,7 +47,7 @@ def _validate_general_section(general_config: dict[str, str]):
if problems:
raise exceptions.UserError(
"Invalid general section. Copy the example "
f"config from the repository and edit it: {PROJECT_HOME}",
"config from the repository and edit it: {}".format(PROJECT_HOME),
problems=problems,
)
@ -93,31 +88,21 @@ def _validate_collections_param(collections):
raise ValueError("Duplicate value.")
collection_names.add(collection_name)
except ValueError as e:
raise ValueError(f"`collections` parameter, position {i}: {e!s}")
def _validate_implicit_param(implicit):
if implicit is None:
return
if implicit != "create":
raise ValueError("`implicit` parameter must be 'create' or absent.")
raise ValueError(f"`collections` parameter, position {i}: {str(e)}")
class _ConfigReader:
def __init__(self, f: IO[Any]):
self._file: IO[Any] = f
def __init__(self, f):
self._file = f
self._parser = c = RawConfigParser()
c.read_file(f)
self._seen_names: set = set()
self._seen_names = set()
self._general: dict[str, str] = {}
self._pairs: dict[str, dict[str, str]] = {}
self._storages: dict[str, dict[str, str]] = {}
self._general = {}
self._pairs = {}
self._storages = {}
def _parse_section(
self, section_type: str, name: str, options: dict[str, Any]
) -> None:
def _parse_section(self, section_type, name, options):
validate_section_name(name, section_type)
if name in self._seen_names:
raise ValueError(f'Name "{name}" already used.')
@ -134,9 +119,7 @@ class _ConfigReader:
else:
raise ValueError("Unknown section type.")
def parse(
self,
) -> tuple[dict[str, str], dict[str, dict[str, str]], dict[str, dict[str, str]]]:
def parse(self):
for section in self._parser.sections():
if " " in section:
section_type, name = section.split(" ", 1)
@ -150,7 +133,7 @@ class _ConfigReader:
dict(_parse_options(self._parser.items(section), section=section)),
)
except ValueError as e:
raise exceptions.UserError(f'Section "{section}": {e!s}')
raise exceptions.UserError(f'Section "{section}": {str(e)}')
_validate_general_section(self._general)
if getattr(self._file, "name", None):
@ -162,9 +145,7 @@ class _ConfigReader:
return self._general, self._pairs, self._storages
def _parse_options(
items: list[tuple[str, str]], section: str | None = None
) -> Generator[tuple[str, dict[str, str]], None, None]:
def _parse_options(items, section=None):
for key, value in items:
try:
yield key, json.loads(value)
@ -173,18 +154,13 @@ def _parse_options(
class Config:
def __init__(
self,
general: dict[str, str],
pairs: dict[str, dict[str, str]],
storages: dict[str, dict[str, str]],
) -> None:
def __init__(self, general, pairs, storages):
self.general = general
self.storages = storages
for name, options in storages.items():
options["instance_name"] = name
self.pairs: dict[str, PairConfig] = {}
self.pairs = {}
for name, options in pairs.items():
try:
self.pairs[name] = PairConfig(self, name, options)
@ -192,12 +168,12 @@ class Config:
raise exceptions.UserError(f"Pair {name}: {e}")
@classmethod
def from_fileobject(cls, f: IO[Any]):
def from_fileobject(cls, f):
reader = _ConfigReader(f)
return cls(*reader.parse())
@classmethod
def from_filename_or_environment(cls, fname: str | None = None):
def from_filename_or_environment(cls, fname=None):
if fname is None:
fname = os.environ.get("VDIRSYNCER_CONFIG", None)
if fname is None:
@ -214,13 +190,15 @@ class Config:
except Exception as e:
raise exceptions.UserError(f"Error during reading config {fname}: {e}")
def get_storage_args(self, storage_name: str):
def get_storage_args(self, storage_name):
try:
args = self.storages[storage_name]
except KeyError:
raise exceptions.UserError(
f"Storage {storage_name!r} not found. "
f"These are the configured storages: {list(self.storages)}"
"Storage {!r} not found. "
"These are the configured storages: {}".format(
storage_name, list(self.storages)
)
)
else:
return expand_fetch_params(args)
@ -233,15 +211,14 @@ class Config:
class PairConfig:
def __init__(self, full_config: Config, name: str, options: dict[str, str]):
self._config: Config = full_config
self.name: str = name
self.name_a: str = options.pop("a")
self.name_b: str = options.pop("b")
self.implicit = options.pop("implicit", None)
def __init__(self, full_config, name, options):
self._config = full_config
self.name = name
self.name_a = options.pop("a")
self.name_b = options.pop("b")
self._partial_sync: str | None = options.pop("partial_sync", None)
self.metadata: str | tuple[()] = options.pop("metadata", ())
self._partial_sync = options.pop("partial_sync", None)
self.metadata = options.pop("metadata", None) or ()
self.conflict_resolution = self._process_conflict_resolution_param(
options.pop("conflict_resolution", None)
@ -257,17 +234,14 @@ class PairConfig:
)
else:
_validate_collections_param(self.collections)
_validate_implicit_param(self.implicit)
if options:
raise ValueError("Unknown options: {}".format(", ".join(options)))
def _process_conflict_resolution_param(
self, conflict_resolution: str | list[str] | None
):
def _process_conflict_resolution_param(self, conflict_resolution):
if conflict_resolution in (None, "a wins", "b wins"):
return conflict_resolution
if (
elif (
isinstance(conflict_resolution, list)
and len(conflict_resolution) > 1
and conflict_resolution[0] == "command"
@ -281,7 +255,8 @@ class PairConfig:
return _resolve_conflict_via_command(a, b, command, a_name, b_name)
return resolve
raise ValueError("Invalid value for `conflict_resolution`.")
else:
raise ValueError("Invalid value for `conflict_resolution`.")
# The following parameters are lazily evaluated because evaluating
# self.config_a would expand all `x.fetch` parameters. This is costly and
@ -327,10 +302,10 @@ class PairConfig:
class CollectionConfig:
def __init__(self, pair, name: str, config_a, config_b):
def __init__(self, pair, name, config_a, config_b):
self.pair = pair
self._config = pair._config
self.name: str = name
self.name = name
self.config_a = config_a
self.config_b = config_b
@ -339,16 +314,14 @@ class CollectionConfig:
load_config = Config.from_filename_or_environment
def _resolve_conflict_via_command(
a, b, command, a_name, b_name, _check_call=None
) -> Item:
def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=None):
import shutil
import tempfile
if _check_call is None:
from subprocess import check_call as _check_call
from vdirsyncer.vobject import Item
from ..vobject import Item
dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.")
try:
@ -361,7 +334,7 @@ def _resolve_conflict_via_command(
f.write(b.raw)
command[0] = expand_path(command[0])
_check_call([*command, a_tmp, b_tmp])
_check_call(command + [a_tmp, b_tmp])
with open(a_tmp) as f:
new_a = f.read()
@ -369,7 +342,7 @@ def _resolve_conflict_via_command(
new_b = f.read()
if new_a != new_b:
raise exceptions.UserError("The two files are not completely equal.")
raise exceptions.UserError("The two files are not completely " "equal.")
return Item(new_a)
finally:
shutil.rmtree(dir)

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import asyncio
import hashlib
import json
@ -9,8 +7,7 @@ import sys
import aiohttp
import aiostream
from vdirsyncer import exceptions
from .. import exceptions
from .utils import handle_collection_not_found
from .utils import handle_storage_init_error
from .utils import load_status
@ -66,19 +63,21 @@ async def collections_for_pair(
rv["collections"], pair.config_a, pair.config_b
)
)
if rv:
elif rv:
raise exceptions.UserError(
"Detected change in config file, "
f"please run `vdirsyncer discover {pair.name}`."
"please run `vdirsyncer discover {}`.".format(pair.name)
)
else:
raise exceptions.UserError(
"Please run `vdirsyncer discover {}` "
" before synchronization.".format(pair.name)
)
raise exceptions.UserError(
f"Please run `vdirsyncer discover {pair.name}` before synchronization."
)
logger.info(f"Discovering collections for pair {pair.name}")
a_discovered = DiscoverResult(pair.config_a, connector=connector)
b_discovered = DiscoverResult(pair.config_b, connector=connector)
a_discovered = _DiscoverResult(pair.config_a, connector=connector)
b_discovered = _DiscoverResult(pair.config_b, connector=connector)
if list_collections:
# TODO: We should gather data and THEN print, so it can be async.
@ -93,31 +92,24 @@ async def collections_for_pair(
connector=connector,
)
async def _handle_collection_not_found(
config, collection, e=None, implicit_create=False
):
return await handle_collection_not_found(
config, collection, e=e, implicit_create=pair.implicit == "create"
)
# We have to use a list here because the special None/null value would get
# mangled to string (because JSON objects always have string keys).
rv = await aiostream.stream.list( # type: ignore[assignment]
rv = await aiostream.stream.list(
expand_collections(
shortcuts=pair.collections,
config_a=pair.config_a,
config_b=pair.config_b,
get_a_discovered=a_discovered.get_self,
get_b_discovered=b_discovered.get_self,
_handle_collection_not_found=_handle_collection_not_found,
_handle_collection_not_found=handle_collection_not_found,
)
)
await _sanity_check_collections(rv, connector=connector)
save_status(
base_path=status_path,
pair=pair.name,
status_path,
pair.name,
data_type="collections",
data={
"collections": list(
@ -163,7 +155,7 @@ def _expand_collections_cache(collections, config_a, config_b):
yield name, (a, b)
class DiscoverResult:
class _DiscoverResult:
def __init__(self, config, *, connector):
self._cls, _ = storage_class_from_config(config)
@ -279,8 +271,8 @@ async def _print_collections(
logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
logger.warning(
f"Failed to discover collections for {instance_name}, use `-vdebug` "
"to see the full traceback."
"Failed to discover collections for {}, use `-vdebug` "
"to see the full traceback.".format(instance_name)
)
return
logger.info(f"{instance_name}:")

View file

@ -1,13 +1,10 @@
from __future__ import annotations
import logging
import click
from vdirsyncer import exceptions
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import synchronized
from .. import exceptions
from ..utils import expand_path
from ..utils import synchronized
from . import AppContext
SUFFIX = ".fetch"
@ -68,7 +65,8 @@ def _fetch_value(opts, key):
else:
if not rv:
raise exceptions.UserError(
f"Empty value for {key}, this most likely indicates an error."
"Empty value for {}, this most likely "
"indicates an error.".format(key)
)
password_cache[cache_key] = rv
return rv
@ -88,7 +86,7 @@ def _strategy_command(*command: str, shell: bool = False):
return stdout.strip("\n")
except OSError as e:
cmd = " ".join(expanded_command)
raise exceptions.UserError(f"Failed to execute command: {cmd}\n{e!s}")
raise exceptions.UserError(f"Failed to execute command: {cmd}\n{str(e)}")
def _strategy_shell(*command: str):

View file

@ -1,15 +1,12 @@
from __future__ import annotations
import json
import aiohttp
from vdirsyncer import exceptions
from vdirsyncer import sync
from .. import exceptions
from .. import sync
from .config import CollectionConfig
from .discover import DiscoverResult
from .discover import collections_for_pair
from .discover import storage_class_from_config
from .discover import storage_instance_from_config
from .utils import JobFailed
from .utils import cli_logger
@ -36,8 +33,10 @@ async def prepare_pair(pair_name, collections, config, *, connector):
config_a, config_b = all_collections[collection_name]
except KeyError:
raise exceptions.UserError(
f"Pair {pair_name}: Collection {json.dumps(collection_name)} not found."
f"These are the configured collections:\n{list(all_collections)}"
"Pair {}: Collection {} not found. These are the "
"configured collections:\n{}".format(
pair_name, json.dumps(collection_name), list(all_collections)
)
)
collection = CollectionConfig(pair, collection_name, config_a, config_b)
@ -104,7 +103,7 @@ async def repair_collection(
*,
connector: aiohttp.TCPConnector,
):
from vdirsyncer.repair import repair_storage
from ..repair import repair_storage
storage_name, collection = collection, None
if "/" in storage_name:
@ -116,14 +115,15 @@ async def repair_collection(
if collection is not None:
cli_logger.info("Discovering collections (skipping cache).")
get_discovered = DiscoverResult(config, connector=connector)
discovered = await get_discovered.get_self()
for config in discovered.values():
cls, config = storage_class_from_config(config)
async for config in cls.discover(**config): # noqa E902
if config["collection"] == collection:
break
else:
raise exceptions.UserError(
f"Couldn't find collection {collection} for storage {storage_name}."
"Couldn't find collection {} for storage {}.".format(
collection, storage_name
)
)
config["type"] = storage_type
@ -135,7 +135,7 @@ async def repair_collection(
async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector):
from vdirsyncer.metasync import metasync
from ..metasync import metasync
pair = collection.pair
status_name = get_status_name(pair.name, collection.name)
@ -143,11 +143,11 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
try:
cli_logger.info(f"Metasyncing {status_name}")
status = load_status(
general["status_path"],
pair.name,
collection.name,
data_type="metadata",
status = (
load_status(
general["status_path"], pair.name, collection.name, data_type="metadata"
)
or {}
)
a = await storage_instance_from_config(collection.config_a, connector=connector)
@ -165,9 +165,9 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
raise JobFailed
save_status(
base_path=general["status_path"],
pair=pair.name,
general["status_path"],
pair.name,
collection.name,
data_type="metadata",
data=status,
collection=collection.name,
)

View file

@ -1,29 +1,24 @@
from __future__ import annotations
import contextlib
import errno
import importlib
import json
import os
import sys
from typing import Any
import aiohttp
import click
from atomicwrites import atomic_write
from vdirsyncer import BUGTRACKER_HOME
from vdirsyncer import DOCS_HOME
from vdirsyncer import exceptions
from vdirsyncer.storage.base import Storage
from vdirsyncer.sync.exceptions import IdentConflict
from vdirsyncer.sync.exceptions import PartialSync
from vdirsyncer.sync.exceptions import StorageEmpty
from vdirsyncer.sync.exceptions import SyncConflict
from vdirsyncer.sync.status import SqliteStatus
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import get_storage_init_args
from .. import BUGTRACKER_HOME
from .. import DOCS_HOME
from .. import exceptions
from ..sync.exceptions import IdentConflict
from ..sync.exceptions import PartialSync
from ..sync.exceptions import StorageEmpty
from ..sync.exceptions import SyncConflict
from ..sync.status import SqliteStatus
from ..utils import expand_path
from ..utils import get_storage_init_args
from . import cli_logger
STATUS_PERMISSIONS = 0o600
@ -31,8 +26,8 @@ STATUS_DIR_PERMISSIONS = 0o700
class _StorageIndex:
def __init__(self) -> None:
self._storages: dict[str, str] = {
def __init__(self):
self._storages = {
"caldav": "vdirsyncer.storage.dav.CalDAVStorage",
"carddav": "vdirsyncer.storage.dav.CardDAVStorage",
"filesystem": "vdirsyncer.storage.filesystem.FilesystemStorage",
@ -42,7 +37,7 @@ class _StorageIndex:
"google_contacts": "vdirsyncer.storage.google.GoogleContactsStorage",
}
def __getitem__(self, name: str) -> Storage:
def __getitem__(self, name):
item = self._storages[name]
if not isinstance(item, str):
return item
@ -79,27 +74,33 @@ def handle_cli_error(status_name=None, e=None):
cli_logger.critical(e)
except StorageEmpty as e:
cli_logger.error(
f'{status_name}: Storage "{e.empty_storage.instance_name}" was '
"completely emptied. If you want to delete ALL entries on BOTH sides,"
f"then use `vdirsyncer sync --force-delete {status_name}`. "
f"Otherwise delete the files for {status_name} in your status "
"directory."
'{status_name}: Storage "{name}" was completely emptied. If you '
"want to delete ALL entries on BOTH sides, then use "
"`vdirsyncer sync --force-delete {status_name}`. "
"Otherwise delete the files for {status_name} in your status "
"directory.".format(
name=e.empty_storage.instance_name, status_name=status_name
)
)
except PartialSync as e:
cli_logger.error(
f"{status_name}: Attempted change on {e.storage}, which is read-only"
"{status_name}: Attempted change on {storage}, which is read-only"
". Set `partial_sync` in your pair section to `ignore` to ignore "
"those changes, or `revert` to revert them on the other side."
"those changes, or `revert` to revert them on the other side.".format(
status_name=status_name, storage=e.storage
)
)
except SyncConflict as e:
cli_logger.error(
f"{status_name}: One item changed on both sides. Resolve this "
"{status_name}: One item changed on both sides. Resolve this "
"conflict manually, or by setting the `conflict_resolution` "
"parameter in your config file.\n"
f"See also {DOCS_HOME}/config.html#pair-section\n"
f"Item ID: {e.ident}\n"
f"Item href on side A: {e.href_a}\n"
f"Item href on side B: {e.href_b}\n"
"See also {docs}/config.html#pair-section\n"
"Item ID: {e.ident}\n"
"Item href on side A: {e.href_a}\n"
"Item href on side B: {e.href_b}\n".format(
status_name=status_name, e=e, docs=DOCS_HOME
)
)
except IdentConflict as e:
cli_logger.error(
@ -120,17 +121,17 @@ def handle_cli_error(status_name=None, e=None):
pass
except exceptions.PairNotFound as e:
cli_logger.error(
f"Pair {e.pair_name} does not exist. Please check your "
"Pair {pair_name} does not exist. Please check your "
"configuration file and make sure you've typed the pair name "
"correctly"
"correctly".format(pair_name=e.pair_name)
)
except exceptions.InvalidResponse as e:
cli_logger.error(
"The server returned something vdirsyncer doesn't understand. "
f"Error message: {e!r}\n"
"Error message: {!r}\n"
"While this is most likely a serverside problem, the vdirsyncer "
"devs are generally interested in such bugs. Please report it in "
f"the issue tracker at {BUGTRACKER_HOME}"
"the issue tracker at {}".format(e, BUGTRACKER_HOME)
)
except exceptions.CollectionRequired:
cli_logger.error(
@ -153,18 +154,13 @@ def handle_cli_error(status_name=None, e=None):
cli_logger.debug("".join(tb))
def get_status_name(pair: str, collection: str | None) -> str:
def get_status_name(pair, collection):
if collection is None:
return pair
return pair + "/" + collection
def get_status_path(
base_path: str,
pair: str,
collection: str | None = None,
data_type: str | None = None,
) -> str:
def get_status_path(base_path, pair, collection=None, data_type=None):
assert data_type is not None
status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name))
@ -178,15 +174,10 @@ def get_status_path(
return path
def load_status(
base_path: str,
pair: str,
collection: str | None = None,
data_type: str | None = None,
) -> dict[str, Any]:
def load_status(base_path, pair, collection=None, data_type=None):
path = get_status_path(base_path, pair, collection, data_type)
if not os.path.exists(path):
return {}
return None
assert_permissions(path, STATUS_PERMISSIONS)
with open(path) as f:
@ -198,7 +189,7 @@ def load_status(
return {}
def prepare_status_path(path: str) -> None:
def prepare_status_path(path):
dirname = os.path.dirname(path)
try:
@ -209,7 +200,7 @@ def prepare_status_path(path: str) -> None:
@contextlib.contextmanager
def manage_sync_status(base_path: str, pair_name: str, collection_name: str):
def manage_sync_status(base_path, pair_name, collection_name):
path = get_status_path(base_path, pair_name, collection_name, "items")
status = None
legacy_status = None
@ -231,17 +222,12 @@ def manage_sync_status(base_path: str, pair_name: str, collection_name: str):
prepare_status_path(path)
status = SqliteStatus(path)
with contextlib.closing(status):
yield status
yield status
def save_status(
base_path: str,
pair: str,
data_type: str,
data: dict[str, Any],
collection: str | None = None,
) -> None:
def save_status(base_path, pair, collection=None, data_type=None, data=None):
assert data_type is not None
assert data is not None
status_name = get_status_name(pair, collection)
path = expand_path(os.path.join(base_path, status_name)) + "." + data_type
prepare_status_path(path)
@ -286,14 +272,15 @@ async def storage_instance_from_config(
except exceptions.CollectionNotFound as e:
if create:
config = await handle_collection_not_found(
config, config.get("collection", None), e=str(e), implicit_create=True
config, config.get("collection", None), e=str(e)
)
return await storage_instance_from_config(
config,
create=False,
connector=connector,
)
raise
else:
raise
except Exception:
return handle_storage_init_error(cls, new_config)
@ -332,18 +319,18 @@ def handle_storage_init_error(cls, config):
)
def assert_permissions(path: str, wanted: int) -> None:
def assert_permissions(path, wanted):
permissions = os.stat(path).st_mode & 0o777
if permissions > wanted:
cli_logger.warning(
f"Correcting permissions of {path} from {permissions:o} to {wanted:o}"
"Correcting permissions of {} from {:o} to {:o}".format(
path, permissions, wanted
)
)
os.chmod(path, wanted)
async def handle_collection_not_found(
config, collection, e=None, implicit_create=False
):
async def handle_collection_not_found(config, collection, e=None):
storage_name = config.get("instance_name", None)
cli_logger.warning(
@ -352,7 +339,7 @@ async def handle_collection_not_found(
)
)
if implicit_create or click.confirm("Should vdirsyncer attempt to create it?"):
if click.confirm("Should vdirsyncer attempt to create it?"):
storage_type = config["type"]
cls, config = storage_class_from_config(config)
config["collection"] = collection
@ -364,7 +351,7 @@ async def handle_collection_not_found(
cli_logger.error(e)
raise exceptions.UserError(
f'Unable to find or create collection "{collection}" for '
f'storage "{storage_name}". Please create the collection '
"yourself."
'Unable to find or create collection "{collection}" for '
'storage "{storage}". Please create the collection '
"yourself.".format(collection=collection, storage=storage_name)
)

View file

@ -3,8 +3,6 @@ Contains exception classes used by vdirsyncer. Not all exceptions are here,
only the most commonly used ones.
"""
from __future__ import annotations
class Error(Exception):
"""Baseclass for all errors."""

View file

@ -1,25 +1,9 @@
from __future__ import annotations
import asyncio
import logging
import os
import platform
import re
from abc import ABC
from abc import abstractmethod
from base64 import b64encode
from ssl import create_default_context
import aiohttp
import requests.auth
from aiohttp import ServerDisconnectedError
from aiohttp import ServerTimeoutError
from requests.utils import parse_dict_header
from tenacity import retry
from tenacity import retry_if_exception_type
from tenacity import stop_after_attempt
from tenacity import wait_exponential
from . import DOCS_HOME
from . import __version__
from . import exceptions
from .utils import expand_path
@ -27,101 +11,54 @@ from .utils import expand_path
logger = logging.getLogger(__name__)
USERAGENT = f"vdirsyncer/{__version__}"
# 'hack' to prevent aiohttp from loading the netrc config,
# but still allow it to read PROXY_* env vars.
# Otherwise, if our host is defined in the netrc config,
# aiohttp will overwrite our Authorization header.
# https://github.com/pimutils/vdirsyncer/issues/1138
os.environ["NETRC"] = "NUL" if platform.system() == "Windows" else "/dev/null"
def _detect_faulty_requests(): # pragma: no cover
text = (
"Error during import: {e}\n\n"
"If you have installed vdirsyncer from a distro package, please file "
"a bug against that package, not vdirsyncer.\n\n"
"Consult {d}/problems.html#requests-related-importerrors"
"-based-distributions on how to work around this."
)
try:
from requests_toolbelt.auth.guess import GuessAuth # noqa
except ImportError as e:
import sys
print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr)
sys.exit(1)
class AuthMethod(ABC):
def __init__(self, username, password):
self.username = username
self.password = password
@abstractmethod
def handle_401(self, response):
raise NotImplementedError
@abstractmethod
def get_auth_header(self, method, url):
raise NotImplementedError
def __eq__(self, other):
if not isinstance(other, AuthMethod):
return False
return (
self.__class__ == other.__class__
and self.username == other.username
and self.password == other.password
)
class BasicAuthMethod(AuthMethod):
def handle_401(self, _response):
pass
def get_auth_header(self, _method, _url):
auth_str = f"{self.username}:{self.password}"
return "Basic " + b64encode(auth_str.encode("utf-8")).decode("utf-8")
class DigestAuthMethod(AuthMethod):
# make class var to 'cache' the state, which is more efficient because otherwise
# each request would first require another 'initialization' request.
_auth_helpers: dict[tuple[str, str], requests.auth.HTTPDigestAuth] = {}
def __init__(self, username: str, password: str):
super().__init__(username, password)
self._auth_helper = self._auth_helpers.get(
(username, password), requests.auth.HTTPDigestAuth(username, password)
)
self._auth_helpers[(username, password)] = self._auth_helper
@property
def auth_helper_vars(self):
return self._auth_helper._thread_local
def handle_401(self, response):
s_auth = response.headers.get("www-authenticate", "")
if "digest" in s_auth.lower():
# Original source:
# https://github.com/psf/requests/blob/f12ccbef6d6b95564da8d22e280d28c39d53f0e9/src/requests/auth.py#L262-L263
pat = re.compile(r"digest ", flags=re.IGNORECASE)
self.auth_helper_vars.chal = parse_dict_header(pat.sub("", s_auth, count=1))
def get_auth_header(self, method, url):
self._auth_helper.init_per_thread_state()
if not self.auth_helper_vars.chal:
# Need to do init request first
return ""
return self._auth_helper.build_digest_header(method, url)
_detect_faulty_requests()
del _detect_faulty_requests
def prepare_auth(auth, username, password):
if username and password:
if auth == "basic" or auth is None:
return BasicAuthMethod(username, password)
if auth == "digest":
return DigestAuthMethod(username, password)
if auth == "guess":
raise exceptions.UserError(
"'Guess' authentication is not supported in this version of "
"vdirsyncer.\n"
"Please explicitly specify either 'basic' or 'digest' auth instead. \n"
"See the following issue for more information: "
"https://github.com/pimutils/vdirsyncer/issues/1015"
)
return aiohttp.BasicAuth(username, password)
elif auth == "digest":
from requests.auth import HTTPDigestAuth
return HTTPDigestAuth(username, password)
elif auth == "guess":
try:
from requests_toolbelt.auth.guess import GuessAuth
except ImportError:
raise exceptions.UserError(
"Your version of requests_toolbelt is too "
"old for `guess` authentication. At least "
"version 0.4.0 is required."
)
else:
return GuessAuth(username, password)
else:
raise exceptions.UserError(f"Unknown authentication method: {auth}")
elif auth:
raise exceptions.UserError(
f"You need to specify username and password for {auth} authentication."
"You need to specify username and password "
"for {} authentication.".format(auth)
)
return None
@ -155,79 +92,18 @@ def prepare_client_cert(cert):
return cert
class TransientNetworkError(exceptions.Error):
"""Transient network condition that should be retried."""
def _is_safe_to_retry_method(method: str) -> bool:
"""Returns True if the HTTP method is safe/idempotent to retry.
We consider these safe for our WebDAV usage:
- GET, HEAD, OPTIONS: standard safe methods
- PROPFIND, REPORT: read-only DAV queries used for listing/fetching
"""
return method.upper() in {"GET", "HEAD", "OPTIONS", "PROPFIND", "REPORT"}
class UsageLimitReached(exceptions.Error):
pass
async def _is_quota_exceeded_google(response: aiohttp.ClientResponse) -> bool:
"""Return True if the response JSON indicates Google-style `usageLimits` exceeded.
Expected shape:
{"error": {"errors": [{"domain": "usageLimits", ...}], ...}}
See https://developers.google.com/workspace/calendar/api/guides/errors#403_usage_limits_exceeded
"""
try:
data = await response.json(content_type=None)
except Exception:
return False
if not isinstance(data, dict):
return False
error = data.get("error")
if not isinstance(error, dict):
return False
errors = error.get("errors")
if not isinstance(errors, list):
return False
for entry in errors:
if isinstance(entry, dict) and entry.get("domain") == "usageLimits":
return True
return False
@retry(
stop=stop_after_attempt(5),
wait=wait_exponential(multiplier=1, min=4, max=10),
retry=(
retry_if_exception_type(UsageLimitReached)
| retry_if_exception_type(TransientNetworkError)
),
reraise=True,
)
async def request(
method,
url,
session,
auth=None,
latin1_fallback=True,
**kwargs,
):
"""Wrapper method for requests, to ease logging and mocking as well as to
support auth methods currently unsupported by aiohttp.
"""Wrapper method for requests, to ease logging and mocking.
Parameters should be the same as for ``aiohttp.request``, except:
Parameters should be the same as for ``aiohttp.request``, as well as:
:param session: A requests session object to use.
:param auth: The HTTP ``AuthMethod`` to use for authentication.
:param verify_fingerprint: Optional. SHA256 of the expected server certificate.
:param latin1_fallback: RFC-2616 specifies the default Content-Type of
text/* to be latin1, which is not always correct, but exactly what
@ -246,7 +122,7 @@ async def request(
logger.debug("=" * 20)
logger.debug(f"{method} {url}")
logger.debug(kwargs.get("headers", {}))
logger.debug(kwargs.get("data"))
logger.debug(kwargs.get("data", None))
logger.debug("Sending request...")
assert isinstance(kwargs.get("data", b""), bytes)
@ -257,43 +133,7 @@ async def request(
ssl_context.load_cert_chain(*cert)
kwargs["ssl"] = ssl_context
headers = kwargs.pop("headers", {})
response: aiohttp.ClientResponse | None = None
for _attempt in range(2):
if auth:
headers["Authorization"] = auth.get_auth_header(method, url)
try:
response = await session.request(method, url, headers=headers, **kwargs)
except (
ServerDisconnectedError,
ServerTimeoutError,
asyncio.TimeoutError,
) as e:
# Retry only if the method is safe/idempotent for our DAV use
if _is_safe_to_retry_method(method):
logger.debug(
f"Transient network error on {method} {url}: {e}. Will retry."
)
raise TransientNetworkError(str(e)) from e
raise e from None
if response is None:
raise RuntimeError("No HTTP response obtained")
if response.ok or not auth:
# we don't need to do the 401-loop if we don't do auth in the first place
break
if response.status == 401:
auth.handle_401(response)
# retry once more after handling the 401 challenge
continue
else:
# some other error, will be handled later on
break
if response is None:
raise RuntimeError("No HTTP response obtained")
response = await session.request(method, url, **kwargs)
# See https://github.com/kennethreitz/requests/issues/2042
content_type = response.headers.get("Content-Type", "")
@ -309,18 +149,10 @@ async def request(
logger.debug(response.headers)
logger.debug(response.content)
if logger.getEffectiveLevel() <= logging.DEBUG and response.status >= 400:
# https://github.com/pimutils/vdirsyncer/issues/1186
logger.debug(await response.text())
if response.status == 403 and await _is_quota_exceeded_google(response):
raise UsageLimitReached(response.reason)
if response.status == 412:
raise exceptions.PreconditionFailed(response.reason)
if response.status in (404, 410):
raise exceptions.NotFoundError(response.reason)
if response.status == 429:
raise UsageLimitReached(response.reason)
response.raise_for_status()
return response

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import logging
from . import exceptions
@ -57,7 +55,7 @@ async def metasync(storage_a, storage_b, status, keys, conflict_resolution=None)
logger.debug(f"B: {b}")
logger.debug(f"S: {s}")
if (a != s and b != s) or storage_a.read_only or storage_b.read_only:
if a != s and b != s or storage_a.read_only or storage_b.read_only:
await _resolve_conflict()
elif a != s and b == s:
await _a_to_b()

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import logging
from os.path import basename
@ -26,9 +24,9 @@ async def repair_storage(storage, repair_unsafe_uid):
new_item = repair_item(href, item, seen_uids, repair_unsafe_uid)
except IrreparableItem:
logger.error(
f"Item {href!r} is malformed beyond repair. "
"Item {!r} is malformed beyond repair. "
"The PRODID property may indicate which software "
"created this item."
"created this item.".format(href)
)
logger.error(f"Item content: {item.raw!r}")
continue
@ -56,7 +54,9 @@ def repair_item(href, item, seen_uids, repair_unsafe_uid):
new_item = item.with_uid(generate_href())
elif not href_safe(item.uid) or not href_safe(basename(href)):
if not repair_unsafe_uid:
logger.warning("UID may cause problems, add --repair-unsafe-uid to repair.")
logger.warning(
"UID may cause problems, add " "--repair-unsafe-uid to repair."
)
else:
logger.warning("UID or href is unsafe, assigning random UID.")
new_item = item.with_uid(generate_href())

View file

@ -1,15 +1,16 @@
from __future__ import annotations
import contextlib
import functools
from abc import ABCMeta
from abc import abstractmethod
from collections.abc import Iterable
from typing import Iterable
from typing import List
from typing import Optional
from vdirsyncer import exceptions
from vdirsyncer.utils import uniq
from vdirsyncer.vobject import Item
from .. import exceptions
from ..utils import uniq
def mutating_storage_method(f):
"""Wrap a method and fail if the instance is readonly."""
@ -33,6 +34,7 @@ class StorageMeta(ABCMeta):
class Storage(metaclass=StorageMeta):
"""Superclass of all storages, interface that all storages have to
implement.
@ -65,37 +67,21 @@ class Storage(metaclass=StorageMeta):
# The machine-readable name of this collection.
collection = None
# A value of False means storage does not support delete requests. A
# value of True mean the storage supports it.
no_delete = False
# A value of True means the storage does not support write-methods such as
# upload, update and delete. A value of False means the storage does
# support those methods.
read_only = False
# The attribute values to show in the representation of the storage.
_repr_attributes: tuple[str, ...] = ()
_repr_attributes: List[str] = []
def __init__(
self,
instance_name=None,
read_only=None,
no_delete=None,
collection=None,
):
def __init__(self, instance_name=None, read_only=None, collection=None):
if read_only is None:
read_only = self.read_only
if self.read_only and not read_only:
raise exceptions.UserError("This storage can only be read-only.")
self.read_only = bool(read_only)
if no_delete is None:
no_delete = self.no_delete
if self.no_delete and not no_delete:
raise exceptions.UserError("Nothing can be deleted in this storage.")
self.no_delete = bool(no_delete)
if collection and instance_name:
instance_name = f"{instance_name}/{collection}"
self.instance_name = instance_name
@ -140,17 +126,19 @@ class Storage(metaclass=StorageMeta):
except ValueError:
pass
attrs = {x: getattr(self, x) for x in self._repr_attributes}
return f"<{self.__class__.__name__}(**{attrs})>"
return "<{}(**{})>".format(
self.__class__.__name__,
{x: getattr(self, x) for x in self._repr_attributes},
)
@abstractmethod
async def list(self) -> list[tuple]:
async def list(self) -> List[tuple]:
"""
:returns: list of (href, etag)
"""
@abstractmethod
async def get(self, href: str) -> tuple[Item, str]:
async def get(self, href: str):
"""Fetch a single item.
:param href: href to fetch
@ -239,7 +227,7 @@ class Storage(metaclass=StorageMeta):
"""
yield
async def get_meta(self, key: str) -> str | None:
async def get_meta(self, key: str) -> Optional[str]:
"""Get metadata value for collection/storage.
See the vdir specification for the keys that *have* to be accepted.
@ -249,7 +237,7 @@ class Storage(metaclass=StorageMeta):
"""
raise NotImplementedError("This storage does not support metadata.")
async def set_meta(self, key: str, value: str | None):
async def set_meta(self, key: str, value: Optional[str]):
"""Set metadata value for collection/storage.
:param key: The metadata key.
@ -258,7 +246,7 @@ class Storage(metaclass=StorageMeta):
raise NotImplementedError("This storage does not support metadata.")
def normalize_meta_value(value) -> str | None:
def normalize_meta_value(value) -> Optional[str]:
# `None` is returned by iCloud for empty properties.
if value is None or value == "None":
return None

View file

@ -1,28 +1,26 @@
from __future__ import annotations
import contextlib
import datetime
import logging
import urllib.parse as urlparse
import xml.etree.ElementTree as etree
from abc import abstractmethod
from functools import cached_property
from inspect import getfullargspec
from inspect import signature
from typing import Optional
from typing import Type
import aiohttp
import aiostream
from vdirsyncer import exceptions
from vdirsyncer import http
from vdirsyncer import utils
from vdirsyncer.exceptions import Error
from vdirsyncer.http import USERAGENT
from vdirsyncer.http import prepare_auth
from vdirsyncer.http import prepare_client_cert
from vdirsyncer.http import prepare_verify
from vdirsyncer.vobject import Item
from .. import exceptions
from .. import http
from .. import utils
from ..http import USERAGENT
from ..http import prepare_auth
from ..http import prepare_client_cert
from ..http import prepare_verify
from .base import Storage
from .base import normalize_meta_value
@ -94,7 +92,8 @@ def _parse_xml(content):
return etree.XML(_clean_body(content))
except etree.ParseError as e:
raise InvalidXMLResponse(
f"Invalid XML encountered: {e}\nDouble-check the URLs in your config."
"Invalid XML encountered: {}\n"
"Double-check the URLs in your config.".format(e)
)
@ -115,8 +114,10 @@ def _fuzzy_matches_mimetype(strict, weak):
if strict is None or weak is None:
return True
_mediatype, subtype = strict.split("/")
return subtype in weak
mediatype, subtype = strict.split("/")
if subtype in weak:
return True
return False
class Discover:
@ -127,7 +128,7 @@ class Discover:
@property
@abstractmethod
def _resourcetype(self) -> str | None:
def _resourcetype(self) -> Optional[str]:
pass
@property
@ -197,7 +198,9 @@ class Discover:
# E.g. Synology NAS
# See https://github.com/pimutils/vdirsyncer/issues/498
dav_logger.debug(
f"No current-user-principal returned, re-using URL {response.url}"
"No current-user-principal returned, re-using URL {}".format(
response.url
)
)
return response.url.human_repr()
return urlparse.urljoin(str(response.url), rv.text).rstrip("/") + "/"
@ -219,8 +222,10 @@ class Discover:
async def find_collections(self):
rv = None
with contextlib.suppress(aiohttp.ClientResponseError, exceptions.Error):
try:
rv = await aiostream.stream.list(self._find_collections_impl(""))
except (aiohttp.ClientResponseError, exceptions.Error):
pass
if rv:
return rv
@ -235,7 +240,7 @@ class Discover:
return True
props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
if props is None or not props:
if props is None or not len(props):
dav_logger.debug("Skipping, missing <prop>: %s", response)
return False
if props.find("{DAV:}resourcetype/" + self._resourcetype) is None:
@ -259,7 +264,7 @@ class Discover:
href = response.find("{DAV:}href")
if href is None:
raise InvalidXMLResponse("Missing href tag for collection props.")
raise InvalidXMLResponse("Missing href tag for collection " "props.")
href = urlparse.urljoin(str(r.url), href.text)
if href not in done:
done.add(href)
@ -308,7 +313,9 @@ class Discover:
</mkcol>
""".format(
etree.tostring(etree.Element(self._resourcetype), encoding="unicode")
).encode("utf-8")
).encode(
"utf-8"
)
response = await self.session.request(
"MKCOL",
@ -321,7 +328,7 @@ class Discover:
class CalDiscover(Discover):
_namespace = "urn:ietf:params:xml:ns:caldav"
_resourcetype = f"{{{_namespace}}}calendar"
_resourcetype = "{%s}calendar" % _namespace
_homeset_xml = b"""
<propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav">
<prop>
@ -329,13 +336,13 @@ class CalDiscover(Discover):
</prop>
</propfind>
"""
_homeset_tag = f"{{{_namespace}}}calendar-home-set"
_homeset_tag = "{%s}calendar-home-set" % _namespace
_well_known_uri = "/.well-known/caldav"
class CardDiscover(Discover):
_namespace = "urn:ietf:params:xml:ns:carddav"
_resourcetype: str | None = f"{{{_namespace}}}addressbook"
_resourcetype: Optional[str] = "{%s}addressbook" % _namespace
_homeset_xml = b"""
<propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav">
<prop>
@ -343,7 +350,7 @@ class CardDiscover(Discover):
</prop>
</propfind>
"""
_homeset_tag = f"{{{_namespace}}}addressbook-home-set"
_homeset_tag = "{%s}addressbook-home-set" % _namespace
_well_known_uri = "/.well-known/carddav"
@ -391,7 +398,7 @@ class DAVSession:
self.url = url.rstrip("/") + "/"
self.connector = connector
@cached_property
@utils.cached_property
def parsed_url(self):
return urlparse.urlparse(self.url)
@ -444,7 +451,7 @@ class DAVStorage(Storage):
@property
@abstractmethod
def discovery_class(self) -> type[Discover]:
def discovery_class(self) -> Type[Discover]:
"""Discover subclass to use."""
# The DAVSession class to use
@ -452,7 +459,7 @@ class DAVStorage(Storage):
connector: aiohttp.TCPConnector
_repr_attributes = ("username", "url")
_repr_attributes = ["username", "url"]
_property_table = {
"displayname": ("displayname", "DAV:"),
@ -497,12 +504,8 @@ class DAVStorage(Storage):
def _is_item_mimetype(self, mimetype):
return _fuzzy_matches_mimetype(self.item_mimetype, mimetype)
async def get(self, href: str) -> tuple[Item, str]:
actual_href: str
item: Item
etag: str
((actual_href, item, etag),) = await aiostream.stream.list( # type: ignore[misc]
async def get(self, href: str):
((actual_href, item, etag),) = await aiostream.stream.list(
self.get_multi([href])
)
assert href == actual_href
@ -628,7 +631,7 @@ class DAVStorage(Storage):
continue
props = response.findall("{DAV:}propstat/{DAV:}prop")
if props is None or not props:
if props is None or not len(props):
dav_logger.debug(f"Skipping {href!r}, properties are missing.")
continue
else:
@ -646,7 +649,9 @@ class DAVStorage(Storage):
contenttype = getattr(props.find("{DAV:}getcontenttype"), "text", None)
if not self._is_item_mimetype(contenttype):
dav_logger.debug(
f"Skipping {href!r}, {contenttype!r} != {self.item_mimetype!r}."
"Skipping {!r}, {!r} != {!r}.".format(
href, contenttype, self.item_mimetype
)
)
continue
@ -681,7 +686,7 @@ class DAVStorage(Storage):
for href, etag, _prop in rv:
yield href, etag
async def get_meta(self, key) -> str | None:
async def get_meta(self, key) -> Optional[str]:
try:
tagname, namespace = self._property_table[key]
except KeyError:
@ -740,7 +745,9 @@ class DAVStorage(Storage):
""".format(
etree.tostring(element, encoding="unicode"),
action=action,
).encode("utf-8")
).encode(
"utf-8"
)
await self.session.request(
"PROPPATCH",
@ -794,7 +801,7 @@ class CalDAVStorage(DAVStorage):
self.item_types = tuple(item_types)
if (start_date is None) != (end_date is None):
raise exceptions.UserError(
"If start_date is given, end_date has to be given too."
"If start_date is given, " "end_date has to be given too."
)
elif start_date is not None and end_date is not None:
namespace = dict(datetime.__dict__)
@ -824,7 +831,9 @@ class CalDAVStorage(DAVStorage):
start = start.strftime(CALDAV_DT_FORMAT)
end = end.strftime(CALDAV_DT_FORMAT)
timefilter = f'<C:time-range start="{start}" end="{end}"/>'
timefilter = '<C:time-range start="{start}" end="{end}"/>'.format(
start=start, end=end
)
else:
timefilter = ""
@ -892,21 +901,14 @@ class CardDAVStorage(DAVStorage):
item_mimetype = "text/vcard"
discovery_class = CardDiscover
def __init__(self, *args, use_vcard_4=False, **kwargs):
self.use_vcard_4 = use_vcard_4
super().__init__(*args, **kwargs)
@property
def get_multi_template(self):
ct = 'Content-Type="text/vcard" version="4.0"' if self.use_vcard_4 else ""
return f"""<?xml version="1.0" encoding="utf-8" ?>
get_multi_template = """<?xml version="1.0" encoding="utf-8" ?>
<C:addressbook-multiget xmlns="DAV:"
xmlns:C="urn:ietf:params:xml:ns:carddav">
<prop>
<getetag/>
<C:address-data {ct}/>
<C:address-data/>
</prop>
{{hrefs}}
{hrefs}
</C:addressbook-multiget>"""
get_multi_data_query = "{urn:ietf:params:xml:ns:carddav}address-data"

View file

@ -1,19 +1,16 @@
from __future__ import annotations
import contextlib
import errno
import logging
import os
import subprocess
from vdirsyncer import exceptions
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import checkdir
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import generate_href
from vdirsyncer.utils import get_etag_from_file
from vdirsyncer.vobject import Item
from atomicwrites import atomic_write
from .. import exceptions
from ..utils import checkdir
from ..utils import expand_path
from ..utils import generate_href
from ..utils import get_etag_from_file
from ..vobject import Item
from .base import Storage
from .base import normalize_meta_value
@ -22,7 +19,7 @@ logger = logging.getLogger(__name__)
class FilesystemStorage(Storage):
storage_name = "filesystem"
_repr_attributes = ("path",)
_repr_attributes = ["path"]
def __init__(
self,
@ -30,7 +27,6 @@ class FilesystemStorage(Storage):
fileext,
encoding="utf-8",
post_hook=None,
pre_deletion_hook=None,
fileignoreext=".tmp",
**kwargs,
):
@ -42,7 +38,6 @@ class FilesystemStorage(Storage):
self.fileext = fileext
self.fileignoreext = fileignoreext
self.post_hook = post_hook
self.pre_deletion_hook = pre_deletion_hook
@classmethod
async def discover(cls, path, **kwargs):
@ -66,7 +61,9 @@ class FilesystemStorage(Storage):
def _validate_collection(cls, path):
if not os.path.isdir(path):
return False
return not os.path.basename(path).startswith(".")
if os.path.basename(path).startswith("."):
return False
return True
@classmethod
async def create_collection(cls, collection, **kwargs):
@ -98,7 +95,7 @@ class FilesystemStorage(Storage):
):
yield fname, get_etag_from_file(fpath)
async def get(self, href) -> tuple[Item, str]:
async def get(self, href):
fpath = self._get_filepath(href)
try:
with open(fpath, "rb") as f:
@ -167,9 +164,6 @@ class FilesystemStorage(Storage):
actual_etag = get_etag_from_file(fpath)
if etag != actual_etag:
raise exceptions.WrongEtagError(etag, actual_etag)
if self.pre_deletion_hook:
self._run_pre_deletion_hook(fpath)
os.remove(fpath)
def _run_post_hook(self, fpath):
@ -177,16 +171,7 @@ class FilesystemStorage(Storage):
try:
subprocess.call([self.post_hook, fpath])
except OSError as e:
logger.warning(f"Error executing external hook: {e!s}")
def _run_pre_deletion_hook(self, fpath):
    """Invoke the configured ``pre_deletion_hook`` command with the file path.

    Called by ``delete`` just before the file is removed. If the hook
    cannot be executed (``OSError``, e.g. missing executable), a warning
    is logged and the error is otherwise swallowed, so the deletion still
    proceeds. The hook's exit status is not checked.
    """
    logger.info(
        f"Calling pre_deletion_hook={self.pre_deletion_hook} with argument={fpath}"
    )
    try:
        subprocess.call([self.pre_deletion_hook, fpath])
    except OSError as e:
        logger.warning(f"Error executing external hook: {e!s}")
logger.warning(f"Error executing external hook: {str(e)}")
async def get_meta(self, key):
fpath = os.path.join(self.path, key)
@ -204,8 +189,10 @@ class FilesystemStorage(Storage):
fpath = os.path.join(self.path, key)
if value is None:
with contextlib.suppress(OSError):
try:
os.remove(fpath)
except OSError:
pass
else:
with atomic_write(fpath, mode="wb", overwrite=True) as f:
f.write(value.encode(self.encoding))

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import json
import logging
import os
@ -11,13 +9,12 @@ from threading import Thread
import aiohttp
import click
from atomicwrites import atomic_write
from vdirsyncer import exceptions
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import checkdir
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import open_graphical_browser
from .. import exceptions
from ..utils import checkdir
from ..utils import expand_path
from ..utils import open_graphical_browser
from . import base
from . import dav
from .google_helpers import _RedirectWSGIApp
@ -99,7 +96,6 @@ class GoogleSession(dav.DAVSession):
token_updater=self._save_token,
connector=self.connector,
connector_owner=False,
trust_env=True,
)
async def _init_token(self):
@ -110,8 +106,8 @@ class GoogleSession(dav.DAVSession):
pass
except ValueError as e:
raise exceptions.UserError(
f"Failed to load token file {self._token_file}, try deleting it. "
f"Original error: {e}"
"Failed to load token file {}, try deleting it. "
"Original error: {}".format(self._token_file, e)
)
if not self._token:
@ -130,7 +126,7 @@ class GoogleSession(dav.DAVSession):
async with self._session as session:
# Fail fast if the address is occupied
authorization_url, _state = session.authorization_url(
authorization_url, state = session.authorization_url(
TOKEN_URL,
# access_type and approval_prompt are Google specific
# extra parameters.

View file

@ -2,14 +2,15 @@
#
# Based on:
# https://github.com/googleapis/google-auth-library-python-oauthlib/blob/1fb16be1bad9050ee29293541be44e41e82defd7/google_auth_oauthlib/flow.py#L513
from __future__ import annotations
import logging
import wsgiref.simple_server
import wsgiref.util
from collections.abc import Iterable
from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterable
from typing import Optional
logger = logging.getLogger(__name__)
@ -28,7 +29,7 @@ class _RedirectWSGIApp:
Stores the request URI and displays the given success message.
"""
last_request_uri: str | None
last_request_uri: Optional[str]
def __init__(self, success_message: str):
"""
@ -40,7 +41,7 @@ class _RedirectWSGIApp:
def __call__(
self,
environ: dict[str, Any],
environ: Dict[str, Any],
start_response: Callable[[str, list], None],
) -> Iterable[bytes]:
"""WSGI Callable.

View file

@ -1,29 +1,22 @@
from __future__ import annotations
import logging
import subprocess
import urllib.parse as urlparse
import aiohttp
from vdirsyncer import exceptions
from vdirsyncer.http import USERAGENT
from vdirsyncer.http import prepare_auth
from vdirsyncer.http import prepare_client_cert
from vdirsyncer.http import prepare_verify
from vdirsyncer.http import request
from vdirsyncer.vobject import Item
from vdirsyncer.vobject import split_collection
from .. import exceptions
from ..http import USERAGENT
from ..http import prepare_auth
from ..http import prepare_client_cert
from ..http import prepare_verify
from ..http import request
from ..vobject import Item
from ..vobject import split_collection
from .base import Storage
logger = logging.getLogger(__name__)
class HttpStorage(Storage):
storage_name = "http"
read_only = True
_repr_attributes = ("username", "url")
_repr_attributes = ["username", "url"]
_items = None
# Required for tests.
@ -39,10 +32,9 @@ class HttpStorage(Storage):
useragent=USERAGENT,
verify_fingerprint=None,
auth_cert=None,
filter_hook=None,
*,
connector,
**kwargs,
**kwargs
) -> None:
super().__init__(**kwargs)
@ -62,7 +54,6 @@ class HttpStorage(Storage):
self.useragent = useragent
assert connector is not None
self.connector = connector
self._filter_hook = filter_hook
collection = kwargs.get("collection")
if collection is not None:
@ -73,19 +64,6 @@ class HttpStorage(Storage):
def _default_headers(self):
return {"User-Agent": self.useragent}
def _run_filter_hook(self, raw_item):
    """Pipe ``raw_item`` through the external ``filter_hook`` command.

    The item text is written to the command's stdin; whatever the command
    prints on stdout becomes the item's new raw content (the caller in
    ``list`` skips the item entirely when the result is empty). If the
    command cannot be executed (``OSError``), a warning is logged and the
    item is returned unchanged. The command's exit status is not checked.
    """
    try:
        result = subprocess.run(
            [self._filter_hook],
            input=raw_item,
            capture_output=True,
            encoding="utf-8",
        )
        return result.stdout
    except OSError as e:
        logger.warning(f"Error executing external command: {e!s}")
        return raw_item
async def list(self):
async with aiohttp.ClientSession(
connector=self.connector,
@ -102,13 +80,8 @@ class HttpStorage(Storage):
)
self._items = {}
for raw_item in split_collection((await r.read()).decode("utf-8")):
if self._filter_hook:
raw_item = self._run_filter_hook(raw_item)
if not raw_item:
continue
item = Item(raw_item)
for item in split_collection((await r.read()).decode("utf-8")):
item = Item(item)
if self._ignore_uids:
item = item.with_uid(item.hash)
@ -117,12 +90,11 @@ class HttpStorage(Storage):
for href, (_, etag) in self._items.items():
yield href, etag
async def get(self, href) -> tuple[Item, str]:
async def get(self, href):
if self._items is None:
async for _ in self.list():
pass
assert self._items is not None # type assertion
try:
return self._items[href]
except KeyError:

View file

@ -1,10 +1,6 @@
from __future__ import annotations
import random
from vdirsyncer import exceptions
from vdirsyncer.vobject import Item
from .. import exceptions
from .base import Storage
from .base import normalize_meta_value
@ -35,7 +31,7 @@ class MemoryStorage(Storage):
for href, (etag, _item) in self.items.items():
yield href, etag
async def get(self, href) -> tuple[Item, str]:
async def get(self, href):
etag, item = self.items[href]
return item, etag

View file

@ -1,36 +1,27 @@
from __future__ import annotations
import collections
import contextlib
import functools
import glob
import logging
import os
from collections.abc import Iterable
from typing import Iterable
from vdirsyncer import exceptions
from vdirsyncer.utils import atomic_write
from vdirsyncer.utils import checkfile
from vdirsyncer.utils import expand_path
from vdirsyncer.utils import get_etag_from_file
from vdirsyncer.utils import uniq
from vdirsyncer.vobject import Item
from vdirsyncer.vobject import join_collection
from vdirsyncer.vobject import split_collection
from atomicwrites import atomic_write
from .. import exceptions
from ..utils import checkfile
from ..utils import expand_path
from ..utils import get_etag_from_file
from ..utils import uniq
from ..vobject import Item
from ..vobject import join_collection
from ..vobject import split_collection
from .base import Storage
logger = logging.getLogger(__name__)
def _writing_op(f):
"""Implement at_once for write operations.
Wrap an operation which writes to the storage, implementing `at_once` if it has been
requested. Changes are stored in-memory until the at_once block finishes, at which
time they are all written at once.
"""
@functools.wraps(f)
async def inner(self, *args, **kwargs):
if self._items is None or not self._at_once:
@ -47,7 +38,7 @@ def _writing_op(f):
class SingleFileStorage(Storage):
storage_name = "singlefile"
_repr_attributes = ("path",)
_repr_attributes = ["path"]
_write_mode = "wb"
_append_mode = "ab"
@ -102,7 +93,7 @@ class SingleFileStorage(Storage):
path = path % (collection,)
except TypeError:
raise ValueError(
"Exactly one %s required in path if collection is not null."
"Exactly one %s required in path " "if collection is not null."
)
checkfile(path, create=True)
@ -133,12 +124,11 @@ class SingleFileStorage(Storage):
yield href, etag
async def get(self, href) -> tuple[Item, str]:
async def get(self, href):
if self._items is None or not self._at_once:
async for _ in self.list():
pass
assert self._items is not None # type assertion
try:
return self._items[href]
except KeyError:
@ -187,9 +177,11 @@ class SingleFileStorage(Storage):
self.path
):
raise exceptions.PreconditionFailed(
f"Some other program modified the file {self.path!r}. Re-run the "
"synchronization and make sure absolutely no other program is "
"writing into the same file."
(
"Some other program modified the file {!r}. Re-run the "
"synchronization and make sure absolutely no other program is "
"writing into the same file."
).format(self.path)
)
text = join_collection(item.raw for item, etag in self._items.values())
try:

View file

@ -9,18 +9,15 @@ Yang: http://blog.ezyang.com/2012/08/how-offlineimap-works/
Some modifications to it are explained in
https://unterwaditzer.net/2016/sync-algorithm.html
"""
from __future__ import annotations
import contextlib
import itertools
import logging
from vdirsyncer.exceptions import UserError
from vdirsyncer.storage.base import Storage
from vdirsyncer.utils import uniq
from vdirsyncer.vobject import Item
from ..exceptions import UserError
from ..utils import uniq
from .exceptions import BothReadOnly
from .exceptions import IdentAlreadyExists
from .exceptions import PartialSync
@ -136,13 +133,9 @@ async def sync(
raise BothReadOnly
if conflict_resolution == "a wins":
def conflict_resolution(a, b):
return a
conflict_resolution = lambda a, b: a # noqa: E731
elif conflict_resolution == "b wins":
def conflict_resolution(a, b):
return b
conflict_resolution = lambda a, b: b # noqa: E731
status_nonempty = bool(next(status.iter_old(), None))
@ -212,7 +205,9 @@ class Upload(Action):
href = etag = None
else:
sync_logger.info(
f"Copying (uploading) item {self.ident} to {self.dest.storage}"
"Copying (uploading) item {} to {}".format(
self.ident, self.dest.storage
)
)
href, etag = await self.dest.storage.upload(self.item)
assert href is not None
@ -248,11 +243,7 @@ class Delete(Action):
async def _run_impl(self, a, b):
meta = self.dest.status.get_new(self.ident)
if self.dest.storage.read_only or self.dest.storage.no_delete:
sync_logger.debug(
f"Skipping deletion of item {self.ident} from {self.dest.storage}"
)
else:
if not self.dest.storage.read_only:
sync_logger.info(f"Deleting item {self.ident} from {self.dest.storage}")
await self.dest.storage.delete(meta.href, meta.etag)
@ -300,7 +291,7 @@ class ResolveConflict(Action):
)
def _get_actions(a_info: _StorageInfo, b_info: _StorageInfo):
def _get_actions(a_info, b_info):
for ident in uniq(
itertools.chain(
a_info.status.parent.iter_new(), a_info.status.parent.iter_old()

View file

@ -1,6 +1,4 @@
from __future__ import annotations
from vdirsyncer import exceptions
from .. import exceptions
class SyncError(exceptions.Error):

View file

@ -1,5 +1,3 @@
from __future__ import annotations
import abc
import contextlib
import sqlite3
@ -169,11 +167,6 @@ class SqliteStatus(_StatusBase):
); """
)
def close(self):
if self._c:
self._c.close()
self._c = None
def _is_latest_version(self):
try:
return bool(
@ -192,7 +185,7 @@ class SqliteStatus(_StatusBase):
self._c = new_c
yield
self._c.execute("DELETE FROM status")
self._c.execute("INSERT INTO status SELECT * FROM new_status")
self._c.execute("INSERT INTO status " "SELECT * FROM new_status")
self._c.execute("DELETE FROM new_status")
finally:
self._c = old_c
@ -204,7 +197,7 @@ class SqliteStatus(_StatusBase):
raise IdentAlreadyExists(old_href=old_props.href, new_href=a_props.href)
b_props = self.get_new_b(ident) or ItemMetadata()
self._c.execute(
"INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
"INSERT OR REPLACE INTO new_status " "VALUES(?, ?, ?, ?, ?, ?, ?)",
(
ident,
a_props.href,
@ -223,7 +216,7 @@ class SqliteStatus(_StatusBase):
raise IdentAlreadyExists(old_href=old_props.href, new_href=b_props.href)
a_props = self.get_new_a(ident) or ItemMetadata()
self._c.execute(
"INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
"INSERT OR REPLACE INTO new_status " "VALUES(?, ?, ?, ?, ?, ?, ?)",
(
ident,
a_props.href,
@ -237,14 +230,14 @@ class SqliteStatus(_StatusBase):
def update_ident_a(self, ident, props):
self._c.execute(
"UPDATE new_status SET href_a=?, hash_a=?, etag_a=? WHERE ident=?",
"UPDATE new_status" " SET href_a=?, hash_a=?, etag_a=?" " WHERE ident=?",
(props.href, props.hash, props.etag, ident),
)
assert self._c.rowcount > 0
def update_ident_b(self, ident, props):
self._c.execute(
"UPDATE new_status SET href_b=?, hash_b=?, etag_b=? WHERE ident=?",
"UPDATE new_status" " SET href_b=?, hash_b=?, etag_b=?" " WHERE ident=?",
(props.href, props.hash, props.etag, ident),
)
assert self._c.rowcount > 0
@ -254,10 +247,10 @@ class SqliteStatus(_StatusBase):
def _get_impl(self, ident, side, table):
res = self._c.execute(
f"SELECT href_{side} AS href,"
f" hash_{side} AS hash,"
f" etag_{side} AS etag "
f"FROM {table} WHERE ident=?",
"SELECT href_{side} AS href,"
" hash_{side} AS hash,"
" etag_{side} AS etag "
"FROM {table} WHERE ident=?".format(side=side, table=table),
(ident,),
).fetchone()
if res is None:
@ -305,14 +298,14 @@ class SqliteStatus(_StatusBase):
return
self._c.execute(
"INSERT OR REPLACE INTO new_status VALUES (?, ?, ?, ?, ?, ?, ?)",
"INSERT OR REPLACE INTO new_status" " VALUES (?, ?, ?, ?, ?, ?, ?)",
(ident, a.href, b.href, a.hash, b.hash, a.etag, b.etag),
)
def _get_by_href_impl(self, href, default=(None, None), side=None):
res = self._c.execute(
f"SELECT ident, hash_{side} AS hash, etag_{side} AS etag "
f"FROM status WHERE href_{side}=?",
"SELECT ident, hash_{side} AS hash, etag_{side} AS etag "
"FROM status WHERE href_{side}=?".format(side=side),
(href,),
).fetchone()
if not res:
@ -333,7 +326,7 @@ class SqliteStatus(_StatusBase):
class SubStatus:
def __init__(self, parent: SqliteStatus, side: str):
def __init__(self, parent, side):
self.parent = parent
assert side in "ab"

View file

@ -1,10 +1,6 @@
from __future__ import annotations
import contextlib
import functools
import os
import sys
import tempfile
import uuid
from inspect import getfullargspec
from typing import Callable
@ -24,7 +20,8 @@ _missing = object()
def expand_path(p: str) -> str:
"""Expand $HOME in a path and normalise slashes."""
p = os.path.expanduser(p)
return os.path.normpath(p)
p = os.path.normpath(p)
return p
def split_dict(d: dict, f: Callable):
@ -77,7 +74,7 @@ def get_storage_init_specs(cls, stop_at=object):
spec = getfullargspec(cls.__init__)
traverse_superclass = getattr(cls.__init__, "_traverse_superclass", True)
if traverse_superclass:
if traverse_superclass is True:
if traverse_superclass is True: # noqa
supercls = next(
getattr(x.__init__, "__objclass__", x) for x in cls.__mro__[1:]
)
@ -87,7 +84,7 @@ def get_storage_init_specs(cls, stop_at=object):
else:
superspecs = ()
return (spec, *superspecs)
return (spec,) + superspecs
def get_storage_init_args(cls, stop_at=object):
@ -126,13 +123,12 @@ def checkdir(path: str, create: bool = False, mode: int = 0o750) -> None:
raise exceptions.CollectionNotFound(f"Directory {path} does not exist.")
def checkfile(path, create=False) -> None:
"""Check whether ``path`` is a file.
def checkfile(path, create=False):
"""
Check whether ``path`` is a file.
:param create: Whether to create the file's parent directories if they do
not exist.
:raises CollectionNotFound: if path does not exist.
:raises OSError: if path exists but is not a file.
"""
checkdir(os.path.dirname(path), create=create)
if not os.path.isfile(path):
@ -145,6 +141,24 @@ def checkfile(path, create=False) -> None:
raise exceptions.CollectionNotFound(f"File {path} does not exist.")
class cached_property:
    """A read-only ``@property`` whose result is computed once per instance.

    On first attribute access the wrapped getter runs and its result is
    stored in the instance ``__dict__`` under the property's name; because
    this is a non-data descriptor (no ``__set__``), that cached entry
    shadows the descriptor on all later lookups.  Only usable on methods
    of class instances.
    """

    def __init__(self, fget, doc=None):
        # Mirror the getter's metadata so the descriptor looks like it.
        self.fget = fget
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__
        self.__doc__ = doc or fget.__doc__

    def __get__(self, obj, cls):
        # Class-level access returns the descriptor itself.
        if obj is None:  # pragma: no cover
            return self
        value = self.fget(obj)
        obj.__dict__[self.__name__] = value
        return value
def href_safe(ident, safe=SAFE_UID_CHARS):
    """Return True if every character of ``ident`` occurs in ``safe``."""
    # set difference being empty is exactly the subset relation
    return set(ident).issubset(safe)
@ -158,7 +172,8 @@ def generate_href(ident=None, safe=SAFE_UID_CHARS):
"""
if not ident or not href_safe(ident, safe):
return str(uuid.uuid4())
return ident
else:
return ident
def synchronized(lock=None):
@ -191,7 +206,7 @@ def open_graphical_browser(url, new=0, autoraise=True):
cli_names = {"www-browser", "links", "links2", "elinks", "lynx", "w3m"}
if webbrowser._tryorder is None: # Python 3.8
if webbrowser._tryorder is None: # Python 3.7
webbrowser.register_standard_browsers()
for name in webbrowser._tryorder:
@ -202,28 +217,4 @@ def open_graphical_browser(url, new=0, autoraise=True):
if browser.open(url, new, autoraise):
return
raise RuntimeError("No graphical browser found. Please open the URL manually.")
@contextlib.contextmanager
def atomic_write(dest, mode="wb", overwrite=False):
    """Context manager that writes ``dest`` atomically via a temp file.

    The caller receives a file object backed by a temporary file created in
    the same directory as ``dest`` (so the final rename/link stays on one
    filesystem).  On clean exit the temp file is moved into place; on error
    it is removed and the exception propagates.

    :param dest: final target path.
    :param mode: open mode for the temp file; must include write access.
    :param overwrite: if true, replace an existing ``dest`` (rename);
        otherwise fail with ``FileExistsError`` via ``os.link``.
    :raises RuntimeError: if ``mode`` does not include ``"w"``.
    :raises OSError: if the final rename/link fails (e.g. dest exists and
        ``overwrite`` is false); the temp file is cleaned up in that case.
    """
    if "w" not in mode:
        raise RuntimeError("`atomic_write` requires write access")
    fd, src = tempfile.mkstemp(prefix=os.path.basename(dest), dir=os.path.dirname(dest))
    file = os.fdopen(fd, mode=mode)
    try:
        yield file
    except Exception:
        # bugfix: close the fd before unlinking so it isn't leaked
        file.close()
        os.unlink(src)
        raise
    else:
        file.flush()
        file.close()
        try:
            if overwrite:
                os.rename(src, dest)
            else:
                # os.link raises FileExistsError if dest already exists,
                # which is the desired non-overwrite behavior.
                os.link(src, dest)
                os.unlink(src)
        except OSError:
            # bugfix: don't leave the temp file behind when the final
            # rename/link step fails.
            with contextlib.suppress(OSError):
                os.unlink(src)
            raise
raise RuntimeError("No graphical browser found. Please open the URL " "manually.")

View file

@ -1,10 +1,8 @@
from __future__ import annotations
import hashlib
from functools import cached_property
from itertools import chain
from itertools import tee
from .utils import cached_property
from .utils import uniq
IGNORE_PROPS = (
@ -36,6 +34,7 @@ IGNORE_PROPS = (
class Item:
"""Immutable wrapper class for VCALENDAR (VEVENT, VTODO) and
VCARD"""
@ -188,7 +187,7 @@ def join_collection(items, wrappers=_default_join_wrappers):
"""
items1, items2 = tee((_Component.parse(x) for x in items), 2)
_item_type, wrapper_type = _get_item_type(items1, wrappers)
item_type, wrapper_type = _get_item_type(items1, wrappers)
wrapper_props = []
def _get_item_components(x):
@ -231,7 +230,8 @@ def _get_item_type(components, wrappers):
if not i:
return None, None
raise ValueError("Not sure how to join components.")
else:
raise ValueError("Not sure how to join components.")
class _Component:
@ -279,12 +279,6 @@ class _Component:
stack.append(cls(c_name, [], []))
elif line.startswith("END:"):
component = stack.pop()
c_name = line[len("END:") :].strip().upper()
if c_name != component.name:
raise ValueError(
f"Got END:{c_name}, expected END:{component.name}"
+ f" at line {_i + 1}"
)
if stack:
stack[-1].subcomponents.append(component)
else:
@ -295,16 +289,12 @@ class _Component:
except IndexError:
raise ValueError(f"Parsing error at line {_i + 1}")
if len(stack) > 0:
raise ValueError(
f"Missing END for component(s): {', '.join(c.name for c in stack)}"
)
if multiple:
return rv
if len(rv) != 1:
elif len(rv) != 1:
raise ValueError(f"Found {len(rv)} components, expected one.")
return rv[0]
else:
return rv[0]
def dump_lines(self):
yield f"BEGIN:{self.name}"
@ -321,12 +311,13 @@ class _Component:
for line in lineiter:
if line.startswith(prefix):
break
new_lines.append(line)
else:
new_lines.append(line)
else:
break
for line in lineiter:
if not line.startswith((" ", "\t", *prefix)):
if not line.startswith((" ", "\t")):
new_lines.append(line)
break
@ -344,9 +335,10 @@ class _Component:
return obj not in self.subcomponents and not any(
obj in x for x in self.subcomponents
)
if isinstance(obj, str):
elif isinstance(obj, str):
return self.get(obj, None) is not None
raise ValueError(obj)
else:
raise ValueError(obj)
def __getitem__(self, key):
prefix_without_params = f"{key}:"
@ -356,7 +348,7 @@ class _Component:
if line.startswith(prefix_without_params):
rv = line[len(prefix_without_params) :]
break
if line.startswith(prefix_with_params):
elif line.startswith(prefix_with_params):
rv = line[len(prefix_with_params) :].split(":", 1)[-1]
break
else: