mirror of
https://github.com/samsonjs/vdirsyncer.git
synced 2026-03-25 08:55:50 +00:00
Compare commits
347 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c3262d88cc | ||
| cbb4e314f6 | |||
|
|
ac9919d865 | ||
|
|
b124ce835b | ||
|
|
6708dbbbdc | ||
|
|
81d8444810 | ||
|
|
4990cdf229 | ||
|
|
4c2c60402e | ||
|
|
2f4f4ac72b | ||
|
|
6354db82c4 | ||
|
|
a9b6488dac | ||
|
|
a4ceabf80b | ||
|
|
3488f77cd6 | ||
|
|
19120422a7 | ||
|
|
2e619806a0 | ||
|
|
4669bede07 | ||
|
|
59c1c55407 | ||
|
|
1502f5b5f4 | ||
|
|
a4d4bf8fd1 | ||
|
|
aab70e9fb0 | ||
|
|
ed88406aec | ||
|
|
ffe883a2f1 | ||
|
|
e5f2869580 | ||
|
|
95bb7bd7f9 | ||
|
|
e3b2473383 | ||
|
|
424cfc5799 | ||
|
|
29312e87c5 | ||
|
|
c77b22334a | ||
|
|
02350c924b | ||
|
|
605f878f9b | ||
|
|
bb2b71da81 | ||
|
|
065ebe4752 | ||
|
|
0d741022a9 | ||
|
|
b5d3b7e578 | ||
|
|
9677cf9812 | ||
|
|
6da84c7881 | ||
|
|
dceb113334 | ||
|
|
01fa614b6b | ||
|
|
20cc1247ed | ||
|
|
2f548e048d | ||
|
|
5d343264f3 | ||
|
|
bc3fa8bd39 | ||
|
|
8803d5a086 | ||
|
|
96754a3d0a | ||
|
|
d42707c108 | ||
|
|
ddfe3cc749 | ||
|
|
84ff0ac943 | ||
|
|
388c16f188 | ||
|
|
78f41d32ce | ||
|
|
164559ad7a | ||
|
|
2c6dc4cddf | ||
|
|
9bbb7fa91a | ||
|
|
f8bcafa9d7 | ||
|
|
162879df21 | ||
|
|
3b9db0e4db | ||
|
|
63d2e6c795 | ||
|
|
03d1c4666d | ||
|
|
ecdd565be4 | ||
|
|
17e43fd633 | ||
|
|
2b4496fea4 | ||
|
|
fc4a02c0c9 | ||
|
|
c19802e4d8 | ||
|
|
cce8fef8de | ||
|
|
9a0dbc8cd0 | ||
|
|
32453cccfc | ||
|
|
057f3af293 | ||
|
|
e76d8a5b03 | ||
|
|
d8961232c4 | ||
|
|
646e0b48a5 | ||
|
|
fb6a859b88 | ||
|
|
ff999b5b74 | ||
|
|
41b48857eb | ||
|
|
70d09e6d5d | ||
|
|
8b063c39cb | ||
|
|
12a06917db | ||
|
|
2fee1d67f2 | ||
|
|
a934d5ec66 | ||
|
|
c79d3680cd | ||
|
|
cd050d57b9 | ||
|
|
8c98992f74 | ||
|
|
c2eed9fb59 | ||
|
|
a490544405 | ||
|
|
688d6f907f | ||
|
|
2e7e31fdbf | ||
|
|
616d7aacb0 | ||
|
|
89129e37b6 | ||
|
|
88722ef4b7 | ||
|
|
35f299679f | ||
|
|
67e1c0ded5 | ||
|
|
89a01631fa | ||
|
|
611b8667a3 | ||
|
|
8550475548 | ||
|
|
cd2445b991 | ||
|
|
5ca2742271 | ||
|
|
5ac9dcec29 | ||
|
|
a513a7e4fa | ||
|
|
5ae05245e6 | ||
|
|
055ed120dd | ||
|
|
31816dc652 | ||
|
|
2e023a5feb | ||
|
|
14afe16a13 | ||
|
|
5766e1c501 | ||
|
|
fade399a21 | ||
|
|
3433f8a034 | ||
|
|
6a3077f9dc | ||
|
|
42c5dba208 | ||
|
|
7991419ab1 | ||
|
|
03e6afe9dc | ||
|
|
762d369560 | ||
|
|
2396c46b04 | ||
|
|
b626236128 | ||
|
|
45b67122fe | ||
|
|
7a387b8efe | ||
|
|
889e1f9ea2 | ||
|
|
d1f93ea0be | ||
|
|
82fd03be64 | ||
|
|
b50f9def00 | ||
|
|
91c16b3215 | ||
|
|
d45ae04006 | ||
|
|
9abf9c8e45 | ||
|
|
0f0e5b97d3 | ||
|
|
301aa0e16f | ||
|
|
dcd3b7a359 | ||
|
|
df8c4a1cf5 | ||
|
|
5a17ec1bba | ||
|
|
ab3aa108fc | ||
|
|
f194bb0a4c | ||
|
|
c073d55b2f | ||
|
|
3611e7d62f | ||
|
|
adc974bdd1 | ||
|
|
efad9eb624 | ||
|
|
246568f149 | ||
|
|
439f1e6f50 | ||
|
|
ef8e8980d1 | ||
|
|
08616abbb5 | ||
|
|
4237ff863c | ||
|
|
1a6ad54543 | ||
|
|
203468fd25 | ||
|
|
6368af1365 | ||
|
|
b38306bdd0 | ||
|
|
d26557bee3 | ||
|
|
b9f749467c | ||
|
|
7e5910a341 | ||
|
|
7403182645 | ||
|
|
bad381e5ba | ||
|
|
700586d959 | ||
|
|
c1d3efb6b8 | ||
|
|
c55b969791 | ||
|
|
079a156bf8 | ||
|
|
242216d85a | ||
|
|
b1ef68089b | ||
|
|
85ae33955f | ||
|
|
54a90aa5dd | ||
|
|
443ae3d3e7 | ||
|
|
3bf9a3d684 | ||
|
|
2138c43456 | ||
|
|
5a46c93987 | ||
|
|
180f91f0fe | ||
|
|
6443d37c97 | ||
|
|
13ca008380 | ||
|
|
24cb49f64c | ||
|
|
defe8e2591 | ||
|
|
e11fa357ff | ||
|
|
e20a65793e | ||
|
|
df14865f43 | ||
|
|
f45ecf6ad0 | ||
|
|
72bcef282d | ||
|
|
3a56f26d05 | ||
|
|
4dd17c7f59 | ||
|
|
73f2554932 | ||
|
|
627f574777 | ||
|
|
37a7f9bea8 | ||
|
|
d2d1532883 | ||
|
|
0dcef26b9d | ||
|
|
d646357cd3 | ||
|
|
8c6c0be15a | ||
|
|
dfc29db312 | ||
|
|
a41cf64b6c | ||
|
|
a2eda52b71 | ||
|
|
61006f0685 | ||
|
|
9b48bccde2 | ||
|
|
7c72caef3f | ||
|
|
0045b23800 | ||
|
|
c07fbc2053 | ||
|
|
e3485beb45 | ||
|
|
0f83fd96d5 | ||
|
|
8980a80560 | ||
|
|
90b6ce1d04 | ||
|
|
7a801d3d5d | ||
|
|
2c44f7d773 | ||
|
|
6506c86f58 | ||
|
|
51b409017d | ||
|
|
84613e73b0 | ||
|
|
a4ef45095e | ||
|
|
63ba948241 | ||
|
|
3067b32de5 | ||
|
|
a87518c474 | ||
|
|
b26e771865 | ||
|
|
2fbb0ab7a5 | ||
|
|
60352f84fe | ||
|
|
b7201013bc | ||
|
|
b61095ad47 | ||
|
|
278e6de8b0 | ||
|
|
843c58b92e | ||
|
|
cd412aa161 | ||
|
|
c5f80d1644 | ||
|
|
c50eabc77e | ||
|
|
a88389c4f1 | ||
|
|
1f7497c9d1 | ||
|
|
baaf737873 | ||
|
|
7c2fed1ceb | ||
|
|
3be048be18 | ||
|
|
f103b10b2a | ||
|
|
e44c704ae3 | ||
|
|
f32e0a9c1f | ||
|
|
24e3625cc0 | ||
|
|
4df54b9231 | ||
|
|
8557c6e0bb | ||
|
|
9fdc93c140 | ||
|
|
f3f8eb6824 | ||
|
|
b18e1c78d2 | ||
|
|
0a4114ef9f | ||
|
|
06f8001d65 | ||
|
|
61f3785e6c | ||
|
|
b0020f9436 | ||
|
|
74d738ec80 | ||
|
|
711eccedab | ||
|
|
5d58a86ba0 | ||
|
|
60c3b59552 | ||
|
|
22a127191d | ||
|
|
dc6e4ba5af | ||
|
|
ea640001d0 | ||
|
|
545b8ce2f1 | ||
|
|
3035d9cfae | ||
|
|
68c5968be8 | ||
|
|
0d1ca319c0 | ||
|
|
817eab51f1 | ||
|
|
e8b72130c2 | ||
|
|
8a44b278d1 | ||
|
|
54a5bf4ad3 | ||
|
|
10659b80ba | ||
|
|
1c6beae9b4 | ||
|
|
7ce9466c46 | ||
|
|
9f0390ee21 | ||
|
|
4e3f39468b | ||
|
|
a7e984f013 | ||
|
|
7c7f97c6b2 | ||
|
|
7e9132b817 | ||
|
|
59b95d9999 | ||
|
|
4dd15716db | ||
|
|
ec101b20d6 | ||
|
|
2c551afafb | ||
|
|
ad7bb82f40 | ||
|
|
961203e865 | ||
|
|
d72536805c | ||
|
|
ac6e19261f | ||
|
|
cbb0cad827 | ||
|
|
2c69f865f0 | ||
|
|
63510414ae | ||
|
|
fce5062a12 | ||
|
|
c77d750ef6 | ||
|
|
02ee9f96e4 | ||
|
|
ddaeccb2ee | ||
|
|
63ef204835 | ||
|
|
7d61cd3e2e | ||
|
|
db6da70c26 | ||
|
|
bf95bf2941 | ||
|
|
b3c9df1b1d | ||
|
|
87574be547 | ||
|
|
2e35214421 | ||
|
|
f5c2026dcf | ||
|
|
acf29cf659 | ||
|
|
fbd5ff88d5 | ||
|
|
7605416054 | ||
|
|
7a12e6028c | ||
|
|
d6876c6bad | ||
|
|
09eb375c5b | ||
|
|
48747463ed | ||
|
|
955f434d9d | ||
|
|
889183ec89 | ||
|
|
0650cc3bc2 | ||
|
|
6281e7a237 | ||
|
|
dff48f101b | ||
|
|
1081a15895 | ||
|
|
cf1d082628 | ||
|
|
54e829262d | ||
|
|
8830307e38 | ||
|
|
7a7deffa2c | ||
|
|
ecb181d9d7 | ||
|
|
fed1ee69c3 | ||
|
|
48aa4912a2 | ||
|
|
8886854367 | ||
|
|
a910e9f446 | ||
|
|
f3714fc493 | ||
|
|
6af4dd124b | ||
|
|
bc5e03630e | ||
|
|
6491bc53fb | ||
|
|
21eccfc2ef | ||
|
|
7b493416f7 | ||
|
|
5b8f00e720 | ||
|
|
0556e53e0c | ||
|
|
7379a96f73 | ||
|
|
c0ccb3d1aa | ||
|
|
98fcd12fa7 | ||
|
|
f2a1afe6d3 | ||
|
|
e16f83c1c2 | ||
|
|
55c563ff8c | ||
|
|
a5731b269e | ||
|
|
459efbf7af | ||
|
|
58aa0a3a31 | ||
|
|
177748d3d1 | ||
|
|
61edfc090e | ||
|
|
b3bee77c17 | ||
|
|
21db2547cb | ||
|
|
be131a0063 | ||
|
|
71879045e4 | ||
|
|
54e0c114fa | ||
|
|
17f422c1b7 | ||
|
|
a9f1a5195a | ||
|
|
8dab258ef0 | ||
|
|
f09d060580 | ||
|
|
ef2419efa9 | ||
|
|
2eff8e08e1 | ||
|
|
152ebb05dd | ||
|
|
5a9fc2cc7e | ||
|
|
dfed9794cb | ||
|
|
8d69b73c9e | ||
|
|
1a1f6f0788 | ||
|
|
7c9170c677 | ||
|
|
623c0537e1 | ||
|
|
4930b5f389 | ||
|
|
25435ce11d | ||
|
|
1f6cc6f8be | ||
|
|
59b6e24795 | ||
|
|
722dace828 | ||
|
|
6cebba0853 | ||
|
|
29528123a3 | ||
|
|
72618e374d | ||
|
|
c254b4ad1d | ||
|
|
cfd5af832a | ||
|
|
342cb863cd | ||
|
|
e1c979751d | ||
|
|
0231f3999e | ||
|
|
d1b148c919 | ||
|
|
e70e8c03e8 | ||
|
|
95bf683771 | ||
|
|
50cb2def73 |
116 changed files with 3934 additions and 3050 deletions
|
|
@ -5,19 +5,26 @@ packages:
|
||||||
- docker
|
- docker
|
||||||
- docker-compose
|
- docker-compose
|
||||||
# Build dependencies:
|
# Build dependencies:
|
||||||
- python-pip
|
|
||||||
- python-wheel
|
- python-wheel
|
||||||
|
- python-build
|
||||||
|
- python-installer
|
||||||
|
- python-setuptools-scm
|
||||||
# Runtime dependencies:
|
# Runtime dependencies:
|
||||||
- python-atomicwrites
|
|
||||||
- python-click
|
- python-click
|
||||||
- python-click-log
|
- python-click-log
|
||||||
- python-click-threading
|
- python-click-threading
|
||||||
- python-requests
|
- python-requests
|
||||||
- python-requests-toolbelt
|
- python-aiohttp-oauthlib
|
||||||
|
- python-tenacity
|
||||||
# Test dependencies:
|
# Test dependencies:
|
||||||
- python-hypothesis
|
- python-hypothesis
|
||||||
- python-pytest-cov
|
- python-pytest-cov
|
||||||
- python-pytest-localserver
|
- python-pytest-httpserver
|
||||||
|
- python-trustme
|
||||||
|
- python-pytest-asyncio
|
||||||
|
- python-aiohttp
|
||||||
|
- python-aiostream
|
||||||
|
- python-aioresponses
|
||||||
sources:
|
sources:
|
||||||
- https://github.com/pimutils/vdirsyncer
|
- https://github.com/pimutils/vdirsyncer
|
||||||
environment:
|
environment:
|
||||||
|
|
@ -28,11 +35,14 @@ environment:
|
||||||
REQUIREMENTS: release
|
REQUIREMENTS: release
|
||||||
# TODO: ETESYNC_TESTS
|
# TODO: ETESYNC_TESTS
|
||||||
tasks:
|
tasks:
|
||||||
- setup: |
|
- check-python:
|
||||||
|
python --version | grep 'Python 3.13'
|
||||||
|
- docker: |
|
||||||
sudo systemctl start docker
|
sudo systemctl start docker
|
||||||
|
- setup: |
|
||||||
cd vdirsyncer
|
cd vdirsyncer
|
||||||
python setup.py build
|
python -m build --wheel --skip-dependency-check --no-isolation
|
||||||
sudo pip install --no-index .
|
sudo python -m installer dist/*.whl
|
||||||
- test: |
|
- test: |
|
||||||
cd vdirsyncer
|
cd vdirsyncer
|
||||||
make -e ci-test
|
make -e ci-test
|
||||||
|
|
@ -3,11 +3,13 @@
|
||||||
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
|
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
|
||||||
# here, and consider that our "oldest suppported environment".
|
# here, and consider that our "oldest suppported environment".
|
||||||
|
|
||||||
image: archlinux
|
image: alpine/3.19 # python 3.11
|
||||||
packages:
|
packages:
|
||||||
- docker
|
- docker
|
||||||
|
- docker-cli
|
||||||
- docker-compose
|
- docker-compose
|
||||||
- python-pip
|
- py3-pip
|
||||||
|
- python3-dev
|
||||||
sources:
|
sources:
|
||||||
- https://github.com/pimutils/vdirsyncer
|
- https://github.com/pimutils/vdirsyncer
|
||||||
environment:
|
environment:
|
||||||
|
|
@ -16,15 +18,19 @@ environment:
|
||||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
||||||
DAV_SERVER: radicale xandikos
|
DAV_SERVER: radicale xandikos
|
||||||
REQUIREMENTS: minimal
|
REQUIREMENTS: minimal
|
||||||
# TODO: ETESYNC_TESTS
|
|
||||||
tasks:
|
tasks:
|
||||||
|
- venv: |
|
||||||
|
python3 -m venv $HOME/venv
|
||||||
|
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
||||||
|
- docker: |
|
||||||
|
sudo addgroup $(whoami) docker
|
||||||
|
sudo service docker start
|
||||||
- setup: |
|
- setup: |
|
||||||
sudo systemctl start docker
|
|
||||||
cd vdirsyncer
|
cd vdirsyncer
|
||||||
|
# Hack, no idea why it's needed
|
||||||
|
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
|
||||||
make -e install-dev
|
make -e install-dev
|
||||||
- test: |
|
- test: |
|
||||||
cd vdirsyncer
|
cd vdirsyncer
|
||||||
# Non-system python is used for packages:
|
|
||||||
export PATH=$PATH:~/.local/bin/
|
|
||||||
make -e ci-test
|
make -e ci-test
|
||||||
make -e ci-test-storage
|
make -e ci-test-storage
|
||||||
|
|
@ -5,11 +5,10 @@ packages:
|
||||||
- docker
|
- docker
|
||||||
- docker-compose
|
- docker-compose
|
||||||
- python-pip
|
- python-pip
|
||||||
- twine
|
|
||||||
sources:
|
sources:
|
||||||
- https://github.com/pimutils/vdirsyncer
|
- https://github.com/pimutils/vdirsyncer
|
||||||
secrets:
|
secrets:
|
||||||
- a36c8ba3-fba0-4338-b402-6aea0fbe771e
|
- 4d9a6dfe-5c8d-48bd-b864-a2f5d772c536
|
||||||
environment:
|
environment:
|
||||||
BUILD: test
|
BUILD: test
|
||||||
CI: true
|
CI: true
|
||||||
|
|
@ -18,23 +17,29 @@ environment:
|
||||||
REQUIREMENTS: release
|
REQUIREMENTS: release
|
||||||
# TODO: ETESYNC_TESTS
|
# TODO: ETESYNC_TESTS
|
||||||
tasks:
|
tasks:
|
||||||
- setup: |
|
- venv: |
|
||||||
|
python -m venv $HOME/venv
|
||||||
|
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
||||||
|
- docker: |
|
||||||
sudo systemctl start docker
|
sudo systemctl start docker
|
||||||
|
- setup: |
|
||||||
cd vdirsyncer
|
cd vdirsyncer
|
||||||
make -e install-dev -e install-docs
|
make -e install-dev
|
||||||
- test: |
|
- test: |
|
||||||
cd vdirsyncer
|
cd vdirsyncer
|
||||||
# Non-system python is used for packages:
|
|
||||||
export PATH=$PATH:~/.local/bin/
|
|
||||||
make -e ci-test
|
make -e ci-test
|
||||||
make -e ci-test-storage
|
make -e ci-test-storage
|
||||||
- style: |
|
- check: |
|
||||||
|
cd vdirsyncer
|
||||||
|
make check
|
||||||
|
- check-secrets: |
|
||||||
|
# Stop here if this is a PR. PRs can't run with the below secrets.
|
||||||
|
[ -f ~/fastmail-secrets ] || complete-build
|
||||||
|
- extra-storages: |
|
||||||
|
set +x
|
||||||
|
source ~/fastmail-secrets
|
||||||
|
set -x
|
||||||
|
|
||||||
cd vdirsyncer
|
cd vdirsyncer
|
||||||
# Non-system python is used for packages:
|
|
||||||
export PATH=$PATH:~/.local/bin/
|
export PATH=$PATH:~/.local/bin/
|
||||||
make -e style
|
DAV_SERVER=fastmail pytest tests/storage
|
||||||
git describe --exact-match --tags || complete-build
|
|
||||||
- publish: |
|
|
||||||
cd vdirsyncer
|
|
||||||
python setup.py sdist bdist_wheel
|
|
||||||
twine upload dist/*
|
|
||||||
|
|
@ -2,10 +2,3 @@ comment: false
|
||||||
coverage:
|
coverage:
|
||||||
status:
|
status:
|
||||||
patch: false
|
patch: false
|
||||||
project:
|
|
||||||
unit:
|
|
||||||
flags: unit
|
|
||||||
system:
|
|
||||||
flags: system
|
|
||||||
storage:
|
|
||||||
flags: storage
|
|
||||||
|
|
|
||||||
1
.envrc
Normal file
1
.envrc
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
layout python3
|
||||||
38
.github/workflows/publish.yml
vendored
38
.github/workflows/publish.yml
vendored
|
|
@ -1,38 +0,0 @@
|
||||||
name: Publish
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags:
|
|
||||||
- 0.*
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
github-release:
|
|
||||||
runs-on: ubuntu-18.04
|
|
||||||
name: Publish GitHub Release
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@master
|
|
||||||
- uses: actions/setup-python@v1
|
|
||||||
with:
|
|
||||||
python-version: 3.7
|
|
||||||
architecture: x64
|
|
||||||
- run: pip install wheel
|
|
||||||
- run: python setup.py sdist bdist_wheel
|
|
||||||
- uses: softprops/action-gh-release@v1
|
|
||||||
with:
|
|
||||||
files: dist/*
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
pypi:
|
|
||||||
runs-on: ubuntu-18.04
|
|
||||||
name: Publish package on PyPI
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@master
|
|
||||||
- uses: actions/setup-python@v1
|
|
||||||
with:
|
|
||||||
python-version: 3.7
|
|
||||||
architecture: x64
|
|
||||||
- run: pip install wheel
|
|
||||||
- run: python setup.py sdist bdist_wheel
|
|
||||||
- uses: pypa/gh-action-pypi-publish@master
|
|
||||||
with:
|
|
||||||
password: ${{ secrets.PYPI_TOKEN }}
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v4.0.1
|
rev: v5.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: trailing-whitespace
|
- id: trailing-whitespace
|
||||||
args: [--markdown-linebreak-ext=md]
|
args: [--markdown-linebreak-ext=md]
|
||||||
|
|
@ -8,16 +8,32 @@ repos:
|
||||||
- id: check-toml
|
- id: check-toml
|
||||||
- id: check-added-large-files
|
- id: check-added-large-files
|
||||||
- id: debug-statements
|
- id: debug-statements
|
||||||
- repo: https://gitlab.com/pycqa/flake8
|
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||||
rev: "3.9.2"
|
rev: "v1.15.0"
|
||||||
hooks:
|
hooks:
|
||||||
- id: flake8
|
- id: mypy
|
||||||
additional_dependencies: [flake8-import-order, flake8-bugbear]
|
files: vdirsyncer/.*
|
||||||
- repo: https://github.com/psf/black
|
additional_dependencies:
|
||||||
rev: "21.6b0"
|
- types-setuptools
|
||||||
|
- types-docutils
|
||||||
|
- types-requests
|
||||||
|
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||||
|
rev: 'v0.11.4'
|
||||||
hooks:
|
hooks:
|
||||||
- id: black
|
- id: ruff
|
||||||
- repo: https://github.com/asottile/reorder_python_imports
|
args: [--fix, --exit-non-zero-on-fix]
|
||||||
rev: v2.5.0
|
- id: ruff-format
|
||||||
|
- repo: local
|
||||||
hooks:
|
hooks:
|
||||||
- id: reorder-python-imports
|
- id: typos-syncroniz
|
||||||
|
name: typos-syncroniz
|
||||||
|
language: system
|
||||||
|
# Not how you spell "synchronise"
|
||||||
|
entry: sh -c "git grep -i syncroniz"
|
||||||
|
files: ".*/.*"
|
||||||
|
- id: typos-text-icalendar
|
||||||
|
name: typos-text-icalendar
|
||||||
|
language: system
|
||||||
|
# It's "text/calendar", no "i".
|
||||||
|
entry: sh -c "git grep -i 'text/icalendar'"
|
||||||
|
files: ".*/.*"
|
||||||
|
|
|
||||||
16
.readthedocs.yaml
Normal file
16
.readthedocs.yaml
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
sphinx:
|
||||||
|
configuration: docs/conf.py
|
||||||
|
|
||||||
|
build:
|
||||||
|
os: "ubuntu-22.04"
|
||||||
|
tools:
|
||||||
|
python: "3.9"
|
||||||
|
|
||||||
|
python:
|
||||||
|
install:
|
||||||
|
- method: pip
|
||||||
|
path: .
|
||||||
|
extra_requirements:
|
||||||
|
- docs
|
||||||
|
|
@ -4,15 +4,22 @@ Contributors
|
||||||
In alphabetical order:
|
In alphabetical order:
|
||||||
|
|
||||||
- Ben Boeckel
|
- Ben Boeckel
|
||||||
|
- Bleala
|
||||||
- Christian Geier
|
- Christian Geier
|
||||||
- Clément Mondon
|
- Clément Mondon
|
||||||
|
- Corey Hinshaw
|
||||||
|
- Kai Herlemann
|
||||||
- Hugo Osvaldo Barrera
|
- Hugo Osvaldo Barrera
|
||||||
|
- Jason Cox
|
||||||
- Julian Mehne
|
- Julian Mehne
|
||||||
- Malte Kiefer
|
- Malte Kiefer
|
||||||
- Marek Marczykowski-Górecki
|
- Marek Marczykowski-Górecki
|
||||||
- Markus Unterwaditzer
|
- Markus Unterwaditzer
|
||||||
- Michael Adler
|
- Michael Adler
|
||||||
|
- rEnr3n
|
||||||
- Thomas Weißschuh
|
- Thomas Weißschuh
|
||||||
|
- Witcher01
|
||||||
|
- samm81
|
||||||
|
|
||||||
Special thanks goes to:
|
Special thanks goes to:
|
||||||
|
|
||||||
|
|
|
||||||
105
CHANGELOG.rst
105
CHANGELOG.rst
|
|
@ -9,6 +9,111 @@ Package maintainers and users who have to manually update their installation
|
||||||
may want to subscribe to `GitHub's tag feed
|
may want to subscribe to `GitHub's tag feed
|
||||||
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
|
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
|
||||||
|
|
||||||
|
Version 0.21.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Implement retrying for ``google`` storage type when a rate limit is reached.
|
||||||
|
- ``tenacity`` is now a required dependency.
|
||||||
|
- Drop support for Python 3.8.
|
||||||
|
- Retry transient network errors for nullipotent requests.
|
||||||
|
|
||||||
|
Version 0.20.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Remove dependency on abandoned ``atomicwrites`` library.
|
||||||
|
- Implement ``filter_hook`` for the HTTP storage.
|
||||||
|
- Drop support for Python 3.7.
|
||||||
|
- Add support for Python 3.12 and Python 3.13.
|
||||||
|
- Properly close the status database after using. This especially affects tests,
|
||||||
|
where we were leaking a large amount of file descriptors.
|
||||||
|
- Extend supported versions of ``aiostream`` to include 0.7.x.
|
||||||
|
|
||||||
|
Version 0.19.3
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Added a no_delete option to the storage configuration. :gh:`1090`
|
||||||
|
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
|
||||||
|
- Add an option to request vCard v4.0. :gh:`1066`
|
||||||
|
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
|
||||||
|
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
|
||||||
|
- Implement digest auth. :gh:`1137`
|
||||||
|
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
|
||||||
|
|
||||||
|
Version 0.19.2
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Improve the performance of ``SingleFileStorage``. :gh:`818`
|
||||||
|
- Properly document some caveats of the Google Contacts storage.
|
||||||
|
- Fix crash when using auth certs. :gh:`1033`
|
||||||
|
- The ``filesystem`` storage can be specified with ``type =
|
||||||
|
"filesystem/icalendar"`` or ``type = "filesystem/vcard"``. This has not
|
||||||
|
functional impact, and is merely for forward compatibility with the Rust
|
||||||
|
implementation of vdirsyncer.
|
||||||
|
- Python 3.10 and 3.11 are officially supported.
|
||||||
|
- Instructions for integrating with Google CalDav/CardDav have changed.
|
||||||
|
Applications now need to be registered as "Desktop applications". Using "Web
|
||||||
|
application" no longer works due to changes on Google's side. :gh:`1078`
|
||||||
|
|
||||||
|
Version 0.19.1
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Fixed crash when operating on Google Contacts. :gh:`994`
|
||||||
|
- The ``HTTP_PROXY`` and ``HTTPS_PROXY`` are now respected. :gh:`1031`
|
||||||
|
- Instructions for integrating with Google CalDav/CardDav have changed.
|
||||||
|
Applications now need to be registered as "Web Application". :gh:`975`
|
||||||
|
- Various documentation updates.
|
||||||
|
|
||||||
|
Version 0.19.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Add "shell" password fetch strategy to pass command string to a shell.
|
||||||
|
- Add "description" and "order" as metadata. These fetch the CalDAV:
|
||||||
|
calendar-description, ``CardDAV:addressbook-description`` and
|
||||||
|
``apple-ns:calendar-order`` properties respectively.
|
||||||
|
- Add a new ``showconfig`` status. This prints *some* configuration values as
|
||||||
|
JSON. This is intended to be used by external tools and helpers that interact
|
||||||
|
with ``vdirsyncer``, and considered experimental.
|
||||||
|
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
|
||||||
|
"create", it implicitly creates missing collections during sync without user
|
||||||
|
prompts. This simplifies workflows where collections should be automatically
|
||||||
|
created on both sides.
|
||||||
|
- Update TLS-related tests that were failing due to weak MDs. :gh:`903`
|
||||||
|
- ``pytest-httpserver`` and ``trustme`` are now required for tests.
|
||||||
|
- ``pytest-localserver`` is no longer required for tests.
|
||||||
|
- Multithreaded support has been dropped. The ``"--max-workers`` has been removed.
|
||||||
|
- A new ``asyncio`` backend is now used. So far, this shows substantial speed
|
||||||
|
improvements in ``discovery`` and ``metasync``, but little change in `sync`.
|
||||||
|
This will likely continue improving over time. :gh:`906`
|
||||||
|
- The ``google`` storage types no longer require ``requests-oauthlib``, but
|
||||||
|
require ``python-aiohttp-oauthlib`` instead.
|
||||||
|
- Vdirsyncer no longer includes experimental support for `EteSync
|
||||||
|
<https://www.etesync.com/>`_. The existing integration had not been supported
|
||||||
|
for a long time and no longer worked. Support for external storages may be
|
||||||
|
added if anyone is interested in maintaining an EteSync plugin. EteSync
|
||||||
|
users should consider using `etesync-dav`_.
|
||||||
|
- The ``plist`` for macOS has been dropped. It was broken and homebrew
|
||||||
|
generates their own based on package metadata. macOS users are encouraged to
|
||||||
|
use that as a reference.
|
||||||
|
|
||||||
|
.. _etesync-dav: https://github.com/etesync/etesync-dav
|
||||||
|
|
||||||
|
Changes to SSL configuration
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
Support for ``md5`` and ``sha1`` certificate fingerprints has been dropped. If
|
||||||
|
you're validating certificate fingerprints, use ``sha256`` instead.
|
||||||
|
|
||||||
|
When using a custom ``verify_fingerprint``, CA validation is always disabled.
|
||||||
|
|
||||||
|
If ``verify_fingerprint`` is unset, CA verification is always active. Disabling
|
||||||
|
both features is insecure and no longer supported.
|
||||||
|
|
||||||
|
The ``verify`` parameter no longer takes boolean values, it is now optional and
|
||||||
|
only takes a string to a custom CA for verification.
|
||||||
|
|
||||||
|
The ``verify`` and ``verify_fingerprint`` will likely be merged into a single
|
||||||
|
parameter in future.
|
||||||
|
|
||||||
Version 0.18.0
|
Version 0.18.0
|
||||||
==============
|
==============
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,6 @@
|
||||||
prune docker
|
prune docker
|
||||||
prune scripts
|
prune scripts
|
||||||
prune tests/storage/servers
|
prune tests/storage/servers
|
||||||
prune tests/storage/etesync
|
|
||||||
recursive-include tests/storage/servers/radicale *
|
recursive-include tests/storage/servers/radicale *
|
||||||
recursive-include tests/storage/servers/skip *
|
recursive-include tests/storage/servers/skip *
|
||||||
|
|
||||||
|
|
|
||||||
52
Makefile
52
Makefile
|
|
@ -12,9 +12,6 @@ export REQUIREMENTS := release
|
||||||
# Set this to true if you run vdirsyncer's test as part of e.g. packaging.
|
# Set this to true if you run vdirsyncer's test as part of e.g. packaging.
|
||||||
export DETERMINISTIC_TESTS := false
|
export DETERMINISTIC_TESTS := false
|
||||||
|
|
||||||
# Run the etesync testsuite.
|
|
||||||
export ETESYNC_TESTS := false
|
|
||||||
|
|
||||||
# Assume to run in CI. Don't use this outside of a virtual machine. It will
|
# Assume to run in CI. Don't use this outside of a virtual machine. It will
|
||||||
# heavily "pollute" your system, such as attempting to install a new Python
|
# heavily "pollute" your system, such as attempting to install a new Python
|
||||||
# systemwide.
|
# systemwide.
|
||||||
|
|
@ -23,19 +20,8 @@ export CI := false
|
||||||
# Whether to generate coverage data while running tests.
|
# Whether to generate coverage data while running tests.
|
||||||
export COVERAGE := $(CI)
|
export COVERAGE := $(CI)
|
||||||
|
|
||||||
# Additional arguments that should be passed to py.test.
|
|
||||||
PYTEST_ARGS =
|
|
||||||
|
|
||||||
# Variables below this line are not very interesting for getting started.
|
# Variables below this line are not very interesting for getting started.
|
||||||
|
|
||||||
TEST_EXTRA_PACKAGES =
|
|
||||||
|
|
||||||
ifeq ($(ETESYNC_TESTS), true)
|
|
||||||
TEST_EXTRA_PACKAGES += git+https://github.com/etesync/journal-manager@v0.5.2
|
|
||||||
TEST_EXTRA_PACKAGES += django djangorestframework==3.8.2 wsgi_intercept drf-nested-routers
|
|
||||||
endif
|
|
||||||
|
|
||||||
PYTEST = py.test $(PYTEST_ARGS)
|
|
||||||
CODECOV_PATH = /tmp/codecov.sh
|
CODECOV_PATH = /tmp/codecov.sh
|
||||||
|
|
||||||
all:
|
all:
|
||||||
|
|
@ -43,35 +29,21 @@ all:
|
||||||
|
|
||||||
ci-test:
|
ci-test:
|
||||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
||||||
$(PYTEST) tests/unit/
|
pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/
|
||||||
bash $(CODECOV_PATH) -c -F unit
|
bash $(CODECOV_PATH) -c
|
||||||
$(PYTEST) tests/system/
|
|
||||||
bash $(CODECOV_PATH) -c -F system
|
|
||||||
[ "$(ETESYNC_TESTS)" = "false" ] || make test-storage
|
|
||||||
|
|
||||||
ci-test-storage:
|
ci-test-storage:
|
||||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
||||||
set -ex; \
|
set -ex; \
|
||||||
for server in $(DAV_SERVER); do \
|
for server in $(DAV_SERVER); do \
|
||||||
DAV_SERVER=$$server $(PYTEST) --cov-append tests/storage; \
|
DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \
|
||||||
done
|
done
|
||||||
bash $(CODECOV_PATH) -c -F storage
|
bash $(CODECOV_PATH) -c
|
||||||
|
|
||||||
test:
|
check:
|
||||||
$(PYTEST)
|
ruff check
|
||||||
|
ruff format --diff
|
||||||
style:
|
#mypy vdirsyncer
|
||||||
pre-commit run --all
|
|
||||||
! git grep -i syncroniz */*
|
|
||||||
! git grep -i 'text/icalendar' */*
|
|
||||||
sphinx-build -W -b html ./docs/ ./docs/_build/html/
|
|
||||||
|
|
||||||
install-docs:
|
|
||||||
pip install -Ur docs-requirements.txt
|
|
||||||
|
|
||||||
docs:
|
|
||||||
cd docs && make html
|
|
||||||
sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/
|
|
||||||
|
|
||||||
release-deb:
|
release-deb:
|
||||||
sh scripts/release-deb.sh debian jessie
|
sh scripts/release-deb.sh debian jessie
|
||||||
|
|
@ -82,12 +54,10 @@ release-deb:
|
||||||
|
|
||||||
install-dev:
|
install-dev:
|
||||||
pip install -U pip setuptools wheel
|
pip install -U pip setuptools wheel
|
||||||
pip install -e .
|
pip install -e '.[test,check,docs]'
|
||||||
pip install -Ur test-requirements.txt $(TEST_EXTRA_PACKAGES)
|
|
||||||
pip install pre-commit
|
|
||||||
[ "$(ETESYNC_TESTS)" = "false" ] || pip install -Ue .[etesync]
|
|
||||||
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
||||||
pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
|
pip install pyproject-dependencies && \
|
||||||
|
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
.PHONY: docs
|
.PHONY: docs
|
||||||
|
|
|
||||||
17
README.rst
17
README.rst
|
|
@ -6,8 +6,8 @@ vdirsyncer
|
||||||
:target: https://builds.sr.ht/~whynothugo/vdirsyncer
|
:target: https://builds.sr.ht/~whynothugo/vdirsyncer
|
||||||
:alt: CI status
|
:alt: CI status
|
||||||
|
|
||||||
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=master
|
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=main
|
||||||
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=master
|
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=main
|
||||||
:alt: Codecov coverage report
|
:alt: Codecov coverage report
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/vdirsyncer/badge/
|
.. image:: https://readthedocs.org/projects/vdirsyncer/badge/
|
||||||
|
|
@ -23,7 +23,7 @@ vdirsyncer
|
||||||
:alt: Debian packages
|
:alt: Debian packages
|
||||||
|
|
||||||
.. image:: https://img.shields.io/pypi/l/vdirsyncer.svg
|
.. image:: https://img.shields.io/pypi/l/vdirsyncer.svg
|
||||||
:target: https://github.com/pimutils/vdirsyncer/blob/master/LICENCE
|
:target: https://github.com/pimutils/vdirsyncer/blob/main/LICENCE
|
||||||
:alt: licence: BSD
|
:alt: licence: BSD
|
||||||
|
|
||||||
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
||||||
|
|
@ -40,7 +40,7 @@ servers. It can also be used to synchronize calendars and/or addressbooks
|
||||||
between two servers directly.
|
between two servers directly.
|
||||||
|
|
||||||
It aims to be for calendars and contacts what `OfflineIMAP
|
It aims to be for calendars and contacts what `OfflineIMAP
|
||||||
<http://offlineimap.org/>`_ is for emails.
|
<https://www.offlineimap.org/>`_ is for emails.
|
||||||
|
|
||||||
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
|
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
|
||||||
|
|
||||||
|
|
@ -59,6 +59,15 @@ Links of interest
|
||||||
|
|
||||||
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
|
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
|
||||||
|
|
||||||
|
Dockerized
|
||||||
|
=================
|
||||||
|
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
|
||||||
|
Docker environment, you can check out the following GitHub Repository:
|
||||||
|
|
||||||
|
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
|
||||||
|
|
||||||
|
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
|
||||||
|
|
||||||
License
|
License
|
||||||
=======
|
=======
|
||||||
|
|
||||||
|
|
|
||||||
75
contrib/conflict_resolution/resolve_interactively.py
Executable file
75
contrib/conflict_resolution/resolve_interactively.py
Executable file
|
|
@ -0,0 +1,75 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Ask user to resolve a vdirsyncer sync conflict interactively.
|
||||||
|
|
||||||
|
Needs a way to ask the user.
|
||||||
|
The use of https://apps.kde.org/kdialog/ for GNU/Linux is hardcoded.
|
||||||
|
|
||||||
|
Depends on python>3.5 and KDialog.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
Ensure the file is executable and use it in the vdirsyncer.conf file, e.g.
|
||||||
|
|
||||||
|
conflict_resolution = ["command", "/home/bern/vdirsyncer/resolve_interactively.py"]
|
||||||
|
|
||||||
|
This file is Free Software under the following license:
|
||||||
|
SPDX-License-Identifier: BSD-3-Clause
|
||||||
|
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
|
||||||
|
Author: <bernhard.reiter@intevation.de>
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
KDIALOG = "/usr/bin/kdialog"
|
||||||
|
|
||||||
|
SUMMARY_PATTERN = re.compile("^(SUMMARY:.*)$", re.MULTILINE)
|
||||||
|
|
||||||
|
|
||||||
|
def get_summary(icalendar_text: str) -> str:
    """Return the first ``SUMMARY:`` line from an iCalendar text.

    Continuation lines (RFC 5545 line folding) are deliberately ignored:
    only the first physical ``SUMMARY:`` line is returned, which is
    enough for a short human-readable conflict prompt.

    :param icalendar_text: Raw iCalendar data as a single string.
    :return: The matching ``SUMMARY:...`` line, or a placeholder string
        when the text contains no SUMMARY line. (The original raised
        ``TypeError`` by subscripting ``None`` in that case.)
    """
    match = SUMMARY_PATTERN.search(icalendar_text)
    # A VCALENDAR without any SUMMARY line is legal (e.g. a bare VTODO);
    # fall back to a placeholder instead of crashing on match[1].
    return match[1] if match else "(no SUMMARY found)"
|
||||||
|
|
||||||
|
|
||||||
|
def main(ical1_filename: Path, ical2_filename: Path) -> None:
    """Ask the user via KDialog which of two conflicting items to keep.

    Shows a warning dialog with the SUMMARY of both items. Depending on
    the button pressed, one file is overwritten with the other's
    contents, or both are left untouched (conflict stays unresolved).

    :param ical1_filename: Path to the first conflicting item.
    :param ical2_filename: Path to the second conflicting item.
    """
    ical1 = ical1_filename.read_text()
    ical2 = ical2_filename.read_text()

    # kdialog maps the three buttons to exit codes 0 / 1 / 2.
    additional_args = ["--yes-label", "take first"]  # return code == 0
    additional_args += ["--no-label", "take second"]  # return code == 1
    additional_args += ["--cancel-label", "do not resolve"]  # return code == 2

    r = subprocess.run(
        args=[
            KDIALOG,
            "--warningyesnocancel",
            "There was a sync conflict, do you prefer the first entry: \n"
            f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
            "or the second entry:\n"
            f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
            *additional_args,
        ]
    )

    if r.returncode == 0:
        # "take first": overwrite the second item with the first.
        ical2_filename.write_text(ical1)
    elif r.returncode == 1:
        # "take second": overwrite the first item with the second.
        ical1_filename.write_text(ical2)
    # Any other exit code -- 2 ("do not resolve") or an unexpected status
    # from kdialog failing (e.g. no display) -- leaves both files
    # untouched, so vdirsyncer keeps the conflict. The original code
    # overwrote the first file for ANY code other than 0 and 2, which
    # silently destroyed data whenever kdialog crashed.
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Guard so the script's side effects only run when executed directly,
    # not when imported (e.g. by tools or tests).
    if len(sys.argv) != 3:
        # Wrong number of arguments: print the module docstring as usage.
        sys.stdout.write(__doc__)
    else:
        main(Path(sys.argv[1]), Path(sys.argv[2]))
|
||||||
|
|
@ -1,43 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
|
||||||
<!-- Blueprint for cron-like launchd plist -->
|
|
||||||
<!-- Replace @@PLACEHOLDERS@@ with appropriate values for your system/settings! -->
|
|
||||||
<plist version="1.0">
|
|
||||||
<dict>
|
|
||||||
<key>EnvironmentVariables</key>
|
|
||||||
<dict>
|
|
||||||
<!-- Locale to use for vdirsyncer, e.g. en_US.UTF-8 -->
|
|
||||||
<key>LANG</key>
|
|
||||||
<string>@@LOCALE@@</string>
|
|
||||||
<key>LC_ALL</key>
|
|
||||||
<string>@@LOCALE@@</string>
|
|
||||||
</dict>
|
|
||||||
<key>Label</key>
|
|
||||||
<string>vdirsyncer</string>
|
|
||||||
<key>WorkingDirectory</key>
|
|
||||||
<!-- working directory for vdirsyncer, usually the base directory where
|
|
||||||
vdirsyncer is installed, e.g. /usr/local/ -->
|
|
||||||
<string>@@WORKINGDIRECTORY@@</string>
|
|
||||||
<key>ProgramArguments</key>
|
|
||||||
<array>
|
|
||||||
<!-- full path to vdirsyncer binary -->
|
|
||||||
<string>@@VDIRSYNCER@@</string>
|
|
||||||
<!-- only log errors -->
|
|
||||||
<string>-v</string>
|
|
||||||
<string>ERROR</string>
|
|
||||||
<string>sync</string>
|
|
||||||
</array>
|
|
||||||
<key>RunAtLoad</key>
|
|
||||||
<true/>
|
|
||||||
<key>StartInterval</key>
|
|
||||||
<!-- Sync intervall in seconds -->
|
|
||||||
<integer>@@SYNCINTERVALL@@</integer>
|
|
||||||
<!-- For logging, redirect stdout & stderr -->
|
|
||||||
<!-- <key>StandardErrorPath</key> -->
|
|
||||||
<!-- Full path to stderr logfile, e.g. /tmp/vdirsyncer_err.log -->
|
|
||||||
<!-- <string>@@STDERRFILE@@</string> -->
|
|
||||||
<!-- Full path to stdout logfile, e.g. /tmp/vdirsyncer_out.log -->
|
|
||||||
<!-- <key>StandardOutPath</key> -->
|
|
||||||
<!-- <string>@@STDOUTFILE@@</string> -->
|
|
||||||
</dict>
|
|
||||||
</plist>
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=Synchronize calendars and contacts
|
Description=Synchronize calendars and contacts
|
||||||
Documentation=https://vdirsyncer.readthedocs.org/
|
Documentation=https://vdirsyncer.readthedocs.org/
|
||||||
|
StartLimitBurst=2
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
ExecStart=/usr/bin/vdirsyncer sync
|
ExecStart=/usr/bin/vdirsyncer sync
|
||||||
|
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
||||||
sphinx != 1.4.7
|
|
||||||
sphinx_rtd_theme
|
|
||||||
setuptools_scm
|
|
||||||
10
docs/conf.py
10
docs/conf.py
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
|
@ -18,7 +20,7 @@ copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
|
||||||
release = get_distribution("vdirsyncer").version
|
release = get_distribution("vdirsyncer").version
|
||||||
version = ".".join(release.split(".")[:2]) # The short X.Y version.
|
version = ".".join(release.split(".")[:2]) # The short X.Y version.
|
||||||
|
|
||||||
rst_epilog = ".. |vdirsyncer_version| replace:: %s" % release
|
rst_epilog = f".. |vdirsyncer_version| replace:: {release}"
|
||||||
|
|
||||||
exclude_patterns = ["_build"]
|
exclude_patterns = ["_build"]
|
||||||
|
|
||||||
|
|
@ -35,9 +37,7 @@ except ImportError:
|
||||||
html_theme = "default"
|
html_theme = "default"
|
||||||
if not on_rtd:
|
if not on_rtd:
|
||||||
print("-" * 74)
|
print("-" * 74)
|
||||||
print(
|
print("Warning: sphinx-rtd-theme not installed, building with default theme.")
|
||||||
"Warning: sphinx-rtd-theme not installed, building with default " "theme."
|
|
||||||
)
|
|
||||||
print("-" * 74)
|
print("-" * 74)
|
||||||
|
|
||||||
html_static_path = ["_static"]
|
html_static_path = ["_static"]
|
||||||
|
|
@ -76,7 +76,7 @@ def github_issue_role(name, rawtext, text, lineno, inliner, options=None, conten
|
||||||
try:
|
try:
|
||||||
issue_num = int(text)
|
issue_num = int(text)
|
||||||
if issue_num <= 0:
|
if issue_num <= 0:
|
||||||
raise ValueError()
|
raise ValueError
|
||||||
except ValueError:
|
except ValueError:
|
||||||
msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
|
msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
|
||||||
prb = inliner.problematic(rawtext, rawtext, msg)
|
prb = inliner.problematic(rawtext, rawtext, msg)
|
||||||
|
|
|
||||||
156
docs/config.rst
156
docs/config.rst
|
|
@ -61,7 +61,8 @@ Pair Section
|
||||||
sync`` is executed. See also :ref:`collections_tutorial`.
|
sync`` is executed. See also :ref:`collections_tutorial`.
|
||||||
|
|
||||||
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
|
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
|
||||||
autodiscovery on a specific storage.
|
autodiscovery on a specific storage. It means all the collections on side A /
|
||||||
|
side B.
|
||||||
|
|
||||||
If the collection you want to sync doesn't have the same name on each side,
|
If the collection you want to sync doesn't have the same name on each side,
|
||||||
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
|
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
|
||||||
|
|
@ -71,8 +72,8 @@ Pair Section
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
|
|
||||||
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the
|
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
|
||||||
collections from side B, and also the collections named "foo" and "bar".
|
the collections from side B, and also the collections named "foo" and "bar".
|
||||||
|
|
||||||
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
|
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
|
||||||
existing collections on either side.
|
existing collections on either side.
|
||||||
|
|
@ -116,10 +117,26 @@ Pair Section
|
||||||
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
||||||
metasync`` is executed. Example::
|
metasync`` is executed. Example::
|
||||||
|
|
||||||
metadata = ["color", "displayname"]
|
metadata = ["color", "displayname", "description", "order"]
|
||||||
|
|
||||||
This synchronizes the ``color`` and the ``displayname`` properties. The
|
This synchronizes the following properties:
|
||||||
``conflict_resolution`` parameter applies here as well.
|
|
||||||
|
- color: ``http://apple.com/ns/ical/:calendar-color``
|
||||||
|
- displayname: ``DAV:displayname``
|
||||||
|
- description: ``CalDAV:calendar-description`` and ``CardDAV:addressbook-description``
|
||||||
|
- order: ``http://apple.com/ns/ical/:calendar-order``
|
||||||
|
|
||||||
|
The ``conflict_resolution`` parameter applies for these properties too.
|
||||||
|
|
||||||
|
.. _implicit_def:
|
||||||
|
|
||||||
|
- ``implicit``: Opt into implicitly creating collections. Example::
|
||||||
|
|
||||||
|
implicit = "create"
|
||||||
|
|
||||||
|
When set to "create", missing collections are automatically created on both
|
||||||
|
sides during sync without prompting the user. This simplifies workflows where
|
||||||
|
all collections should be synchronized bidirectionally.
|
||||||
|
|
||||||
.. _storage_config:
|
.. _storage_config:
|
||||||
|
|
||||||
|
|
@ -169,7 +186,7 @@ CalDAV and CardDAV
|
||||||
url = "..."
|
url = "..."
|
||||||
#username = ""
|
#username = ""
|
||||||
#password = ""
|
#password = ""
|
||||||
#verify = true
|
#verify = /path/to/custom_ca.pem
|
||||||
#auth = null
|
#auth = null
|
||||||
#useragent = "vdirsyncer/0.16.4"
|
#useragent = "vdirsyncer/0.16.4"
|
||||||
#verify_fingerprint = null
|
#verify_fingerprint = null
|
||||||
|
|
@ -202,12 +219,10 @@ CalDAV and CardDAV
|
||||||
:param url: Base URL or an URL to a calendar.
|
:param url: Base URL or an URL to a calendar.
|
||||||
:param username: Username for authentication.
|
:param username: Username for authentication.
|
||||||
:param password: Password for authentication.
|
:param password: Password for authentication.
|
||||||
:param verify: Verify SSL certificate, default True. This can also be a
|
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||||
local path to a self-signed SSL certificate. See :ref:`ssl-tutorial`
|
See :ref:`ssl-tutorial` for more information.
|
||||||
for more information.
|
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||||
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
|
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||||
expected server certificate. See :ref:`ssl-tutorial` for more
|
|
||||||
information.
|
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
default is preemptive Basic auth, sending credentials even if server
|
default is preemptive Basic auth, sending credentials even if server
|
||||||
didn't request them. This saves from an additional roundtrip per
|
didn't request them. This saves from an additional roundtrip per
|
||||||
|
|
@ -229,21 +244,20 @@ CalDAV and CardDAV
|
||||||
url = "..."
|
url = "..."
|
||||||
#username = ""
|
#username = ""
|
||||||
#password = ""
|
#password = ""
|
||||||
#verify = true
|
#verify = /path/to/custom_ca.pem
|
||||||
#auth = null
|
#auth = null
|
||||||
#useragent = "vdirsyncer/0.16.4"
|
#useragent = "vdirsyncer/0.16.4"
|
||||||
#verify_fingerprint = null
|
#verify_fingerprint = null
|
||||||
#auth_cert = null
|
#auth_cert = null
|
||||||
|
#use_vcard_4 = false
|
||||||
|
|
||||||
:param url: Base URL or an URL to an addressbook.
|
:param url: Base URL or an URL to an addressbook.
|
||||||
:param username: Username for authentication.
|
:param username: Username for authentication.
|
||||||
:param password: Password for authentication.
|
:param password: Password for authentication.
|
||||||
:param verify: Verify SSL certificate, default True. This can also be a
|
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||||
local path to a self-signed SSL certificate. See
|
See :ref:`ssl-tutorial` for more information.
|
||||||
:ref:`ssl-tutorial` for more information.
|
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||||
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected
|
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||||
server certificate. See :ref:`ssl-tutorial` for
|
|
||||||
more information.
|
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
default is preemptive Basic auth, sending credentials even if
|
default is preemptive Basic auth, sending credentials even if
|
||||||
server didn't request them. This saves from an additional
|
server didn't request them. This saves from an additional
|
||||||
|
|
@ -253,6 +267,7 @@ CalDAV and CardDAV
|
||||||
certificate and the key or a list of paths to the files
|
certificate and the key or a list of paths to the files
|
||||||
with them.
|
with them.
|
||||||
:param useragent: Default ``vdirsyncer``.
|
:param useragent: Default ``vdirsyncer``.
|
||||||
|
:param use_vcard_4: Whether the server uses vCard 4.0.
|
||||||
|
|
||||||
Google
|
Google
|
||||||
++++++
|
++++++
|
||||||
|
|
@ -266,6 +281,14 @@ in terms of data safety**. See `this blog post
|
||||||
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
|
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
|
||||||
up your data.
|
up your data.
|
||||||
|
|
||||||
|
Another caveat is that Google group labels are not synced with vCard's
|
||||||
|
`CATEGORIES <https://www.rfc-editor.org/rfc/rfc6350#section-6.7.1>`_ property
|
||||||
|
(also see :gh:`814` and
|
||||||
|
`upstream issue #36761530 <https://issuetracker.google.com/issues/36761530>`_
|
||||||
|
for reference) and the
|
||||||
|
`BDAY <https://www.rfc-editor.org/rfc/rfc6350#section-6.2.5>`_ property is not
|
||||||
|
synced when only partial date information is present (e.g. the year is missing).
|
||||||
|
|
||||||
At first run you will be asked to authorize application for Google account
|
At first run you will be asked to authorize application for Google account
|
||||||
access.
|
access.
|
||||||
|
|
||||||
|
|
@ -277,25 +300,29 @@ Furthermore you need to register vdirsyncer as an application yourself to
|
||||||
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
|
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
|
||||||
Service to hardcode those into opensource software [googleterms]_:
|
Service to hardcode those into opensource software [googleterms]_:
|
||||||
|
|
||||||
1. Go to the `Google API Manager <https://console.developers.google.com>`_ and
|
1. Go to the `Google API Manager <https://console.developers.google.com>`_
|
||||||
create a new project under any name.
|
|
||||||
|
2. Create a new project under any name.
|
||||||
|
|
||||||
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
|
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
|
||||||
Calendar and Contacts APIs, those are different and won't work). There should
|
Calendar and Contacts APIs, those are different and won't work). There should
|
||||||
be a searchbox where you can just enter those terms.
|
be a search box where you can just enter those terms.
|
||||||
|
|
||||||
3. In the sidebar, select "Credentials" and create a new "OAuth Client ID". The
|
3. In the sidebar, select "Credentials", then "Create Credentials" and create a
|
||||||
application type is "Other".
|
new "OAuth Client ID".
|
||||||
|
|
||||||
You'll be prompted to create a OAuth consent screen first. Fill out that
|
You'll be prompted to create a OAuth consent screen first. Fill out that
|
||||||
form however you like.
|
form however you like.
|
||||||
|
|
||||||
|
After setting up the consent screen, finish creating the new "OAuth Client
|
||||||
|
ID'. The correct application type is "Desktop application".
|
||||||
|
|
||||||
4. Finally you should have a Client ID and a Client secret. Provide these in
|
4. Finally you should have a Client ID and a Client secret. Provide these in
|
||||||
your storage config.
|
your storage config.
|
||||||
|
|
||||||
The ``token_file`` parameter should be a filepath where vdirsyncer can later
|
The ``token_file`` parameter should be a path to a file where vdirsyncer can
|
||||||
store authentication-related data. You do not need to create the file itself
|
later store authentication-related data. You do not need to create the file
|
||||||
or write anything to it.
|
itself or write anything to it.
|
||||||
|
|
||||||
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
|
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
|
||||||
section "Confidential Matters".
|
section "Confidential Matters".
|
||||||
|
|
@ -303,7 +330,7 @@ or write anything to it.
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
You need to configure which calendars Google should offer vdirsyncer using
|
You need to configure which calendars Google should offer vdirsyncer using
|
||||||
a rather hidden `settings page
|
a secret `settings page
|
||||||
<https://calendar.google.com/calendar/syncselect>`_.
|
<https://calendar.google.com/calendar/syncselect>`_.
|
||||||
|
|
||||||
.. storage:: google_calendar
|
.. storage:: google_calendar
|
||||||
|
|
@ -343,55 +370,9 @@ or write anything to it.
|
||||||
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
||||||
API Manager.
|
API Manager.
|
||||||
|
|
||||||
EteSync
|
The current flow is not ideal, but Google has deprecated the previous APIs used
|
||||||
+++++++
|
for this without providing a suitable replacement. See :gh:`975` for discussion
|
||||||
|
on the topic.
|
||||||
`EteSync <https://www.etesync.com/>`_ is a new cloud provider for end to end
|
|
||||||
encrypted contacts and calendar storage. Vdirsyncer contains **experimental**
|
|
||||||
support for it.
|
|
||||||
|
|
||||||
To use it, you need to install some optional dependencies::
|
|
||||||
|
|
||||||
pip install vdirsyncer[etesync]
|
|
||||||
|
|
||||||
On first usage you will be prompted for the service password and the encryption
|
|
||||||
password. Neither are stored.
|
|
||||||
|
|
||||||
.. storage:: etesync_contacts
|
|
||||||
|
|
||||||
Contacts for etesync.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[storage example_for_etesync_contacts]
|
|
||||||
email = ...
|
|
||||||
secrets_dir = ...
|
|
||||||
#server_path = ...
|
|
||||||
#db_path = ...
|
|
||||||
|
|
||||||
:param email: The email address of your account.
|
|
||||||
:param secrets_dir: A directory where vdirsyncer can store the encryption
|
|
||||||
key and authentication token.
|
|
||||||
:param server_url: Optional. URL to the root of your custom server.
|
|
||||||
:param db_path: Optional. Use a different path for the database.
|
|
||||||
|
|
||||||
.. storage:: etesync_calendars
|
|
||||||
|
|
||||||
Calendars for etesync.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[storage example_for_etesync_calendars]
|
|
||||||
email = ...
|
|
||||||
secrets_dir = ...
|
|
||||||
#server_path = ...
|
|
||||||
#db_path = ...
|
|
||||||
|
|
||||||
:param email: The email address of your account.
|
|
||||||
:param secrets_dir: A directory where vdirsyncer can store the encryption
|
|
||||||
key and authentication token.
|
|
||||||
:param server_url: Optional. URL to the root of your custom server.
|
|
||||||
:param db_path: Optional. Use a different path for the database.
|
|
||||||
|
|
||||||
Local
|
Local
|
||||||
+++++
|
+++++
|
||||||
|
|
@ -408,6 +389,7 @@ Local
|
||||||
fileext = "..."
|
fileext = "..."
|
||||||
#encoding = "utf-8"
|
#encoding = "utf-8"
|
||||||
#post_hook = null
|
#post_hook = null
|
||||||
|
#pre_deletion_hook = null
|
||||||
#fileignoreext = ".tmp"
|
#fileignoreext = ".tmp"
|
||||||
|
|
||||||
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
|
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
|
||||||
|
|
@ -429,6 +411,8 @@ Local
|
||||||
:param post_hook: A command to call for each item creation and
|
:param post_hook: A command to call for each item creation and
|
||||||
modification. The command will be called with the path of the
|
modification. The command will be called with the path of the
|
||||||
new/updated file.
|
new/updated file.
|
||||||
|
:param pre_deletion_hook: A command to call for each item deletion.
|
||||||
|
The command will be called with the path of the deleted file.
|
||||||
:param fileignoreext: The file extension to ignore. It is only useful
|
:param fileignoreext: The file extension to ignore. It is only useful
|
||||||
if fileext is set to the empty string. The default is ``.tmp``.
|
if fileext is set to the empty string. The default is ``.tmp``.
|
||||||
|
|
||||||
|
|
@ -510,6 +494,7 @@ leads to an error.
|
||||||
[storage holidays_remote]
|
[storage holidays_remote]
|
||||||
type = "http"
|
type = "http"
|
||||||
url = https://example.com/holidays_from_hicksville.ics
|
url = https://example.com/holidays_from_hicksville.ics
|
||||||
|
#filter_hook = null
|
||||||
|
|
||||||
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
|
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
|
||||||
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
|
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
|
||||||
|
|
@ -522,12 +507,10 @@ leads to an error.
|
||||||
:param url: URL to the ``.ics`` file.
|
:param url: URL to the ``.ics`` file.
|
||||||
:param username: Username for authentication.
|
:param username: Username for authentication.
|
||||||
:param password: Password for authentication.
|
:param password: Password for authentication.
|
||||||
:param verify: Verify SSL certificate, default True. This can also be a
|
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||||
local path to a self-signed SSL certificate. See :ref:`ssl-tutorial`
|
See :ref:`ssl-tutorial` for more information.
|
||||||
for more information.
|
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||||
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
|
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||||
expected server certificate. See :ref:`ssl-tutorial` for more
|
|
||||||
information.
|
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
default is preemptive Basic auth, sending credentials even if server
|
default is preemptive Basic auth, sending credentials even if server
|
||||||
didn't request them. This saves from an additional roundtrip per
|
didn't request them. This saves from an additional roundtrip per
|
||||||
|
|
@ -536,3 +519,8 @@ leads to an error.
|
||||||
:param auth_cert: Optional. Either a path to a certificate with a client
|
:param auth_cert: Optional. Either a path to a certificate with a client
|
||||||
certificate and the key or a list of paths to the files with them.
|
certificate and the key or a list of paths to the files with them.
|
||||||
:param useragent: Default ``vdirsyncer``.
|
:param useragent: Default ``vdirsyncer``.
|
||||||
|
:param filter_hook: Optional. A filter command to call for each fetched
|
||||||
|
item, passed in raw form to stdin and returned via stdout.
|
||||||
|
If nothing is returned by the filter command, the item is skipped.
|
||||||
|
This can be used to alter fields as needed when dealing with providers
|
||||||
|
generating malformed events.
|
||||||
|
|
|
||||||
|
|
@ -9,7 +9,4 @@ Support and Contact
|
||||||
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
|
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
|
||||||
concrete bug reports and feature requests.
|
concrete bug reports and feature requests.
|
||||||
|
|
||||||
* Lastly, you can also `contact the author directly
|
* For security issues, contact ``contact@pimutils.org``.
|
||||||
<https://unterwaditzer.net/contact.html>`_. Do this for security issues. If
|
|
||||||
that doesn't work out (i.e. if I don't respond within one week), use
|
|
||||||
``contact@pimutils.org``.
|
|
||||||
|
|
|
||||||
|
|
@ -79,22 +79,20 @@ For many patches, it might suffice to just let CI run the tests. However,
|
||||||
CI is slow, so you might want to run them locally too. For this, set up a
|
CI is slow, so you might want to run them locally too. For this, set up a
|
||||||
virtualenv_ and run this inside of it::
|
virtualenv_ and run this inside of it::
|
||||||
|
|
||||||
# install:
|
# Install development dependencies, including:
|
||||||
# - vdirsyncer from the repo into the virtualenv
|
# - vdirsyncer from the repo into the virtualenv
|
||||||
# - stylecheckers (flake8) and code formatters (autopep8)
|
# - style checks and formatting (ruff)
|
||||||
make install-dev
|
make install-dev
|
||||||
|
|
||||||
# Install git commit hook for some extra linting and checking
|
# Install git commit hook for some extra linting and checking
|
||||||
pre-commit install
|
pre-commit install
|
||||||
|
|
||||||
# Install development dependencies
|
|
||||||
make install-dev
|
|
||||||
|
|
||||||
Then you can run::
|
Then you can run::
|
||||||
|
|
||||||
make test # The normal testsuite
|
pytest # The normal testsuite
|
||||||
make style # Stylechecker
|
pre-commit run --all # Run all linters (which also run via pre-commit)
|
||||||
make docs # Build the HTML docs, output is at docs/_build/html/
|
make -C docs html # Build the HTML docs, output is at docs/_build/html/
|
||||||
|
make -C docs linkcheck # Check docs for any broken links
|
||||||
|
|
||||||
The ``Makefile`` has a lot of options that allow you to control which tests are
|
The ``Makefile`` has a lot of options that allow you to control which tests are
|
||||||
run, and which servers are tested. Take a look at its code where they are all
|
run, and which servers are tested. Take a look at its code where they are all
|
||||||
|
|
|
||||||
|
|
@ -2,23 +2,14 @@
|
||||||
Donations
|
Donations
|
||||||
=========
|
=========
|
||||||
|
|
||||||
|
vdirsyncer is and will always be free and open source software. We appreciate
|
||||||
|
sponsors willing to fund our continued work on it.
|
||||||
|
|
||||||
If you found my work useful, please consider donating. Thank you!
|
If you found my work useful, please consider donating. Thank you!
|
||||||
|
|
||||||
- Bitcoin: ``16sSHxZm263WHR9P9PJjCxp64jp9ooXKVt``
|
- Bitcoin: ``13p42uWDL62bNRH3KWA6cSpSgvnHy1fs2E``.
|
||||||
|
- Sponsor via one-time tips or recurring donations `via Ko-fi`_.
|
||||||
|
- Sponsor via recurring donations `via liberapay`_.
|
||||||
|
|
||||||
- `PayPal.me <https://www.paypal.me/untitaker>`_
|
.. _via Ko-fi: https://ko-fi.com/whynothugo
|
||||||
|
.. _via liberapay: https://liberapay.com/WhyNotHugo/
|
||||||
- `Bountysource <https://www.bountysource.com/teams/vdirsyncer>`_ is useful for
|
|
||||||
funding work on a specific GitHub issue.
|
|
||||||
|
|
||||||
- There's also `Bountysource Salt
|
|
||||||
<https://salt.bountysource.com/teams/vdirsyncer>`_, for one-time and
|
|
||||||
recurring donations.
|
|
||||||
|
|
||||||
- Donations via Bountysource are publicly listed. Use PayPal if you dislike
|
|
||||||
that.
|
|
||||||
|
|
||||||
- `Flattr
|
|
||||||
<https://flattr.com/submit/auto?user_id=untitaker&url=https%3A%2F%2Fgithub.com%2Fpimutils%2Fvdirsyncer>`_
|
|
||||||
or `Gratipay <https://gratipay.com/vdirsyncer/>`_ can be used for
|
|
||||||
recurring donations.
|
|
||||||
|
|
|
||||||
|
|
@ -7,17 +7,18 @@ Installation
|
||||||
OS/distro packages
|
OS/distro packages
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
The following packages are user-contributed and were up-to-date at the time of
|
The following packages are community-contributed and were up-to-date at the
|
||||||
writing:
|
time of writing:
|
||||||
|
|
||||||
- `ArchLinux <https://www.archlinux.org/packages/community/any/vdirsyncer/>`_
|
- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
|
||||||
- `Ubuntu and Debian, x86_64-only
|
- `Ubuntu and Debian, x86_64-only
|
||||||
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
|
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
|
||||||
in the official repositories but may be out of date)
|
in the official repositories but may be out of date)
|
||||||
- `GNU Guix <https://www.gnu.org/software/guix/package-list.html#vdirsyncer>`_
|
- `GNU Guix <https://packages.guix.gnu.org/packages/vdirsyncer/>`_
|
||||||
- `OS X (homebrew) <http://braumeister.org/formula/vdirsyncer>`_
|
- `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_
|
||||||
- `BSD (pkgsrc) <http://pkgsrc.se/time/py-vdirsyncer>`_
|
- `NetBSD <https://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/time/py-vdirsyncer/index.html>`_
|
||||||
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
|
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
|
||||||
|
- `Slackware (SlackBuild at Slackbuilds.org) <https://slackbuilds.org/repository/15.0/network/vdirsyncer/>`_
|
||||||
|
|
||||||
We only support the latest version of vdirsyncer, which is at the time of this
|
We only support the latest version of vdirsyncer, which is at the time of this
|
||||||
writing |vdirsyncer_version|. Please **do not file bugs if you use an older
|
writing |vdirsyncer_version|. Please **do not file bugs if you use an older
|
||||||
|
|
@ -41,27 +42,53 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
|
||||||
use Python's package manager "pip". First, you'll have to check that the
|
use Python's package manager "pip". First, you'll have to check that the
|
||||||
following things are installed:
|
following things are installed:
|
||||||
|
|
||||||
- Python 3.7+ and pip.
|
- Python 3.9 to 3.13 and pip.
|
||||||
- ``libxml`` and ``libxslt``
|
- ``libxml`` and ``libxslt``
|
||||||
- ``zlib``
|
- ``zlib``
|
||||||
- Linux or OS X. **Windows is not supported**, see :gh:`535`.
|
- Linux or macOS. **Windows is not supported**, see :gh:`535`.
|
||||||
|
|
||||||
On Linux systems, using the distro's package manager is the best
|
On Linux systems, using the distro's package manager is the best
|
||||||
way to do this, for example, using Ubuntu::
|
way to do this, for example, using Ubuntu::
|
||||||
|
|
||||||
sudo apt-get install libxml2 libxslt1.1 zlib1g python
|
sudo apt-get install libxml2 libxslt1.1 zlib1g python3
|
||||||
|
|
||||||
Then you have several options. The following text applies for most Python
|
Then you have several options. The following text applies for most Python
|
||||||
software by the way.
|
software by the way.
|
||||||
|
|
||||||
|
pipx: The clean, easy way
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
pipx_ is a new package manager for Python-based software that automatically
|
||||||
|
sets up a virtual environment for each program it installs. Please note that
|
||||||
|
installing via pipx will not include manual pages nor systemd services.
|
||||||
|
|
||||||
|
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
|
||||||
|
|
||||||
|
Assuming that pipx is installed, vdirsyncer can be installed with::
|
||||||
|
|
||||||
|
pipx install vdirsyncer
|
||||||
|
|
||||||
|
It can later be updated to the latest version with::
|
||||||
|
|
||||||
|
pipx upgrade vdirsyncer
|
||||||
|
|
||||||
|
And can be uninstalled with::
|
||||||
|
|
||||||
|
pipx uninstall vdirsyncer
|
||||||
|
|
||||||
|
This last command will remove vdirsyncer and any dependencies installed into
|
||||||
|
the above location.
|
||||||
|
|
||||||
|
.. _pipx: https://github.com/pipxproject/pipx
|
||||||
|
|
||||||
The dirty, easy way
|
The dirty, easy way
|
||||||
~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
The easiest way to install vdirsyncer at this point would be to run::
|
If pipx is not available on your distribution, the easiest way to install
|
||||||
|
vdirsyncer at this point would be to run::
|
||||||
|
|
||||||
pip install --user --ignore-installed vdirsyncer
|
pip install --ignore-installed vdirsyncer
|
||||||
|
|
||||||
- ``--user`` is to install without root rights (into your home directory)
|
|
||||||
- ``--ignore-installed`` is to work around Debian's potentially broken packages
|
- ``--ignore-installed`` is to work around Debian's potentially broken packages
|
||||||
(see :ref:`debian-urllib3`).
|
(see :ref:`debian-urllib3`).
|
||||||
|
|
||||||
|
|
@ -92,25 +119,4 @@ This method has two advantages:
|
||||||
distro-specific issues.
|
distro-specific issues.
|
||||||
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
||||||
|
|
||||||
The clean, easy way
|
|
||||||
~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
pipx_ is a new package manager for Python-based software that automatically
|
|
||||||
sets up a virtualenv for each program you install. Assuming you have it
|
|
||||||
installed on your operating system, you can do::
|
|
||||||
|
|
||||||
pipx install vdirsyncer
|
|
||||||
|
|
||||||
and ``~/.local/pipx/venvs/vdirsyncer`` will be your new vdirsyncer installation. To
|
|
||||||
update vdirsyncer to the latest version::
|
|
||||||
|
|
||||||
pipx upgrade vdirsyncer
|
|
||||||
|
|
||||||
If you're done with vdirsyncer, you can do::
|
|
||||||
|
|
||||||
pipx uninstall vdirsyncer
|
|
||||||
|
|
||||||
and vdirsyncer will be uninstalled, including its dependencies.
|
|
||||||
|
|
||||||
.. _virtualenv: https://virtualenv.readthedocs.io/
|
.. _virtualenv: https://virtualenv.readthedocs.io/
|
||||||
.. _pipx: https://github.com/pipxproject/pipx
|
|
||||||
|
|
|
||||||
|
|
@ -38,6 +38,12 @@ You can fetch the username as well::
|
||||||
|
|
||||||
Or really any kind of parameter in a storage section.
|
Or really any kind of parameter in a storage section.
|
||||||
|
|
||||||
|
You can also pass the command as a string to be executed in a shell::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
...
|
||||||
|
password.fetch = ["shell", "~/.local/bin/get-my-password | head -n1"]
|
||||||
|
|
||||||
With pass_ for example, you might find yourself writing something like this in
|
With pass_ for example, you might find yourself writing something like this in
|
||||||
your configuration file::
|
your configuration file::
|
||||||
|
|
||||||
|
|
@ -72,3 +78,19 @@ You can also simply prompt for the password::
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
username = "myusername"
|
username = "myusername"
|
||||||
password.fetch = ["prompt", "Password for CalDAV"]
|
password.fetch = ["prompt", "Password for CalDAV"]
|
||||||
|
|
||||||
|
Environment variable
|
||||||
|
===============
|
||||||
|
|
||||||
|
To read the password from an environment variable::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
username = "myusername"
|
||||||
|
password.fetch = ["command", "printenv", "DAV_PW"]
|
||||||
|
|
||||||
|
This is especially handy if you use the same password multiple times
|
||||||
|
(say, for a CardDAV and a CalDAV storage).
|
||||||
|
On bash, you can read and export the password without printing::
|
||||||
|
|
||||||
|
read -s DAV_PW "DAV Password: " && export DAV_PW
|
||||||
|
|
|
||||||
|
|
@ -46,15 +46,16 @@ You can install the all development dependencies with::
|
||||||
make install-dev
|
make install-dev
|
||||||
|
|
||||||
You probably don't want this since it will use pip to download the
|
You probably don't want this since it will use pip to download the
|
||||||
dependencies. Alternatively you can find the testing dependencies in
|
dependencies. Alternatively test dependencies are listed as ``test`` optional
|
||||||
``test-requirements.txt``, again with lower-bound version requirements.
|
dependencies in ``pyproject.toml``, again with lower-bound version
|
||||||
|
requirements.
|
||||||
|
|
||||||
You also have to have vdirsyncer fully installed at this point. Merely
|
You also have to have vdirsyncer fully installed at this point. Merely
|
||||||
``cd``-ing into the tarball will not be sufficient.
|
``cd``-ing into the tarball will not be sufficient.
|
||||||
|
|
||||||
Running the tests happens with::
|
Running the tests happens with::
|
||||||
|
|
||||||
make test
|
pytest
|
||||||
|
|
||||||
Hypothesis will randomly generate test input. If you care about deterministic
|
Hypothesis will randomly generate test input. If you care about deterministic
|
||||||
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
||||||
|
|
@ -73,10 +74,11 @@ Using Sphinx_ you can generate the documentation you're reading right now in a
|
||||||
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
|
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
|
||||||
take care of the HTML docs' formatting.
|
take care of the HTML docs' formatting.
|
||||||
|
|
||||||
You can find a list of dependencies in ``docs-requirements.txt``. Again, you
|
You can find a list of dependencies in ``pyproject.toml``, in the
|
||||||
can install those using pip with::
|
``project.optional-dependencies`` section as ``docs``. Again, you can install
|
||||||
|
those using pip with::
|
||||||
|
|
||||||
make install-docs
|
pip install '.[docs]'
|
||||||
|
|
||||||
Then change into the ``docs/`` directory and build whatever format you want
|
Then change into the ``docs/`` directory and build whatever format you want
|
||||||
using the ``Makefile`` in there (run ``make`` for the formats you can build).
|
using the ``Makefile`` in there (run ``make`` for the formats you can build).
|
||||||
|
|
|
||||||
|
|
@ -18,5 +18,5 @@ package that don't play well with packages assuming a normal ``requests``. This
|
||||||
is due to stubbornness on both sides.
|
is due to stubbornness on both sides.
|
||||||
|
|
||||||
See :gh:`82` and :gh:`140` for past discussions. You have one option to work
|
See :gh:`82` and :gh:`140` for past discussions. You have one option to work
|
||||||
around this, that is, to install vdirsyncer in a virtualenv, see
|
around this, that is, to install vdirsyncer in a virtual environment, see
|
||||||
:ref:`manual-installation`.
|
:ref:`manual-installation`.
|
||||||
|
|
|
||||||
|
|
@ -14,21 +14,14 @@ To pin the certificate by fingerprint::
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
...
|
...
|
||||||
verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA"
|
verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
|
||||||
#verify = false # Optional: Disable CA validation, useful for self-signed certs
|
|
||||||
|
|
||||||
SHA1-, SHA256- or MD5-Fingerprints can be used. They're detected by their
|
SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported.
|
||||||
length.
|
CA validation is disabled when pinning a fingerprint.
|
||||||
|
|
||||||
You can use the following command for obtaining a SHA-1 fingerprint::
|
You can use the following command for obtaining a SHA256 fingerprint::
|
||||||
|
|
||||||
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint
|
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
|
||||||
|
|
||||||
Note that ``verify_fingerprint`` doesn't suffice for vdirsyncer to work with
|
|
||||||
self-signed certificates (or certificates that are not in your trust store). You
|
|
||||||
most likely need to set ``verify = false`` as well. This disables verification
|
|
||||||
of the SSL certificate's expiration time and the existence of it in your trust
|
|
||||||
store, all that's verified now is the fingerprint.
|
|
||||||
|
|
||||||
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
|
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
|
||||||
that you can forget about all of that. It is easier to deploy a free
|
that you can forget about all of that. It is easier to deploy a free
|
||||||
|
|
@ -47,22 +40,16 @@ To point vdirsyncer to a custom set of root CAs::
|
||||||
...
|
...
|
||||||
verify = "/path/to/cert.pem"
|
verify = "/path/to/cert.pem"
|
||||||
|
|
||||||
Vdirsyncer uses the requests_ library, which, by default, `uses its own set of
|
Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext
|
||||||
trusted CAs
|
https://docs.python.org/3/library/ssl.html#ssl.SSLContext`_ by default.
|
||||||
<http://www.python-requests.org/en/latest/user/advanced/#ca-certificates>`_.
|
|
||||||
|
|
||||||
However, the actual behavior depends on how you have installed it. Many Linux
|
There are cases where certificate validation fails even though you can access
|
||||||
distributions patch their ``python-requests`` package to use the system
|
the server fine through e.g. your browser. This usually indicates that your
|
||||||
certificate CAs. Normally these two stores are similar enough for you to not
|
installation of ``python`` or the ``aiohttp`` or library is somehow broken. In
|
||||||
care.
|
such cases, it makes sense to explicitly set ``verify`` or
|
||||||
|
``verify_fingerprint`` as shown above.
|
||||||
|
|
||||||
But there are cases where certificate validation fails even though you can
|
.. _aiohttp: https://docs.aiohttp.org/en/stable/index.html
|
||||||
access the server fine through e.g. your browser. This usually indicates that
|
|
||||||
your installation of the ``requests`` library is somehow broken. In such cases,
|
|
||||||
it makes sense to explicitly set ``verify`` or ``verify_fingerprint`` as shown
|
|
||||||
above.
|
|
||||||
|
|
||||||
.. _requests: http://www.python-requests.org/
|
|
||||||
|
|
||||||
.. _ssl-client-certs:
|
.. _ssl-client-certs:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -16,7 +16,7 @@ Configuration
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
- The `config.example from the repository
|
- The `config.example from the repository
|
||||||
<https://github.com/pimutils/vdirsyncer/blob/master/config.example>`_
|
<https://github.com/pimutils/vdirsyncer/blob/main/config.example>`_
|
||||||
contains a very terse version of this.
|
contains a very terse version of this.
|
||||||
|
|
||||||
- In this example we set up contacts synchronization, but calendar sync
|
- In this example we set up contacts synchronization, but calendar sync
|
||||||
|
|
@ -176,8 +176,11 @@ as a file called ``color`` within the calendar folder.
|
||||||
More information about collections
|
More information about collections
|
||||||
----------------------------------
|
----------------------------------
|
||||||
|
|
||||||
"Collection" is a collective term for addressbooks and calendars. Each
|
"Collection" is a collective term for addressbooks and calendars. A Cardav or
|
||||||
collection from a storage has a "collection name", a unique identifier for each
|
Caldav server can contains several "collections" which correspond to several
|
||||||
|
addressbooks or calendar.
|
||||||
|
|
||||||
|
Each collection from a storage has a "collection name", a unique identifier for each
|
||||||
collection. In the case of :storage:`filesystem`-storage, this is the name of the
|
collection. In the case of :storage:`filesystem`-storage, this is the name of the
|
||||||
directory that represents the collection, in the case of the DAV-storages this
|
directory that represents the collection, in the case of the DAV-storages this
|
||||||
is the last segment of the URL. We use this identifier in the ``collections``
|
is the last segment of the URL. We use this identifier in the ``collections``
|
||||||
|
|
|
||||||
|
|
@ -37,7 +37,7 @@ Further applications, with missing pages:
|
||||||
|
|
||||||
.. _khal: http://lostpackets.de/khal/
|
.. _khal: http://lostpackets.de/khal/
|
||||||
.. _dayplanner: http://www.day-planner.org/
|
.. _dayplanner: http://www.day-planner.org/
|
||||||
.. _Orage: http://www.kolumbus.fi/~w408237/orage/
|
.. _Orage: https://gitlab.xfce.org/apps/orage
|
||||||
.. _rainlendar: http://www.rainlendar.net/
|
.. _rainlendar: http://www.rainlendar.net/
|
||||||
.. _khard: https://github.com/scheibler/khard/
|
.. _khard: https://github.com/scheibler/khard/
|
||||||
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
||||||
|
|
|
||||||
|
|
@ -13,8 +13,8 @@ minutes).
|
||||||
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
|
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
|
||||||
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
|
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
|
||||||
|
|
||||||
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.service
|
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.service
|
||||||
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.timer
|
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.timer
|
||||||
|
|
||||||
Activation
|
Activation
|
||||||
----------
|
----------
|
||||||
|
|
|
||||||
|
|
@ -48,10 +48,9 @@ instance to subfolders of ``~/.calendar/``.
|
||||||
Setting up todoman
|
Setting up todoman
|
||||||
==================
|
==================
|
||||||
|
|
||||||
Write this to ``~/.config/todoman/todoman.conf``::
|
Write this to ``~/.config/todoman/config.py``::
|
||||||
|
|
||||||
[main]
|
path = "~/.calendars/*"
|
||||||
path = ~/.calendars/*
|
|
||||||
|
|
||||||
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
|
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
|
||||||
which is exactly the tasklists we want. Now you can use ``todoman`` as
|
which is exactly the tasklists we want. Now you can use ``todoman`` as
|
||||||
|
|
|
||||||
|
|
@ -56,8 +56,11 @@ have any file extensions.
|
||||||
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
||||||
present.
|
present.
|
||||||
|
|
||||||
- A file called ``displayname`` contains a UTF-8 encoded label that may be used
|
- Files called ``displayname`` and ``description`` contain a UTF-8 encoded label/
|
||||||
to represent the vdir in UIs.
|
description, that may be used to represent the vdir in UIs.
|
||||||
|
|
||||||
|
- A file called ``order`` inside the vdir includes the relative order
|
||||||
|
of the calendar, a property that is only relevant in UI design.
|
||||||
|
|
||||||
Writing to vdirs
|
Writing to vdirs
|
||||||
================
|
================
|
||||||
|
|
|
||||||
|
|
@ -50,7 +50,6 @@ program chosen:
|
||||||
|
|
||||||
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
||||||
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
||||||
e.g. DAVx⁵_ or the apps by dmfs_.
|
e.g. DAVx⁵_ or other apps bundled with smartphones.
|
||||||
|
|
||||||
.. _DAVx⁵: https://www.davx5.com/
|
.. _DAVx⁵: https://www.davx5.com/
|
||||||
.. _dmfs: https://dmfs.org/
|
|
||||||
|
|
|
||||||
29
publish-release.yaml
Normal file
29
publish-release.yaml
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
# Push new version to PyPI.
|
||||||
|
#
|
||||||
|
# Usage: hut builds submit publish-release.yaml --follow
|
||||||
|
|
||||||
|
image: alpine/edge
|
||||||
|
packages:
|
||||||
|
- py3-build
|
||||||
|
- py3-pip
|
||||||
|
- py3-setuptools
|
||||||
|
- py3-setuptools_scm
|
||||||
|
- py3-wheel
|
||||||
|
- twine
|
||||||
|
sources:
|
||||||
|
- https://github.com/pimutils/vdirsyncer
|
||||||
|
secrets:
|
||||||
|
- a36c8ba3-fba0-4338-b402-6aea0fbe771e # PyPI token.
|
||||||
|
environment:
|
||||||
|
CI: true
|
||||||
|
tasks:
|
||||||
|
- check-tag: |
|
||||||
|
cd vdirsyncer
|
||||||
|
git fetch --tags
|
||||||
|
|
||||||
|
# Stop here unless this is a tag.
|
||||||
|
git describe --exact-match --tags || complete-build
|
||||||
|
- publish: |
|
||||||
|
cd vdirsyncer
|
||||||
|
python -m build --no-isolation
|
||||||
|
twine upload --non-interactive dist/*
|
||||||
114
pyproject.toml
Normal file
114
pyproject.toml
Normal file
|
|
@ -0,0 +1,114 @@
|
||||||
|
# Vdirsyncer synchronizes calendars and contacts.
|
||||||
|
#
|
||||||
|
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
||||||
|
# how to package vdirsyncer.
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=64", "setuptools_scm>=8"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "vdirsyncer"
|
||||||
|
authors = [
|
||||||
|
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
|
||||||
|
]
|
||||||
|
description = "Synchronize calendars and contacts"
|
||||||
|
readme = "README.rst"
|
||||||
|
requires-python = ">=3.9"
|
||||||
|
keywords = ["todo", "task", "icalendar", "cli"]
|
||||||
|
license = "BSD-3-Clause"
|
||||||
|
license-files = ["LICENSE"]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 4 - Beta",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Operating System :: POSIX",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Topic :: Internet",
|
||||||
|
"Topic :: Office/Business :: Scheduling",
|
||||||
|
"Topic :: Utilities",
|
||||||
|
]
|
||||||
|
dependencies = [
|
||||||
|
"click>=5.0,<9.0",
|
||||||
|
"click-log>=0.3.0,<0.5.0",
|
||||||
|
"requests>=2.20.0",
|
||||||
|
"aiohttp>=3.8.2,<4.0.0",
|
||||||
|
"aiostream>=0.4.3,<0.8.0",
|
||||||
|
"tenacity>=9.0.0",
|
||||||
|
]
|
||||||
|
dynamic = ["version"]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
google = ["aiohttp-oauthlib"]
|
||||||
|
test = [
|
||||||
|
"hypothesis>=6.72.0,<7.0.0",
|
||||||
|
"pytest",
|
||||||
|
"pytest-cov",
|
||||||
|
"pytest-httpserver",
|
||||||
|
"trustme",
|
||||||
|
"pytest-asyncio",
|
||||||
|
"aioresponses",
|
||||||
|
]
|
||||||
|
docs = [
|
||||||
|
"sphinx!=1.4.7",
|
||||||
|
"sphinx_rtd_theme",
|
||||||
|
"setuptools_scm",
|
||||||
|
]
|
||||||
|
check = [
|
||||||
|
"mypy",
|
||||||
|
"ruff",
|
||||||
|
"types-docutils",
|
||||||
|
"types-requests",
|
||||||
|
"types-setuptools",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
vdirsyncer = "vdirsyncer.cli:app"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
extend-select = [
|
||||||
|
"B0",
|
||||||
|
"C4",
|
||||||
|
"E",
|
||||||
|
"I",
|
||||||
|
"RSE",
|
||||||
|
"SIM",
|
||||||
|
"TID",
|
||||||
|
"UP",
|
||||||
|
"W",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.isort]
|
||||||
|
force-single-line = true
|
||||||
|
required-imports = ["from __future__ import annotations"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = """
|
||||||
|
--tb=short
|
||||||
|
--cov-config .coveragerc
|
||||||
|
--cov=vdirsyncer
|
||||||
|
--cov-report=term-missing:skip-covered
|
||||||
|
--no-cov-on-fail
|
||||||
|
--color=yes
|
||||||
|
"""
|
||||||
|
# filterwarnings=error
|
||||||
|
asyncio_default_fixture_loop_scope = "function"
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
exclude_lines = [
|
||||||
|
"if TYPE_CHECKING:",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
include = ["vdirsyncer*"]
|
||||||
|
|
||||||
|
[tool.setuptools_scm]
|
||||||
|
write_to = "vdirsyncer/version.py"
|
||||||
|
version_scheme = "no-guess-dev"
|
||||||
49
scripts/_build_deb_in_container.bash
Normal file
49
scripts/_build_deb_in_container.bash
Normal file
|
|
@ -0,0 +1,49 @@
|
||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# This script is mean to be run inside a dedicated container,
|
||||||
|
# and not interatively.
|
||||||
|
|
||||||
|
set -ex
|
||||||
|
|
||||||
|
export DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y build-essential fakeroot debhelper git
|
||||||
|
apt-get install -y python3-all python3-pip python3-venv
|
||||||
|
apt-get install -y ruby ruby-dev
|
||||||
|
|
||||||
|
pip3 install virtualenv virtualenv-tools3
|
||||||
|
virtualenv -p python3 /vdirsyncer/env/
|
||||||
|
|
||||||
|
gem install fpm
|
||||||
|
|
||||||
|
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
|
||||||
|
pip3 uninstall -y virtualenv
|
||||||
|
echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
|
||||||
|
chmod +x /usr/local/bin/virtualenv
|
||||||
|
|
||||||
|
cp -r /source/ /vdirsyncer/vdirsyncer/
|
||||||
|
cd /vdirsyncer/vdirsyncer/ || exit 2
|
||||||
|
mkdir /vdirsyncer/pkgs/
|
||||||
|
|
||||||
|
basename -- *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
||||||
|
# XXX: Do I really not want google support included?
|
||||||
|
(echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
||||||
|
fpm --verbose \
|
||||||
|
--input-type virtualenv \
|
||||||
|
--output-type deb \
|
||||||
|
--name "vdirsyncer-latest" \
|
||||||
|
--version "$(cat version)" \
|
||||||
|
--prefix /opt/venvs/vdirsyncer-latest \
|
||||||
|
--depends python3 \
|
||||||
|
requirements.txt
|
||||||
|
|
||||||
|
mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
||||||
|
|
||||||
|
cd /vdirsyncer/pkgs/
|
||||||
|
dpkg -i -- *.deb
|
||||||
|
|
||||||
|
# Check that it works:
|
||||||
|
LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
||||||
|
|
||||||
|
cp -- *.deb /source/
|
||||||
|
|
@ -1,42 +0,0 @@
|
||||||
ARG distro
|
|
||||||
ARG distrover
|
|
||||||
|
|
||||||
FROM $distro:$distrover
|
|
||||||
|
|
||||||
RUN apt-get update
|
|
||||||
RUN apt-get install -y build-essential fakeroot debhelper git
|
|
||||||
RUN apt-get install -y python3-all python3-pip python3-venv
|
|
||||||
RUN apt-get install -y ruby ruby-dev
|
|
||||||
|
|
||||||
RUN gem install fpm package_cloud
|
|
||||||
|
|
||||||
RUN pip3 install virtualenv virtualenv-tools3
|
|
||||||
RUN virtualenv -p python3 /vdirsyncer/env/
|
|
||||||
|
|
||||||
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
|
|
||||||
RUN pip3 uninstall -y virtualenv
|
|
||||||
RUN echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
|
|
||||||
RUN chmod +x /usr/local/bin/virtualenv
|
|
||||||
|
|
||||||
COPY . /vdirsyncer/vdirsyncer/
|
|
||||||
WORKDIR /vdirsyncer/vdirsyncer/
|
|
||||||
RUN mkdir /vdirsyncer/pkgs/
|
|
||||||
|
|
||||||
RUN basename *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
|
||||||
RUN (echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
|
||||||
RUN fpm --verbose \
|
|
||||||
--input-type virtualenv \
|
|
||||||
--output-type deb \
|
|
||||||
--name "vdirsyncer-latest" \
|
|
||||||
--version "$(cat version)" \
|
|
||||||
--prefix /opt/venvs/vdirsyncer-latest \
|
|
||||||
--depends python3 \
|
|
||||||
requirements.txt
|
|
||||||
|
|
||||||
RUN mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
|
||||||
|
|
||||||
WORKDIR /vdirsyncer/pkgs/
|
|
||||||
RUN dpkg -i *.deb
|
|
||||||
|
|
||||||
# Check that it works:
|
|
||||||
RUN LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
|
||||||
|
|
@ -1,26 +1,56 @@
|
||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
|
|
||||||
set -xe
|
set -xeu
|
||||||
|
|
||||||
DISTRO=$1
|
SCRIPT_PATH=$(realpath "$0")
|
||||||
DISTROVER=$2
|
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
|
||||||
|
|
||||||
NAME="vdirsyncer-${DISTRO}-${DISTROVER}:latest"
|
# E.g.: debian, ubuntu
|
||||||
|
DISTRO=${DISTRO:1}
|
||||||
|
# E.g.: bullseye, bookwork
|
||||||
|
DISTROVER=${DISTROVER:2}
|
||||||
|
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
|
||||||
CONTEXT="$(mktemp -d)"
|
CONTEXT="$(mktemp -d)"
|
||||||
|
|
||||||
|
DEST_DIR="$SCRIPT_DIR/../$DISTRO-$DISTROVER"
|
||||||
|
|
||||||
|
cleanup() {
|
||||||
|
rm -rf "$CONTEXT"
|
||||||
|
}
|
||||||
|
trap cleanup EXIT
|
||||||
|
|
||||||
|
# Prepare files.
|
||||||
|
cp scripts/_build_deb_in_container.bash "$CONTEXT"
|
||||||
python setup.py sdist -d "$CONTEXT"
|
python setup.py sdist -d "$CONTEXT"
|
||||||
|
|
||||||
# Build the package in a container with the right distro version.
|
docker run -it \
|
||||||
docker build \
|
--name "$CONTAINER_NAME" \
|
||||||
--build-arg distro=$DISTRO \
|
--volume "$CONTEXT:/source" \
|
||||||
--build-arg distrover=$DISTROVER \
|
"$DISTRO:$DISTROVER" \
|
||||||
-t $NAME \
|
bash /source/_build_deb_in_container.bash
|
||||||
-f scripts/dpkg.Dockerfile \
|
|
||||||
"$CONTEXT"
|
|
||||||
|
|
||||||
# Push the package to packagecloud.
|
# Keep around the package filename.
|
||||||
# TODO: Use ~/.packagecloud for CI.
|
PACKAGE=$(ls "$CONTEXT"/*.deb)
|
||||||
docker run -e PACKAGECLOUD_TOKEN=$PACKAGECLOUD_TOKEN $NAME \
|
PACKAGE=$(basename "$PACKAGE")
|
||||||
bash -xec "package_cloud push pimutils/vdirsyncer/$DISTRO/$DISTROVER *.deb"
|
|
||||||
|
|
||||||
rm -rf "$CONTEXT"
|
# Save the build deb files.
|
||||||
|
mkdir -p "$DEST_DIR"
|
||||||
|
cp "$CONTEXT"/*.deb "$DEST_DIR"
|
||||||
|
|
||||||
|
echo Build complete! 🤖
|
||||||
|
|
||||||
|
# Packagecloud uses some internal IDs for each distro.
|
||||||
|
# Extract the one for the distro we're publishing.
|
||||||
|
DISTRO_ID=$(
|
||||||
|
curl -s \
|
||||||
|
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/distributions.json | \
|
||||||
|
jq '.deb | .[] | select(.index_name=="'"$DISTRO"'") | .versions | .[] | select(.index_name=="'"$DISTROVER"'") | .id'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Actually push the package.
|
||||||
|
curl \
|
||||||
|
-F "package[distro_version_id]=$DISTRO_ID" \
|
||||||
|
-F "package[package_file]=@$DEST_DIR/$PACKAGE" \
|
||||||
|
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/repos/pimutils/vdirsyncer/packages.json
|
||||||
|
|
||||||
|
echo Done! ✨
|
||||||
|
|
|
||||||
21
setup.cfg
21
setup.cfg
|
|
@ -1,21 +0,0 @@
|
||||||
[wheel]
|
|
||||||
universal = 1
|
|
||||||
|
|
||||||
[tool:pytest]
|
|
||||||
addopts =
|
|
||||||
--tb=short
|
|
||||||
--cov-config .coveragerc
|
|
||||||
--cov=vdirsyncer
|
|
||||||
--cov-report=term-missing
|
|
||||||
--no-cov-on-fail
|
|
||||||
|
|
||||||
[flake8]
|
|
||||||
application-import-names = tests,vdirsyncer
|
|
||||||
extend-ignore =
|
|
||||||
E203, # Black-incompatible colon spacing.
|
|
||||||
W503, # Line jump before binary operator.
|
|
||||||
I100,
|
|
||||||
I202
|
|
||||||
max-line-length = 88
|
|
||||||
exclude = .eggs,build
|
|
||||||
import-order-style = smarkets
|
|
||||||
82
setup.py
82
setup.py
|
|
@ -1,82 +0,0 @@
|
||||||
"""
|
|
||||||
Vdirsyncer synchronizes calendars and contacts.
|
|
||||||
|
|
||||||
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
|
||||||
how to package vdirsyncer.
|
|
||||||
"""
|
|
||||||
from setuptools import Command
|
|
||||||
from setuptools import find_packages
|
|
||||||
from setuptools import setup
|
|
||||||
|
|
||||||
|
|
||||||
requirements = [
|
|
||||||
# https://github.com/mitsuhiko/click/issues/200
|
|
||||||
"click>=5.0,<9.0",
|
|
||||||
"click-log>=0.3.0, <0.4.0",
|
|
||||||
# https://github.com/pimutils/vdirsyncer/issues/478
|
|
||||||
"click-threading>=0.5",
|
|
||||||
"requests >=2.20.0",
|
|
||||||
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
|
|
||||||
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
|
|
||||||
"requests_toolbelt >=0.4.0",
|
|
||||||
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
|
|
||||||
"atomicwrites>=0.1.7",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class PrintRequirements(Command):
|
|
||||||
description = "Prints minimal requirements"
|
|
||||||
user_options = []
|
|
||||||
|
|
||||||
def initialize_options(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def finalize_options(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
for requirement in requirements:
|
|
||||||
print(requirement.replace(">", "=").replace(" ", ""))
|
|
||||||
|
|
||||||
|
|
||||||
with open("README.rst") as f:
|
|
||||||
long_description = f.read()
|
|
||||||
|
|
||||||
|
|
||||||
setup(
|
|
||||||
# General metadata
|
|
||||||
name="vdirsyncer",
|
|
||||||
author="Markus Unterwaditzer",
|
|
||||||
author_email="markus@unterwaditzer.net",
|
|
||||||
url="https://github.com/pimutils/vdirsyncer",
|
|
||||||
description="Synchronize calendars and contacts",
|
|
||||||
license="BSD",
|
|
||||||
long_description=long_description,
|
|
||||||
# Runtime dependencies
|
|
||||||
install_requires=requirements,
|
|
||||||
# Optional dependencies
|
|
||||||
extras_require={
|
|
||||||
"google": ["requests-oauthlib"],
|
|
||||||
"etesync": ["etesync==0.5.2", "django<2.0"],
|
|
||||||
},
|
|
||||||
# Build dependencies
|
|
||||||
setup_requires=["setuptools_scm != 1.12.0"],
|
|
||||||
# Other
|
|
||||||
packages=find_packages(exclude=["tests.*", "tests"]),
|
|
||||||
include_package_data=True,
|
|
||||||
cmdclass={"minimal_requirements": PrintRequirements},
|
|
||||||
use_scm_version={"write_to": "vdirsyncer/version.py"},
|
|
||||||
entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:main"]},
|
|
||||||
classifiers=[
|
|
||||||
"Development Status :: 4 - Beta",
|
|
||||||
"Environment :: Console",
|
|
||||||
"License :: OSI Approved :: BSD License",
|
|
||||||
"Operating System :: POSIX",
|
|
||||||
"Programming Language :: Python :: 3",
|
|
||||||
"Programming Language :: Python :: 3.7",
|
|
||||||
"Programming Language :: Python :: 3.8",
|
|
||||||
"Programming Language :: Python :: 3.9",
|
|
||||||
"Topic :: Internet",
|
|
||||||
"Topic :: Utilities",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
hypothesis>=5.0.0,<7.0.0
|
|
||||||
pytest
|
|
||||||
pytest-cov
|
|
||||||
pytest-localserver
|
|
||||||
|
|
@ -1,6 +1,9 @@
|
||||||
"""
|
"""
|
||||||
Test suite for vdirsyncer.
|
Test suite for vdirsyncer.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import hypothesis.strategies as st
|
import hypothesis.strategies as st
|
||||||
import urllib3.exceptions
|
import urllib3.exceptions
|
||||||
|
|
||||||
|
|
@ -100,10 +103,8 @@ X-SOMETHING:{r}
|
||||||
HAHA:YES
|
HAHA:YES
|
||||||
END:FOO"""
|
END:FOO"""
|
||||||
|
|
||||||
printable_characters_strategy = st.text(
|
printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))
|
||||||
st.characters(blacklist_categories=("Cc", "Cs"))
|
|
||||||
)
|
|
||||||
|
|
||||||
uid_strategy = st.text(
|
uid_strategy = st.text(
|
||||||
st.characters(blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
|
st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
|
||||||
).filter(lambda x: x.strip() == x)
|
).filter(lambda x: x.strip() == x)
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,19 @@
|
||||||
"""
|
"""
|
||||||
General-purpose fixtures for vdirsyncer's testsuite.
|
General-purpose fixtures for vdirsyncer's testsuite.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
import click_log
|
import click_log
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
from hypothesis import HealthCheck
|
from hypothesis import HealthCheck
|
||||||
from hypothesis import settings
|
|
||||||
from hypothesis import Verbosity
|
from hypothesis import Verbosity
|
||||||
|
from hypothesis import settings
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
|
|
@ -24,7 +29,6 @@ except ImportError:
|
||||||
def benchmark():
|
def benchmark():
|
||||||
return lambda x: x()
|
return lambda x: x()
|
||||||
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
del pytest_benchmark
|
del pytest_benchmark
|
||||||
|
|
||||||
|
|
@ -41,7 +45,7 @@ settings.register_profile(
|
||||||
"deterministic",
|
"deterministic",
|
||||||
settings(
|
settings(
|
||||||
derandomize=True,
|
derandomize=True,
|
||||||
suppress_health_check=HealthCheck.all(),
|
suppress_health_check=list(HealthCheck),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
|
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
|
||||||
|
|
@ -52,3 +56,15 @@ elif os.environ.get("CI", "false").lower() == "true":
|
||||||
settings.load_profile("ci")
|
settings.load_profile("ci")
|
||||||
else:
|
else:
|
||||||
settings.load_profile("dev")
|
settings.load_profile("dev")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
|
||||||
|
async def aio_session():
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
yield session
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
|
||||||
|
async def aio_connector():
|
||||||
|
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
||||||
|
yield conn
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,20 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import random
|
import random
|
||||||
import textwrap
|
import textwrap
|
||||||
import uuid
|
import uuid
|
||||||
from urllib.parse import quote as urlquote
|
from urllib.parse import quote as urlquote
|
||||||
from urllib.parse import unquote as urlunquote
|
from urllib.parse import unquote as urlunquote
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
|
||||||
from .. import assert_item_equals
|
from tests import EVENT_TEMPLATE
|
||||||
from .. import EVENT_TEMPLATE
|
from tests import TASK_TEMPLATE
|
||||||
from .. import normalize_item
|
from tests import VCARD_TEMPLATE
|
||||||
from .. import TASK_TEMPLATE
|
from tests import assert_item_equals
|
||||||
from .. import VCARD_TEMPLATE
|
from tests import normalize_item
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.storage.base import normalize_meta_value
|
from vdirsyncer.storage.base import normalize_meta_value
|
||||||
from vdirsyncer.vobject import Item
|
from vdirsyncer.vobject import Item
|
||||||
|
|
@ -46,11 +50,12 @@ class StorageTests:
|
||||||
|
|
||||||
:param collection: The name of the collection to create and use.
|
:param collection: The name of the collection to create and use.
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest_asyncio.fixture
|
||||||
def s(self, get_storage_args):
|
async def s(self, get_storage_args):
|
||||||
return self.storage_class(**get_storage_args())
|
rv = self.storage_class(**await get_storage_args())
|
||||||
|
return rv
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_item(self, item_type):
|
def get_item(self, item_type):
|
||||||
|
|
@ -72,180 +77,209 @@ class StorageTests:
|
||||||
if not self.supports_metadata:
|
if not self.supports_metadata:
|
||||||
pytest.skip("This storage does not support metadata.")
|
pytest.skip("This storage does not support metadata.")
|
||||||
|
|
||||||
def test_generic(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_generic(self, s, get_item):
|
||||||
items = [get_item() for i in range(1, 10)]
|
items = [get_item() for i in range(1, 10)]
|
||||||
hrefs = []
|
hrefs = []
|
||||||
for item in items:
|
for item in items:
|
||||||
href, etag = s.upload(item)
|
href, etag = await s.upload(item)
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = s.get(href)
|
_, etag = await s.get(href)
|
||||||
hrefs.append((href, etag))
|
hrefs.append((href, etag))
|
||||||
hrefs.sort()
|
hrefs.sort()
|
||||||
assert hrefs == sorted(s.list())
|
assert hrefs == sorted(await aiostream.stream.list(s.list()))
|
||||||
for href, etag in hrefs:
|
for href, etag in hrefs:
|
||||||
assert isinstance(href, (str, bytes))
|
assert isinstance(href, (str, bytes))
|
||||||
assert isinstance(etag, (str, bytes))
|
assert isinstance(etag, (str, bytes))
|
||||||
assert s.has(href)
|
assert await s.has(href)
|
||||||
item, etag2 = s.get(href)
|
item, etag2 = await s.get(href)
|
||||||
assert etag == etag2
|
assert etag == etag2
|
||||||
|
|
||||||
def test_empty_get_multi(self, s):
|
@pytest.mark.asyncio
|
||||||
assert list(s.get_multi([])) == []
|
async def test_empty_get_multi(self, s):
|
||||||
|
assert await aiostream.stream.list(s.get_multi([])) == []
|
||||||
|
|
||||||
def test_get_multi_duplicates(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
href, etag = s.upload(get_item())
|
async def test_get_multi_duplicates(self, s, get_item):
|
||||||
|
href, etag = await s.upload(get_item())
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = s.get(href)
|
_, etag = await s.get(href)
|
||||||
((href2, item, etag2),) = s.get_multi([href] * 2)
|
((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
|
||||||
assert href2 == href
|
assert href2 == href
|
||||||
assert etag2 == etag
|
assert etag2 == etag
|
||||||
|
|
||||||
def test_upload_already_existing(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_upload_already_existing(self, s, get_item):
|
||||||
item = get_item()
|
item = get_item()
|
||||||
s.upload(item)
|
await s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.upload(item)
|
await s.upload(item)
|
||||||
|
|
||||||
def test_upload(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_upload(self, s, get_item):
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, etag = s.upload(item)
|
href, _etag = await s.upload(item)
|
||||||
assert_item_equals(s.get(href)[0], item)
|
assert_item_equals((await s.get(href))[0], item)
|
||||||
|
|
||||||
def test_update(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_update(self, s, get_item):
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, etag = s.upload(item)
|
href, etag = await s.upload(item)
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = s.get(href)
|
_, etag = await s.get(href)
|
||||||
assert_item_equals(s.get(href)[0], item)
|
assert_item_equals((await s.get(href))[0], item)
|
||||||
|
|
||||||
new_item = get_item(uid=item.uid)
|
new_item = get_item(uid=item.uid)
|
||||||
new_etag = s.update(href, new_item, etag)
|
new_etag = await s.update(href, new_item, etag)
|
||||||
if new_etag is None:
|
if new_etag is None:
|
||||||
_, new_etag = s.get(href)
|
_, new_etag = await s.get(href)
|
||||||
# See https://github.com/pimutils/vdirsyncer/issues/48
|
# See https://github.com/pimutils/vdirsyncer/issues/48
|
||||||
assert isinstance(new_etag, (bytes, str))
|
assert isinstance(new_etag, (bytes, str))
|
||||||
assert_item_equals(s.get(href)[0], new_item)
|
assert_item_equals((await s.get(href))[0], new_item)
|
||||||
|
|
||||||
def test_update_nonexisting(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_update_nonexisting(self, s, get_item):
|
||||||
item = get_item()
|
item = get_item()
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.update("huehue", item, '"123"')
|
await s.update("huehue", item, '"123"')
|
||||||
|
|
||||||
def test_wrong_etag(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_wrong_etag(self, s, get_item):
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, etag = s.upload(item)
|
href, _etag = await s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.update(href, item, '"lolnope"')
|
await s.update(href, item, '"lolnope"')
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.delete(href, '"lolnope"')
|
await s.delete(href, '"lolnope"')
|
||||||
|
|
||||||
def test_delete(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
href, etag = s.upload(get_item())
|
async def test_delete(self, s, get_item):
|
||||||
s.delete(href, etag)
|
href, etag = await s.upload(get_item())
|
||||||
assert not list(s.list())
|
await s.delete(href, etag)
|
||||||
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
|
||||||
def test_delete_nonexisting(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_delete_nonexisting(self, s, get_item):
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.delete("1", '"123"')
|
await s.delete("1", '"123"')
|
||||||
|
|
||||||
def test_list(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
assert not list(s.list())
|
async def test_list(self, s, get_item):
|
||||||
href, etag = s.upload(get_item())
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
href, etag = await s.upload(get_item())
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = s.get(href)
|
_, etag = await s.get(href)
|
||||||
assert list(s.list()) == [(href, etag)]
|
assert await aiostream.stream.list(s.list()) == [(href, etag)]
|
||||||
|
|
||||||
def test_has(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
assert not s.has("asd")
|
async def test_has(self, s, get_item):
|
||||||
href, etag = s.upload(get_item())
|
assert not await s.has("asd")
|
||||||
assert s.has(href)
|
href, etag = await s.upload(get_item())
|
||||||
assert not s.has("asd")
|
assert await s.has(href)
|
||||||
s.delete(href, etag)
|
assert not await s.has("asd")
|
||||||
assert not s.has(href)
|
await s.delete(href, etag)
|
||||||
|
assert not await s.has(href)
|
||||||
|
|
||||||
def test_update_others_stay_the_same(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_update_others_stay_the_same(self, s, get_item):
|
||||||
info = {}
|
info = {}
|
||||||
for _ in range(4):
|
for _ in range(4):
|
||||||
href, etag = s.upload(get_item())
|
href, etag = await s.upload(get_item())
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = s.get(href)
|
_, etag = await s.get(href)
|
||||||
info[href] = etag
|
info[href] = etag
|
||||||
|
|
||||||
assert {
|
items = await aiostream.stream.list(
|
||||||
href: etag
|
s.get_multi(href for href, etag in info.items())
|
||||||
for href, item, etag in s.get_multi(href for href, etag in info.items())
|
)
|
||||||
} == info
|
assert {href: etag for href, item, etag in items} == info
|
||||||
|
|
||||||
def test_repr(self, s, get_storage_args):
|
def test_repr(self, s):
|
||||||
assert self.storage_class.__name__ in repr(s)
|
assert self.storage_class.__name__ in repr(s)
|
||||||
assert s.instance_name is None
|
assert s.instance_name is None
|
||||||
|
|
||||||
def test_discover(self, requires_collections, get_storage_args, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_discover(
|
||||||
|
self,
|
||||||
|
requires_collections,
|
||||||
|
get_storage_args,
|
||||||
|
get_item,
|
||||||
|
aio_connector,
|
||||||
|
):
|
||||||
collections = set()
|
collections = set()
|
||||||
for i in range(1, 5):
|
for i in range(1, 5):
|
||||||
collection = f"test{i}"
|
collection = f"test{i}"
|
||||||
s = self.storage_class(**get_storage_args(collection=collection))
|
s = self.storage_class(**await get_storage_args(collection=collection))
|
||||||
assert not list(s.list())
|
assert not await aiostream.stream.list(s.list())
|
||||||
s.upload(get_item())
|
await s.upload(get_item())
|
||||||
collections.add(s.collection)
|
collections.add(s.collection)
|
||||||
|
|
||||||
actual = {
|
discovered = await aiostream.stream.list(
|
||||||
c["collection"]
|
self.storage_class.discover(**await get_storage_args(collection=None))
|
||||||
for c in self.storage_class.discover(**get_storage_args(collection=None))
|
)
|
||||||
}
|
actual = {c["collection"] for c in discovered}
|
||||||
|
|
||||||
assert actual >= collections
|
assert actual >= collections
|
||||||
|
|
||||||
def test_create_collection(self, requires_collections, get_storage_args, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_create_collection(
|
||||||
|
self,
|
||||||
|
requires_collections,
|
||||||
|
get_storage_args,
|
||||||
|
get_item,
|
||||||
|
):
|
||||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
|
if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
|
||||||
pytest.skip("Manual cleanup would be necessary.")
|
pytest.skip("Manual cleanup would be necessary.")
|
||||||
if getattr(self, "dav_server", "") == "radicale":
|
if getattr(self, "dav_server", "") == "radicale":
|
||||||
pytest.skip("Radicale does not support collection creation")
|
pytest.skip("Radicale does not support collection creation")
|
||||||
|
|
||||||
args = get_storage_args(collection=None)
|
args = await get_storage_args(collection=None)
|
||||||
args["collection"] = "test"
|
args["collection"] = "test"
|
||||||
|
|
||||||
s = self.storage_class(**self.storage_class.create_collection(**args))
|
s = self.storage_class(**await self.storage_class.create_collection(**args))
|
||||||
|
|
||||||
href = s.upload(get_item())[0]
|
href = (await s.upload(get_item()))[0]
|
||||||
assert href in (href for href, etag in s.list())
|
assert href in await aiostream.stream.list(
|
||||||
|
(href async for href, etag in s.list())
|
||||||
|
)
|
||||||
|
|
||||||
def test_discover_collection_arg(self, requires_collections, get_storage_args):
|
@pytest.mark.asyncio
|
||||||
args = get_storage_args(collection="test2")
|
async def test_discover_collection_arg(
|
||||||
|
self, requires_collections, get_storage_args
|
||||||
|
):
|
||||||
|
args = await get_storage_args(collection="test2")
|
||||||
with pytest.raises(TypeError) as excinfo:
|
with pytest.raises(TypeError) as excinfo:
|
||||||
list(self.storage_class.discover(**args))
|
await aiostream.stream.list(self.storage_class.discover(**args))
|
||||||
|
|
||||||
assert "collection argument must not be given" in str(excinfo.value)
|
assert "collection argument must not be given" in str(excinfo.value)
|
||||||
|
|
||||||
def test_collection_arg(self, get_storage_args):
|
@pytest.mark.asyncio
|
||||||
if self.storage_class.storage_name.startswith("etesync"):
|
async def test_collection_arg(self, get_storage_args):
|
||||||
pytest.skip("etesync uses UUIDs.")
|
|
||||||
|
|
||||||
if self.supports_collections:
|
if self.supports_collections:
|
||||||
s = self.storage_class(**get_storage_args(collection="test2"))
|
s = self.storage_class(**await get_storage_args(collection="test2"))
|
||||||
# Can't do stronger assertion because of radicale, which needs a
|
# Can't do stronger assertion because of radicale, which needs a
|
||||||
# fileextension to guess the collection type.
|
# fileextension to guess the collection type.
|
||||||
assert "test2" in s.collection
|
assert "test2" in s.collection
|
||||||
else:
|
else:
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
self.storage_class(collection="ayy", **get_storage_args())
|
self.storage_class(collection="ayy", **await get_storage_args())
|
||||||
|
|
||||||
def test_case_sensitive_uids(self, s, get_item):
|
@pytest.mark.asyncio
|
||||||
|
async def test_case_sensitive_uids(self, s, get_item):
|
||||||
if s.storage_name == "filesystem":
|
if s.storage_name == "filesystem":
|
||||||
pytest.skip("Behavior depends on the filesystem.")
|
pytest.skip("Behavior depends on the filesystem.")
|
||||||
|
|
||||||
uid = str(uuid.uuid4())
|
uid = str(uuid.uuid4())
|
||||||
s.upload(get_item(uid=uid.upper()))
|
await s.upload(get_item(uid=uid.upper()))
|
||||||
s.upload(get_item(uid=uid.lower()))
|
await s.upload(get_item(uid=uid.lower()))
|
||||||
items = [href for href, etag in s.list()]
|
items = [href async for href, etag in s.list()]
|
||||||
assert len(items) == 2
|
assert len(items) == 2
|
||||||
assert len(set(items)) == 2
|
assert len(set(items)) == 2
|
||||||
|
|
||||||
def test_specialchars(
|
@pytest.mark.asyncio
|
||||||
|
async def test_specialchars(
|
||||||
self, monkeypatch, requires_collections, get_storage_args, get_item
|
self, monkeypatch, requires_collections, get_storage_args, get_item
|
||||||
):
|
):
|
||||||
if getattr(self, "dav_server", "") == "radicale":
|
|
||||||
pytest.skip("Radicale is fundamentally broken.")
|
|
||||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
|
if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
|
||||||
pytest.skip("iCloud and FastMail reject this name.")
|
pytest.skip("iCloud and FastMail reject this name.")
|
||||||
|
|
||||||
|
|
@ -254,42 +288,68 @@ class StorageTests:
|
||||||
uid = "test @ foo ät bar град сатану"
|
uid = "test @ foo ät bar град сатану"
|
||||||
collection = "test @ foo ät bar"
|
collection = "test @ foo ät bar"
|
||||||
|
|
||||||
s = self.storage_class(**get_storage_args(collection=collection))
|
s = self.storage_class(**await get_storage_args(collection=collection))
|
||||||
item = get_item(uid=uid)
|
item = get_item(uid=uid)
|
||||||
|
|
||||||
href, etag = s.upload(item)
|
href, etag = await s.upload(item)
|
||||||
item2, etag2 = s.get(href)
|
item2, etag2 = await s.get(href)
|
||||||
if etag is not None:
|
if etag is not None:
|
||||||
assert etag2 == etag
|
assert etag2 == etag
|
||||||
assert_item_equals(item2, item)
|
assert_item_equals(item2, item)
|
||||||
|
|
||||||
((_, etag3),) = s.list()
|
((_, etag3),) = await aiostream.stream.list(s.list())
|
||||||
assert etag2 == etag3
|
assert etag2 == etag3
|
||||||
|
|
||||||
# etesync uses UUIDs for collection names
|
|
||||||
if self.storage_class.storage_name.startswith("etesync"):
|
|
||||||
return
|
|
||||||
|
|
||||||
assert collection in urlunquote(s.collection)
|
assert collection in urlunquote(s.collection)
|
||||||
if self.storage_class.storage_name.endswith("dav"):
|
if self.storage_class.storage_name.endswith("dav"):
|
||||||
assert urlquote(uid, "/@:") in href
|
assert urlquote(uid, "/@:") in href
|
||||||
|
|
||||||
def test_metadata(self, requires_metadata, s):
|
@pytest.mark.asyncio
|
||||||
if not getattr(self, "dav_server", ""):
|
async def test_newline_in_uid(
|
||||||
assert not s.get_meta("color")
|
self, monkeypatch, requires_collections, get_storage_args, get_item
|
||||||
assert not s.get_meta("displayname")
|
):
|
||||||
|
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
||||||
|
|
||||||
|
uid = "UID:20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%0A.ics"
|
||||||
|
|
||||||
|
s = self.storage_class(**await get_storage_args())
|
||||||
|
item = get_item(uid=uid)
|
||||||
|
|
||||||
|
href, etag = await s.upload(item)
|
||||||
|
item2, etag2 = await s.get(href)
|
||||||
|
if etag is not None:
|
||||||
|
assert etag2 == etag
|
||||||
|
assert_item_equals(item2, item)
|
||||||
|
|
||||||
|
((_, etag3),) = await aiostream.stream.list(s.list())
|
||||||
|
assert etag2 == etag3
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_empty_metadata(self, requires_metadata, s):
|
||||||
|
if getattr(self, "dav_server", ""):
|
||||||
|
pytest.skip()
|
||||||
|
|
||||||
|
assert await s.get_meta("color") is None
|
||||||
|
assert await s.get_meta("displayname") is None
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_metadata(self, requires_metadata, s):
|
||||||
|
if getattr(self, "dav_server", "") == "xandikos":
|
||||||
|
pytest.skip("xandikos does not support removing metadata.")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
s.set_meta("color", None)
|
await s.set_meta("color", None)
|
||||||
assert not s.get_meta("color")
|
assert await s.get_meta("color") is None
|
||||||
s.set_meta("color", "#ff0000")
|
await s.set_meta("color", "#ff0000")
|
||||||
assert s.get_meta("color") == "#ff0000"
|
assert await s.get_meta("color") == "#ff0000"
|
||||||
except exceptions.UnsupportedMetadataError:
|
except exceptions.UnsupportedMetadataError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_encoding_metadata(self, requires_metadata, s):
|
||||||
for x in ("hello world", "hello wörld"):
|
for x in ("hello world", "hello wörld"):
|
||||||
s.set_meta("displayname", x)
|
await s.set_meta("displayname", x)
|
||||||
rv = s.get_meta("displayname")
|
rv = await s.get_meta("displayname")
|
||||||
assert rv == x
|
assert rv == x
|
||||||
assert isinstance(rv, str)
|
assert isinstance(rv, str)
|
||||||
|
|
||||||
|
|
@ -306,23 +366,25 @@ class StorageTests:
|
||||||
"فلسطين",
|
"فلسطين",
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_metadata_normalization(self, requires_metadata, s, value):
|
@pytest.mark.asyncio
|
||||||
x = s.get_meta("displayname")
|
async def test_metadata_normalization(self, requires_metadata, s, value):
|
||||||
|
x = await s.get_meta("displayname")
|
||||||
assert x == normalize_meta_value(x)
|
assert x == normalize_meta_value(x)
|
||||||
|
|
||||||
if not getattr(self, "dav_server", None):
|
if not getattr(self, "dav_server", None):
|
||||||
# ownCloud replaces "" with "unnamed"
|
# ownCloud replaces "" with "unnamed"
|
||||||
s.set_meta("displayname", value)
|
await s.set_meta("displayname", value)
|
||||||
assert s.get_meta("displayname") == normalize_meta_value(value)
|
assert await s.get_meta("displayname") == normalize_meta_value(value)
|
||||||
|
|
||||||
def test_recurring_events(self, s, item_type):
|
@pytest.mark.asyncio
|
||||||
|
async def test_recurring_events(self, s, item_type):
|
||||||
if item_type != "VEVENT":
|
if item_type != "VEVENT":
|
||||||
pytest.skip("This storage instance doesn't support iCalendar.")
|
pytest.skip("This storage instance doesn't support iCalendar.")
|
||||||
|
|
||||||
uid = str(uuid.uuid4())
|
uid = str(uuid.uuid4())
|
||||||
item = Item(
|
item = Item(
|
||||||
textwrap.dedent(
|
textwrap.dedent(
|
||||||
"""
|
f"""
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
BEGIN:VEVENT
|
BEGIN:VEVENT
|
||||||
|
|
@ -343,7 +405,7 @@ class StorageTests:
|
||||||
BEGIN:VEVENT
|
BEGIN:VEVENT
|
||||||
DTSTART;TZID=UTC:20140128T083000Z
|
DTSTART;TZID=UTC:20140128T083000Z
|
||||||
DTEND;TZID=UTC:20140128T100000Z
|
DTEND;TZID=UTC:20140128T100000Z
|
||||||
RRULE:FREQ=WEEKLY;UNTIL=20141208T213000Z;BYDAY=TU
|
RRULE:FREQ=WEEKLY;BYDAY=TU;UNTIL=20141208T213000Z
|
||||||
DTSTAMP:20140327T060506Z
|
DTSTAMP:20140327T060506Z
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
CREATED:20131216T033331Z
|
CREATED:20131216T033331Z
|
||||||
|
|
@ -356,13 +418,11 @@ class StorageTests:
|
||||||
TRANSP:OPAQUE
|
TRANSP:OPAQUE
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
""".format(
|
"""
|
||||||
uid=uid
|
|
||||||
)
|
|
||||||
).strip()
|
).strip()
|
||||||
)
|
)
|
||||||
|
|
||||||
href, etag = s.upload(item)
|
href, _etag = await s.upload(item)
|
||||||
|
|
||||||
item2, etag2 = s.get(href)
|
item2, _etag2 = await s.get(href)
|
||||||
assert normalize_item(item) == normalize_item(item2)
|
assert normalize_item(item) == normalize_item(item2)
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,14 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import contextlib
|
import contextlib
|
||||||
import subprocess
|
import subprocess
|
||||||
import time
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -45,6 +50,7 @@ def dockerised_server(name, container_port, exposed_port):
|
||||||
[
|
[
|
||||||
"docker",
|
"docker",
|
||||||
"run",
|
"run",
|
||||||
|
"--rm",
|
||||||
"--detach",
|
"--detach",
|
||||||
"--publish",
|
"--publish",
|
||||||
f"{exposed_port}:{container_port}",
|
f"{exposed_port}:{container_port}",
|
||||||
|
|
@ -79,32 +85,32 @@ def xandikos_server():
|
||||||
yield
|
yield
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest_asyncio.fixture
|
||||||
def slow_create_collection(request):
|
async def slow_create_collection(request, aio_connector):
|
||||||
# We need to properly clean up because otherwise we might run into
|
# We need to properly clean up because otherwise we might run into
|
||||||
# storage limits.
|
# storage limits.
|
||||||
to_delete = []
|
to_delete = []
|
||||||
|
|
||||||
def delete_collections():
|
async def inner(cls: type, args: dict, collection_name: str) -> dict:
|
||||||
for s in to_delete:
|
"""Create a collection
|
||||||
s.session.request("DELETE", "")
|
|
||||||
|
|
||||||
request.addfinalizer(delete_collections)
|
Returns args necessary to create a Storage instance pointing to it.
|
||||||
|
"""
|
||||||
|
assert collection_name.startswith("test")
|
||||||
|
|
||||||
def inner(cls, args, collection):
|
# Make each name unique
|
||||||
assert collection.startswith("test")
|
collection_name = f"{collection_name}-vdirsyncer-ci-{uuid.uuid4()}"
|
||||||
collection += "-vdirsyncer-ci-" + str(uuid.uuid4())
|
|
||||||
|
|
||||||
args = cls.create_collection(collection, **args)
|
# Create the collection:
|
||||||
s = cls(**args)
|
args = await cls.create_collection(collection_name, **args)
|
||||||
_clear_collection(s)
|
collection = cls(**args)
|
||||||
assert not list(s.list())
|
|
||||||
to_delete.append(s)
|
# Keep collection in a list to be deleted once tests end:
|
||||||
|
to_delete.append(collection)
|
||||||
|
|
||||||
|
assert not await aiostream.stream.list(collection.list())
|
||||||
return args
|
return args
|
||||||
|
|
||||||
return inner
|
yield inner
|
||||||
|
|
||||||
|
await asyncio.gather(*(c.session.request("DELETE", "") for c in to_delete))
|
||||||
def _clear_collection(s):
|
|
||||||
for href, etag in s.list():
|
|
||||||
s.delete(href, etag)
|
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,18 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
import requests.exceptions
|
|
||||||
|
|
||||||
from .. import get_server_mixin
|
|
||||||
from .. import StorageTests
|
|
||||||
from tests import assert_item_equals
|
from tests import assert_item_equals
|
||||||
|
from tests.storage import StorageTests
|
||||||
|
from tests.storage import get_server_mixin
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.vobject import Item
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
|
|
||||||
dav_server = os.environ.get("DAV_SERVER", "skip")
|
dav_server = os.environ.get("DAV_SERVER", "skip")
|
||||||
ServerMixin = get_server_mixin(dav_server)
|
ServerMixin = get_server_mixin(dav_server)
|
||||||
|
|
||||||
|
|
@ -19,30 +21,33 @@ class DAVStorageTests(ServerMixin, StorageTests):
|
||||||
dav_server = dav_server
|
dav_server = dav_server
|
||||||
|
|
||||||
@pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
|
@pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
|
||||||
def test_dav_broken_item(self, s):
|
@pytest.mark.asyncio
|
||||||
|
async def test_dav_broken_item(self, s):
|
||||||
item = Item("HAHA:YES")
|
item = Item("HAHA:YES")
|
||||||
with pytest.raises((exceptions.Error, requests.exceptions.HTTPError)):
|
with pytest.raises((exceptions.Error, aiohttp.ClientResponseError)):
|
||||||
s.upload(item)
|
await s.upload(item)
|
||||||
assert not list(s.list())
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
|
||||||
def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
@pytest.mark.asyncio
|
||||||
|
async def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
||||||
def breakdown(*a, **kw):
|
def breakdown(*a, **kw):
|
||||||
raise AssertionError("Expected not to be called.")
|
raise AssertionError("Expected not to be called.")
|
||||||
|
|
||||||
monkeypatch.setattr("requests.sessions.Session.request", breakdown)
|
monkeypatch.setattr("requests.sessions.Session.request", breakdown)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
assert list(s.get_multi([])) == []
|
assert list(await aiostream.stream.list(s.get_multi([]))) == []
|
||||||
finally:
|
finally:
|
||||||
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
||||||
monkeypatch.undo()
|
monkeypatch.undo()
|
||||||
|
|
||||||
def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
@pytest.mark.asyncio
|
||||||
|
async def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
||||||
if self.dav_server == "radicale":
|
if self.dav_server == "radicale":
|
||||||
pytest.skip("Radicale is unable to deal with unicode hrefs")
|
pytest.skip("Radicale is unable to deal with unicode hrefs")
|
||||||
|
|
||||||
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
|
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
|
||||||
item = get_item(uid="град сатану" + str(uuid.uuid4()))
|
item = get_item(uid="град сатану" + str(uuid.uuid4()))
|
||||||
href, etag = s.upload(item)
|
href, _etag = await s.upload(item)
|
||||||
item2, etag2 = s.get(href)
|
item2, _etag2 = await s.get(href)
|
||||||
assert_item_equals(item, item2)
|
assert_item_equals(item, item2)
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,24 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import contextlib
|
||||||
import datetime
|
import datetime
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
import requests.exceptions
|
from aioresponses import aioresponses
|
||||||
|
|
||||||
from . import dav_server
|
|
||||||
from . import DAVStorageTests
|
|
||||||
from .. import format_item
|
|
||||||
from tests import EVENT_TEMPLATE
|
from tests import EVENT_TEMPLATE
|
||||||
from tests import TASK_TEMPLATE
|
from tests import TASK_TEMPLATE
|
||||||
from tests import VCARD_TEMPLATE
|
from tests import VCARD_TEMPLATE
|
||||||
|
from tests.storage import format_item
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.storage.dav import CalDAVStorage
|
from vdirsyncer.storage.dav import CalDAVStorage
|
||||||
|
|
||||||
|
from . import DAVStorageTests
|
||||||
|
from . import dav_server
|
||||||
|
|
||||||
|
|
||||||
class TestCalDAVStorage(DAVStorageTests):
|
class TestCalDAVStorage(DAVStorageTests):
|
||||||
storage_class = CalDAVStorage
|
storage_class = CalDAVStorage
|
||||||
|
|
@ -21,20 +27,20 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
def item_type(self, request):
|
def item_type(self, request):
|
||||||
return request.param
|
return request.param
|
||||||
|
|
||||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
@pytest.mark.asyncio
|
||||||
def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
||||||
s = self.storage_class(item_types=(item_type,), **get_storage_args())
|
s = self.storage_class(item_types=(item_type,), **await get_storage_args())
|
||||||
|
|
||||||
try:
|
# Most storages hard-fail, but xandikos doesn't.
|
||||||
s.upload(format_item(VCARD_TEMPLATE))
|
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
|
||||||
except (exceptions.Error, requests.exceptions.HTTPError):
|
await s.upload(format_item(VCARD_TEMPLATE))
|
||||||
pass
|
|
||||||
assert not list(s.list())
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
|
||||||
# The `arg` param is not named `item_types` because that would hit
|
# The `arg` param is not named `item_types` because that would hit
|
||||||
# https://bitbucket.org/pytest-dev/pytest/issue/745/
|
# https://bitbucket.org/pytest-dev/pytest/issue/745/
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"arg,calls_num",
|
("arg", "calls_num"),
|
||||||
[
|
[
|
||||||
(("VTODO",), 1),
|
(("VTODO",), 1),
|
||||||
(("VEVENT",), 1),
|
(("VEVENT",), 1),
|
||||||
|
|
@ -44,10 +50,11 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
||||||
def test_item_types_performance(
|
@pytest.mark.asyncio
|
||||||
|
async def test_item_types_performance(
|
||||||
self, get_storage_args, arg, calls_num, monkeypatch
|
self, get_storage_args, arg, calls_num, monkeypatch
|
||||||
):
|
):
|
||||||
s = self.storage_class(item_types=arg, **get_storage_args())
|
s = self.storage_class(item_types=arg, **await get_storage_args())
|
||||||
old_parse = s._parse_prop_responses
|
old_parse = s._parse_prop_responses
|
||||||
calls = []
|
calls = []
|
||||||
|
|
||||||
|
|
@ -56,17 +63,18 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
return old_parse(*a, **kw)
|
return old_parse(*a, **kw)
|
||||||
|
|
||||||
monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
|
monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
|
||||||
list(s.list())
|
await aiostream.stream.list(s.list())
|
||||||
assert len(calls) == calls_num
|
assert len(calls) == calls_num
|
||||||
|
|
||||||
@pytest.mark.xfail(
|
@pytest.mark.xfail(
|
||||||
dav_server == "radicale", reason="Radicale doesn't support timeranges."
|
dav_server == "radicale", reason="Radicale doesn't support timeranges."
|
||||||
)
|
)
|
||||||
def test_timerange_correctness(self, get_storage_args):
|
@pytest.mark.asyncio
|
||||||
|
async def test_timerange_correctness(self, get_storage_args):
|
||||||
start_date = datetime.datetime(2013, 9, 10)
|
start_date = datetime.datetime(2013, 9, 10)
|
||||||
end_date = datetime.datetime(2013, 9, 13)
|
end_date = datetime.datetime(2013, 9, 13)
|
||||||
s = self.storage_class(
|
s = self.storage_class(
|
||||||
start_date=start_date, end_date=end_date, **get_storage_args()
|
start_date=start_date, end_date=end_date, **await get_storage_args()
|
||||||
)
|
)
|
||||||
|
|
||||||
too_old_item = format_item(
|
too_old_item = format_item(
|
||||||
|
|
@ -123,50 +131,44 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
).strip()
|
).strip()
|
||||||
)
|
)
|
||||||
|
|
||||||
s.upload(too_old_item)
|
await s.upload(too_old_item)
|
||||||
s.upload(too_new_item)
|
await s.upload(too_new_item)
|
||||||
expected_href, _ = s.upload(good_item)
|
expected_href, _ = await s.upload(good_item)
|
||||||
|
|
||||||
((actual_href, _),) = s.list()
|
((actual_href, _),) = await aiostream.stream.list(s.list())
|
||||||
assert actual_href == expected_href
|
assert actual_href == expected_href
|
||||||
|
|
||||||
def test_invalid_resource(self, monkeypatch, get_storage_args):
|
@pytest.mark.asyncio
|
||||||
calls = []
|
async def test_invalid_resource(self, monkeypatch, get_storage_args):
|
||||||
args = get_storage_args(collection=None)
|
args = await get_storage_args(collection=None)
|
||||||
|
|
||||||
def request(session, method, url, **kwargs):
|
with aioresponses() as m:
|
||||||
assert url == args["url"]
|
m.add(args["url"], method="PROPFIND", status=200, body="Hello world")
|
||||||
calls.append(None)
|
|
||||||
|
|
||||||
r = requests.Response()
|
with pytest.raises(ValueError):
|
||||||
r.status_code = 200
|
s = self.storage_class(**args)
|
||||||
r._content = b"Hello World."
|
await aiostream.stream.list(s.list())
|
||||||
return r
|
|
||||||
|
|
||||||
monkeypatch.setattr("requests.sessions.Session.request", request)
|
assert len(m.requests) == 1
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
s = self.storage_class(**args)
|
|
||||||
list(s.list())
|
|
||||||
assert len(calls) == 1
|
|
||||||
|
|
||||||
@pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
|
@pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
|
||||||
@pytest.mark.skipif(
|
@pytest.mark.skipif(
|
||||||
dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
|
dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
|
||||||
)
|
)
|
||||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
||||||
def test_item_types_general(self, s):
|
@pytest.mark.asyncio
|
||||||
event = s.upload(format_item(EVENT_TEMPLATE))[0]
|
async def test_item_types_general(self, s):
|
||||||
task = s.upload(format_item(TASK_TEMPLATE))[0]
|
event = (await s.upload(format_item(EVENT_TEMPLATE)))[0]
|
||||||
|
task = (await s.upload(format_item(TASK_TEMPLATE)))[0]
|
||||||
s.item_types = ("VTODO", "VEVENT")
|
s.item_types = ("VTODO", "VEVENT")
|
||||||
|
|
||||||
def hrefs():
|
async def hrefs():
|
||||||
return {href for href, etag in s.list()}
|
return {href async for href, etag in s.list()}
|
||||||
|
|
||||||
assert hrefs() == {event, task}
|
assert await hrefs() == {event, task}
|
||||||
s.item_types = ("VTODO",)
|
s.item_types = ("VTODO",)
|
||||||
assert hrefs() == {task}
|
assert await hrefs() == {task}
|
||||||
s.item_types = ("VEVENT",)
|
s.item_types = ("VEVENT",)
|
||||||
assert hrefs() == {event}
|
assert await hrefs() == {event}
|
||||||
s.item_types = ()
|
s.item_types = ()
|
||||||
assert hrefs() == {event, task}
|
assert await hrefs() == {event, task}
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,11 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from . import DAVStorageTests
|
|
||||||
from vdirsyncer.storage.dav import CardDAVStorage
|
from vdirsyncer.storage.dav import CardDAVStorage
|
||||||
|
|
||||||
|
from . import DAVStorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestCardDAVStorage(DAVStorageTests):
|
class TestCardDAVStorage(DAVStorageTests):
|
||||||
storage_class = CardDAVStorage
|
storage_class = CardDAVStorage
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,10 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer.storage.dav import _BAD_XML_CHARS
|
from vdirsyncer.storage.dav import _BAD_XML_CHARS
|
||||||
from vdirsyncer.storage.dav import _merge_xml
|
from vdirsyncer.storage.dav import _merge_xml
|
||||||
|
from vdirsyncer.storage.dav import _normalize_href
|
||||||
from vdirsyncer.storage.dav import _parse_xml
|
from vdirsyncer.storage.dav import _parse_xml
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -38,9 +41,19 @@ def test_xml_utilities():
|
||||||
def test_xml_specialchars(char):
|
def test_xml_specialchars(char):
|
||||||
x = _parse_xml(
|
x = _parse_xml(
|
||||||
'<?xml version="1.0" encoding="UTF-8" ?>'
|
'<?xml version="1.0" encoding="UTF-8" ?>'
|
||||||
"<foo>ye{}s\r\n"
|
f"<foo>ye{chr(char)}s\r\n"
|
||||||
"hello</foo>".format(chr(char)).encode("ascii")
|
"hello</foo>".encode("ascii")
|
||||||
)
|
)
|
||||||
|
|
||||||
if char in _BAD_XML_CHARS:
|
if char in _BAD_XML_CHARS:
|
||||||
assert x.text == "yes\nhello"
|
assert x.text == "yes\nhello"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"href",
|
||||||
|
[
|
||||||
|
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_normalize_href(href):
|
||||||
|
assert href == _normalize_href("https://example.com", href)
|
||||||
|
|
|
||||||
Binary file not shown.
|
|
@ -1,122 +0,0 @@
|
||||||
"""
|
|
||||||
Django settings for etesync_server project.
|
|
||||||
|
|
||||||
Generated by 'django-admin startproject' using Django 1.10.6.
|
|
||||||
|
|
||||||
For more information on this file, see
|
|
||||||
https://docs.djangoproject.com/en/1.10/topics/settings/
|
|
||||||
|
|
||||||
For the full list of settings and their values, see
|
|
||||||
https://docs.djangoproject.com/en/1.10/ref/settings/
|
|
||||||
"""
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
|
||||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
|
||||||
|
|
||||||
|
|
||||||
# Quick-start development settings - unsuitable for production
|
|
||||||
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
|
|
||||||
|
|
||||||
# SECURITY WARNING: keep the secret key used in production secret!
|
|
||||||
SECRET_KEY = "d7r(p-9=$3a@bbt%*+$p@4)cej13nzd0gmnt8+m0bitb=-umj#"
|
|
||||||
|
|
||||||
# SECURITY WARNING: don't run with debug turned on in production!
|
|
||||||
DEBUG = True
|
|
||||||
|
|
||||||
ALLOWED_HOSTS = []
|
|
||||||
|
|
||||||
|
|
||||||
# Application definition
|
|
||||||
|
|
||||||
INSTALLED_APPS = [
|
|
||||||
"django.contrib.admin",
|
|
||||||
"django.contrib.auth",
|
|
||||||
"django.contrib.contenttypes",
|
|
||||||
"django.contrib.sessions",
|
|
||||||
"django.contrib.messages",
|
|
||||||
"django.contrib.staticfiles",
|
|
||||||
"rest_framework",
|
|
||||||
"rest_framework.authtoken",
|
|
||||||
"journal.apps.JournalConfig",
|
|
||||||
]
|
|
||||||
|
|
||||||
MIDDLEWARE = [
|
|
||||||
"django.middleware.security.SecurityMiddleware",
|
|
||||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
|
||||||
"django.middleware.common.CommonMiddleware",
|
|
||||||
"django.middleware.csrf.CsrfViewMiddleware",
|
|
||||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
|
||||||
"django.contrib.messages.middleware.MessageMiddleware",
|
|
||||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
|
||||||
]
|
|
||||||
|
|
||||||
ROOT_URLCONF = "etesync_server.urls"
|
|
||||||
|
|
||||||
TEMPLATES = [
|
|
||||||
{
|
|
||||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
|
||||||
"DIRS": [],
|
|
||||||
"APP_DIRS": True,
|
|
||||||
"OPTIONS": {
|
|
||||||
"context_processors": [
|
|
||||||
"django.template.context_processors.debug",
|
|
||||||
"django.template.context_processors.request",
|
|
||||||
"django.contrib.auth.context_processors.auth",
|
|
||||||
"django.contrib.messages.context_processors.messages",
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
WSGI_APPLICATION = "etesync_server.wsgi.application"
|
|
||||||
|
|
||||||
|
|
||||||
# Database
|
|
||||||
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
|
|
||||||
|
|
||||||
DATABASES = {
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.sqlite3",
|
|
||||||
"NAME": os.environ.get("ETESYNC_DB_PATH", os.path.join(BASE_DIR, "db.sqlite3")),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Password validation
|
|
||||||
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
|
|
||||||
|
|
||||||
AUTH_PASSWORD_VALIDATORS = [
|
|
||||||
{
|
|
||||||
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", # noqa
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", # noqa
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", # noqa
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
# Internationalization
|
|
||||||
# https://docs.djangoproject.com/en/1.10/topics/i18n/
|
|
||||||
|
|
||||||
LANGUAGE_CODE = "en-us"
|
|
||||||
|
|
||||||
TIME_ZONE = "UTC"
|
|
||||||
|
|
||||||
USE_I18N = True
|
|
||||||
|
|
||||||
USE_L10N = True
|
|
||||||
|
|
||||||
USE_TZ = True
|
|
||||||
|
|
||||||
|
|
||||||
# Static files (CSS, JavaScript, Images)
|
|
||||||
# https://docs.djangoproject.com/en/1.10/howto/static-files/
|
|
||||||
|
|
||||||
STATIC_URL = "/static/"
|
|
||||||
|
|
@ -1,37 +0,0 @@
|
||||||
"""etesync_server URL Configuration
|
|
||||||
|
|
||||||
The `urlpatterns` list routes URLs to views. For more information please see:
|
|
||||||
https://docs.djangoproject.com/en/1.10/topics/http/urls/
|
|
||||||
Examples:
|
|
||||||
Function views
|
|
||||||
1. Add an import: from my_app import views
|
|
||||||
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
|
|
||||||
Class-based views
|
|
||||||
1. Add an import: from other_app.views import Home
|
|
||||||
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
|
|
||||||
Including another URLconf
|
|
||||||
1. Import the include() function: from django.conf.urls import url, include
|
|
||||||
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
|
|
||||||
"""
|
|
||||||
from django.conf.urls import include
|
|
||||||
from django.conf.urls import url
|
|
||||||
from journal import views
|
|
||||||
from rest_framework_nested import routers
|
|
||||||
|
|
||||||
router = routers.DefaultRouter()
|
|
||||||
router.register(r"journals", views.JournalViewSet)
|
|
||||||
router.register(r"journal/(?P<journal_uid>[^/]+)", views.EntryViewSet)
|
|
||||||
router.register(r"user", views.UserInfoViewSet)
|
|
||||||
|
|
||||||
journals_router = routers.NestedSimpleRouter(router, r"journals", lookup="journal")
|
|
||||||
journals_router.register(r"members", views.MembersViewSet, base_name="journal-members")
|
|
||||||
journals_router.register(r"entries", views.EntryViewSet, base_name="journal-entries")
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
url(r"^api/v1/", include(router.urls)),
|
|
||||||
url(r"^api/v1/", include(journals_router.urls)),
|
|
||||||
]
|
|
||||||
|
|
||||||
# Adding this just for testing, this shouldn't be here normally
|
|
||||||
urlpatterns += (url(r"^reset/$", views.reset, name="reset_debug"),)
|
|
||||||
|
|
@ -1,15 +0,0 @@
|
||||||
"""
|
|
||||||
WSGI config for etesync_server project.
|
|
||||||
|
|
||||||
It exposes the WSGI callable as a module-level variable named ``application``.
|
|
||||||
|
|
||||||
For more information on this file, see
|
|
||||||
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
|
|
||||||
"""
|
|
||||||
import os
|
|
||||||
|
|
||||||
from django.core.wsgi import get_wsgi_application
|
|
||||||
|
|
||||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
|
|
||||||
|
|
||||||
application = get_wsgi_application()
|
|
||||||
|
|
@ -1,22 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
|
|
||||||
try:
|
|
||||||
from django.core.management import execute_from_command_line
|
|
||||||
except ImportError:
|
|
||||||
# The above import may fail for some other reason. Ensure that the
|
|
||||||
# issue is really that Django is missing to avoid masking other
|
|
||||||
# exceptions on Python 2.
|
|
||||||
try:
|
|
||||||
import django # noqa
|
|
||||||
except ImportError:
|
|
||||||
raise ImportError(
|
|
||||||
"Couldn't import Django. Are you sure it's installed and "
|
|
||||||
"available on your PYTHONPATH environment variable? Did you "
|
|
||||||
"forget to activate a virtual environment?"
|
|
||||||
)
|
|
||||||
raise
|
|
||||||
execute_from_command_line(sys.argv)
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
63ae6eec45b592d5c511f79b7b0c312d2c5f7d6a
|
|
||||||
Binary file not shown.
|
|
@ -1,88 +0,0 @@
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from .. import StorageTests
|
|
||||||
from vdirsyncer.storage.etesync import EtesyncCalendars
|
|
||||||
from vdirsyncer.storage.etesync import EtesyncContacts
|
|
||||||
|
|
||||||
|
|
||||||
pytestmark = pytest.mark.skipif(
|
|
||||||
os.getenv("ETESYNC_TESTS", "") != "true", reason="etesync tests disabled"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
|
||||||
def etesync_app(tmpdir_factory):
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "etesync_server"))
|
|
||||||
|
|
||||||
db = tmpdir_factory.mktemp("etesync").join("etesync.sqlite")
|
|
||||||
shutil.copy(
|
|
||||||
os.path.join(os.path.dirname(__file__), "etesync_server", "db.sqlite3"), str(db)
|
|
||||||
)
|
|
||||||
|
|
||||||
os.environ["ETESYNC_DB_PATH"] = str(db)
|
|
||||||
from etesync_server.wsgi import application
|
|
||||||
|
|
||||||
return application
|
|
||||||
|
|
||||||
|
|
||||||
class EtesyncTests(StorageTests):
|
|
||||||
|
|
||||||
supports_metadata = False
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def get_storage_args(self, request, get_item, tmpdir, etesync_app):
|
|
||||||
import wsgi_intercept
|
|
||||||
import wsgi_intercept.requests_intercept
|
|
||||||
|
|
||||||
wsgi_intercept.requests_intercept.install()
|
|
||||||
wsgi_intercept.add_wsgi_intercept("127.0.0.1", 8000, lambda: etesync_app)
|
|
||||||
|
|
||||||
def teardown():
|
|
||||||
wsgi_intercept.remove_wsgi_intercept("127.0.0.1", 8000)
|
|
||||||
wsgi_intercept.requests_intercept.uninstall()
|
|
||||||
|
|
||||||
request.addfinalizer(teardown)
|
|
||||||
|
|
||||||
with open(
|
|
||||||
os.path.join(os.path.dirname(__file__), "test@localhost/auth_token")
|
|
||||||
) as f:
|
|
||||||
token = f.read().strip()
|
|
||||||
headers = {"Authorization": "Token " + token}
|
|
||||||
r = requests.post(
|
|
||||||
"http://127.0.0.1:8000/reset/", headers=headers, allow_redirects=False
|
|
||||||
)
|
|
||||||
assert r.status_code == 200
|
|
||||||
|
|
||||||
def inner(collection="test"):
|
|
||||||
rv = {
|
|
||||||
"email": "test@localhost",
|
|
||||||
"db_path": str(tmpdir.join("etesync.db")),
|
|
||||||
"secrets_dir": os.path.dirname(__file__),
|
|
||||||
"server_url": "http://127.0.0.1:8000/",
|
|
||||||
}
|
|
||||||
if collection is not None:
|
|
||||||
rv = self.storage_class.create_collection(collection=collection, **rv)
|
|
||||||
return rv
|
|
||||||
|
|
||||||
return inner
|
|
||||||
|
|
||||||
|
|
||||||
class TestContacts(EtesyncTests):
|
|
||||||
storage_class = EtesyncContacts
|
|
||||||
|
|
||||||
@pytest.fixture(params=["VCARD"])
|
|
||||||
def item_type(self, request):
|
|
||||||
return request.param
|
|
||||||
|
|
||||||
|
|
||||||
class TestCalendars(EtesyncTests):
|
|
||||||
storage_class = EtesyncCalendars
|
|
||||||
|
|
||||||
@pytest.fixture(params=["VEVENT"])
|
|
||||||
def item_type(self, request):
|
|
||||||
return request.param
|
|
||||||
|
|
@ -1,15 +1,25 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
class ServerMixin:
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, request, tmpdir, slow_create_collection, baikal_server):
|
def get_storage_args(
|
||||||
def inner(collection="test"):
|
self,
|
||||||
|
request,
|
||||||
|
tmpdir,
|
||||||
|
slow_create_collection,
|
||||||
|
baikal_server,
|
||||||
|
aio_connector,
|
||||||
|
):
|
||||||
|
async def inner(collection="test"):
|
||||||
base_url = "http://127.0.0.1:8002/"
|
base_url = "http://127.0.0.1:8002/"
|
||||||
args = {
|
args = {
|
||||||
"url": base_url,
|
"url": base_url,
|
||||||
"username": "baikal",
|
"username": "baikal",
|
||||||
"password": "baikal",
|
"password": "baikal",
|
||||||
|
"connector": aio_connector,
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.storage_class.fileext == ".vcf":
|
if self.storage_class.fileext == ".vcf":
|
||||||
|
|
@ -18,7 +28,11 @@ class ServerMixin:
|
||||||
args["url"] = base_url + "cal.php/"
|
args["url"] = base_url + "cal.php/"
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = slow_create_collection(self.storage_class, args, collection)
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
return args
|
return args
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
|
|
@ -11,7 +13,7 @@ try:
|
||||||
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
|
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
|
||||||
}
|
}
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
pytestmark = pytest.mark.skip("Missing envkey: {}".format(str(e)))
|
pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.flaky(reruns=5)
|
@pytest.mark.flaky(reruns=5)
|
||||||
|
|
@ -23,11 +25,11 @@ class ServerMixin:
|
||||||
elif self.storage_class.fileext == ".vcf":
|
elif self.storage_class.fileext == ".vcf":
|
||||||
pytest.skip("No carddav")
|
pytest.skip("No carddav")
|
||||||
else:
|
else:
|
||||||
raise RuntimeError()
|
raise RuntimeError
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, davical_args, request):
|
def get_storage_args(self, davical_args, request):
|
||||||
def inner(collection="test"):
|
async def inner(collection="test"):
|
||||||
if collection is None:
|
if collection is None:
|
||||||
return davical_args
|
return davical_args
|
||||||
|
|
||||||
|
|
@ -39,7 +41,8 @@ class ServerMixin:
|
||||||
)
|
)
|
||||||
s = self.storage_class(**args)
|
s = self.storage_class(**args)
|
||||||
if not list(s.list()):
|
if not list(s.list()):
|
||||||
request.addfinalizer(lambda: s.session.request("DELETE", ""))
|
# See: https://stackoverflow.com/a/33984811
|
||||||
|
request.addfinalizer(lambda x=s: x.session.request("DELETE", ""))
|
||||||
return args
|
return args
|
||||||
|
|
||||||
raise RuntimeError("Failed to find free collection.")
|
raise RuntimeError("Failed to find free collection.")
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
@ -5,16 +7,20 @@ import pytest
|
||||||
|
|
||||||
class ServerMixin:
|
class ServerMixin:
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, item_type, slow_create_collection):
|
def get_storage_args(self, slow_create_collection, aio_connector, request):
|
||||||
if item_type == "VTODO":
|
if (
|
||||||
|
"item_type" in request.fixturenames
|
||||||
|
and request.getfixturevalue("item_type") == "VTODO"
|
||||||
|
):
|
||||||
# Fastmail has non-standard support for TODOs
|
# Fastmail has non-standard support for TODOs
|
||||||
# See https://github.com/pimutils/vdirsyncer/issues/824
|
# See https://github.com/pimutils/vdirsyncer/issues/824
|
||||||
pytest.skip("Fastmail has non-standard VTODO support.")
|
pytest.skip("Fastmail has non-standard VTODO support.")
|
||||||
|
|
||||||
def inner(collection="test"):
|
async def inner(collection="test"):
|
||||||
args = {
|
args = {
|
||||||
"username": os.environ["FASTMAIL_USERNAME"],
|
"username": os.environ["FASTMAIL_USERNAME"],
|
||||||
"password": os.environ["FASTMAIL_PASSWORD"],
|
"password": os.environ["FASTMAIL_PASSWORD"],
|
||||||
|
"connector": aio_connector,
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.storage_class.fileext == ".ics":
|
if self.storage_class.fileext == ".ics":
|
||||||
|
|
@ -22,10 +28,15 @@ class ServerMixin:
|
||||||
elif self.storage_class.fileext == ".vcf":
|
elif self.storage_class.fileext == ".vcf":
|
||||||
args["url"] = "https://carddav.fastmail.com/"
|
args["url"] = "https://carddav.fastmail.com/"
|
||||||
else:
|
else:
|
||||||
raise RuntimeError()
|
raise RuntimeError
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = slow_create_collection(self.storage_class, args, collection)
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
|
|
||||||
return args
|
return args
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
@ -8,10 +10,10 @@ class ServerMixin:
|
||||||
def get_storage_args(self, item_type, slow_create_collection):
|
def get_storage_args(self, item_type, slow_create_collection):
|
||||||
if item_type != "VEVENT":
|
if item_type != "VEVENT":
|
||||||
# iCloud collections can either be calendars or task lists.
|
# iCloud collections can either be calendars or task lists.
|
||||||
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa
|
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
|
||||||
pytest.skip("iCloud doesn't support anything else than VEVENT")
|
pytest.skip("iCloud doesn't support anything else than VEVENT")
|
||||||
|
|
||||||
def inner(collection="test"):
|
async def inner(collection="test"):
|
||||||
args = {
|
args = {
|
||||||
"username": os.environ["ICLOUD_USERNAME"],
|
"username": os.environ["ICLOUD_USERNAME"],
|
||||||
"password": os.environ["ICLOUD_PASSWORD"],
|
"password": os.environ["ICLOUD_PASSWORD"],
|
||||||
|
|
@ -22,7 +24,7 @@ class ServerMixin:
|
||||||
elif self.storage_class.fileext == ".vcf":
|
elif self.storage_class.fileext == ".vcf":
|
||||||
args["url"] = "https://contacts.icloud.com/"
|
args["url"] = "https://contacts.icloud.com/"
|
||||||
else:
|
else:
|
||||||
raise RuntimeError()
|
raise RuntimeError
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = slow_create_collection(self.storage_class, args, collection)
|
args = slow_create_collection(self.storage_class, args, collection)
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -9,17 +11,23 @@ class ServerMixin:
|
||||||
tmpdir,
|
tmpdir,
|
||||||
slow_create_collection,
|
slow_create_collection,
|
||||||
radicale_server,
|
radicale_server,
|
||||||
|
aio_connector,
|
||||||
):
|
):
|
||||||
def inner(collection="test"):
|
async def inner(collection="test"):
|
||||||
url = "http://127.0.0.1:8001/"
|
url = "http://127.0.0.1:8001/"
|
||||||
args = {
|
args = {
|
||||||
"url": url,
|
"url": url,
|
||||||
"username": "radicale",
|
"username": "radicale",
|
||||||
"password": "radicale",
|
"password": "radicale",
|
||||||
|
"connector": aio_connector,
|
||||||
}
|
}
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = slow_create_collection(self.storage_class, args, collection)
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
return args
|
return args
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -9,13 +11,19 @@ class ServerMixin:
|
||||||
tmpdir,
|
tmpdir,
|
||||||
slow_create_collection,
|
slow_create_collection,
|
||||||
xandikos_server,
|
xandikos_server,
|
||||||
|
aio_connector,
|
||||||
):
|
):
|
||||||
def inner(collection="test"):
|
async def inner(collection="test"):
|
||||||
url = "http://127.0.0.1:8000/"
|
url = "http://127.0.0.1:8000/"
|
||||||
args = {"url": url}
|
args = {"url": url, "connector": aio_connector}
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = slow_create_collection(self.storage_class, args, collection)
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
|
|
||||||
return args
|
return args
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,21 +1,25 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from . import StorageTests
|
|
||||||
from vdirsyncer.storage.filesystem import FilesystemStorage
|
from vdirsyncer.storage.filesystem import FilesystemStorage
|
||||||
from vdirsyncer.vobject import Item
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestFilesystemStorage(StorageTests):
|
class TestFilesystemStorage(StorageTests):
|
||||||
storage_class = FilesystemStorage
|
storage_class = FilesystemStorage
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, tmpdir):
|
def get_storage_args(self, tmpdir):
|
||||||
def inner(collection="test"):
|
async def inner(collection="test"):
|
||||||
rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
|
rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
rv = self.storage_class.create_collection(**rv)
|
rv = await self.storage_class.create_collection(**rv)
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
@ -26,7 +30,8 @@ class TestFilesystemStorage(StorageTests):
|
||||||
f.write("stub")
|
f.write("stub")
|
||||||
self.storage_class(str(tmpdir) + "/hue", ".txt")
|
self.storage_class(str(tmpdir) + "/hue", ".txt")
|
||||||
|
|
||||||
def test_broken_data(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
|
async def test_broken_data(self, tmpdir):
|
||||||
s = self.storage_class(str(tmpdir), ".txt")
|
s = self.storage_class(str(tmpdir), ".txt")
|
||||||
|
|
||||||
class BrokenItem:
|
class BrokenItem:
|
||||||
|
|
@ -35,64 +40,71 @@ class TestFilesystemStorage(StorageTests):
|
||||||
ident = uid
|
ident = uid
|
||||||
|
|
||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
s.upload(BrokenItem)
|
await s.upload(BrokenItem)
|
||||||
assert not tmpdir.listdir()
|
assert not tmpdir.listdir()
|
||||||
|
|
||||||
def test_ident_with_slash(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
|
async def test_ident_with_slash(self, tmpdir):
|
||||||
s = self.storage_class(str(tmpdir), ".txt")
|
s = self.storage_class(str(tmpdir), ".txt")
|
||||||
s.upload(Item("UID:a/b/c"))
|
await s.upload(Item("UID:a/b/c"))
|
||||||
(item_file,) = tmpdir.listdir()
|
(item_file,) = tmpdir.listdir()
|
||||||
assert "/" not in item_file.basename and item_file.isfile()
|
assert "/" not in item_file.basename
|
||||||
|
assert item_file.isfile()
|
||||||
|
|
||||||
def test_ignore_tmp_files(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_tmp_files(self, tmpdir):
|
||||||
"""Test that files with .tmp suffix beside .ics files are ignored."""
|
"""Test that files with .tmp suffix beside .ics files are ignored."""
|
||||||
s = self.storage_class(str(tmpdir), ".ics")
|
s = self.storage_class(str(tmpdir), ".ics")
|
||||||
s.upload(Item("UID:xyzxyz"))
|
await s.upload(Item("UID:xyzxyz"))
|
||||||
(item_file,) = tmpdir.listdir()
|
(item_file,) = tmpdir.listdir()
|
||||||
item_file.copy(item_file.new(ext="tmp"))
|
item_file.copy(item_file.new(ext="tmp"))
|
||||||
assert len(tmpdir.listdir()) == 2
|
assert len(tmpdir.listdir()) == 2
|
||||||
assert len(list(s.list())) == 1
|
assert len(await aiostream.stream.list(s.list())) == 1
|
||||||
|
|
||||||
def test_ignore_tmp_files_empty_fileext(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_tmp_files_empty_fileext(self, tmpdir):
|
||||||
"""Test that files with .tmp suffix are ignored with empty fileext."""
|
"""Test that files with .tmp suffix are ignored with empty fileext."""
|
||||||
s = self.storage_class(str(tmpdir), "")
|
s = self.storage_class(str(tmpdir), "")
|
||||||
s.upload(Item("UID:xyzxyz"))
|
await s.upload(Item("UID:xyzxyz"))
|
||||||
(item_file,) = tmpdir.listdir()
|
(item_file,) = tmpdir.listdir()
|
||||||
item_file.copy(item_file.new(ext="tmp"))
|
item_file.copy(item_file.new(ext="tmp"))
|
||||||
assert len(tmpdir.listdir()) == 2
|
assert len(tmpdir.listdir()) == 2
|
||||||
# assert False, tmpdir.listdir() # enable to see the created filename
|
# assert False, tmpdir.listdir() # enable to see the created filename
|
||||||
assert len(list(s.list())) == 1
|
assert len(await aiostream.stream.list(s.list())) == 1
|
||||||
|
|
||||||
def test_ignore_files_typical_backup(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_files_typical_backup(self, tmpdir):
|
||||||
"""Test file-name ignorance with typical backup ending ~."""
|
"""Test file-name ignorance with typical backup ending ~."""
|
||||||
ignorext = "~" # without dot
|
ignorext = "~" # without dot
|
||||||
|
|
||||||
storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
|
storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
|
||||||
storage.upload(Item("UID:xyzxyz"))
|
await storage.upload(Item("UID:xyzxyz"))
|
||||||
(item_file,) = tmpdir.listdir()
|
(item_file,) = tmpdir.listdir()
|
||||||
item_file.copy(item_file.new(basename=item_file.basename + ignorext))
|
item_file.copy(item_file.new(basename=item_file.basename + ignorext))
|
||||||
|
|
||||||
assert len(tmpdir.listdir()) == 2
|
assert len(tmpdir.listdir()) == 2
|
||||||
assert len(list(storage.list())) == 1
|
assert len(await aiostream.stream.list(storage.list())) == 1
|
||||||
|
|
||||||
def test_too_long_uid(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
|
async def test_too_long_uid(self, tmpdir):
|
||||||
storage = self.storage_class(str(tmpdir), ".txt")
|
storage = self.storage_class(str(tmpdir), ".txt")
|
||||||
item = Item("UID:" + "hue" * 600)
|
item = Item("UID:" + "hue" * 600)
|
||||||
|
|
||||||
href, etag = storage.upload(item)
|
href, _etag = await storage.upload(item)
|
||||||
assert item.uid not in href
|
assert item.uid not in href
|
||||||
|
|
||||||
def test_post_hook_inactive(self, tmpdir, monkeypatch):
|
@pytest.mark.asyncio
|
||||||
|
async def test_post_hook_inactive(self, tmpdir, monkeypatch):
|
||||||
def check_call_mock(*args, **kwargs):
|
def check_call_mock(*args, **kwargs):
|
||||||
raise AssertionError()
|
raise AssertionError
|
||||||
|
|
||||||
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
||||||
|
|
||||||
s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
|
s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
|
||||||
s.upload(Item("UID:a/b/c"))
|
await s.upload(Item("UID:a/b/c"))
|
||||||
|
|
||||||
def test_post_hook_active(self, tmpdir, monkeypatch):
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_post_hook_active(self, tmpdir, monkeypatch):
|
||||||
calls = []
|
calls = []
|
||||||
exe = "foo"
|
exe = "foo"
|
||||||
|
|
||||||
|
|
@ -104,14 +116,17 @@ class TestFilesystemStorage(StorageTests):
|
||||||
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
||||||
|
|
||||||
s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
|
s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
|
||||||
s.upload(Item("UID:a/b/c"))
|
await s.upload(Item("UID:a/b/c"))
|
||||||
assert calls
|
assert calls
|
||||||
|
|
||||||
def test_ignore_git_dirs(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_git_dirs(self, tmpdir):
|
||||||
tmpdir.mkdir(".git").mkdir("foo")
|
tmpdir.mkdir(".git").mkdir("foo")
|
||||||
tmpdir.mkdir("a")
|
tmpdir.mkdir("a")
|
||||||
tmpdir.mkdir("b")
|
tmpdir.mkdir("b")
|
||||||
assert {c["collection"] for c in self.storage_class.discover(str(tmpdir))} == {
|
|
||||||
"a",
|
expected = {"a", "b"}
|
||||||
"b",
|
actual = {
|
||||||
|
c["collection"] async for c in self.storage_class.discover(str(tmpdir))
|
||||||
}
|
}
|
||||||
|
assert actual == expected
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,22 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
import pytest
|
import pytest
|
||||||
from requests import Response
|
from aioresponses import CallbackResult
|
||||||
|
from aioresponses import aioresponses
|
||||||
|
|
||||||
from tests import normalize_item
|
from tests import normalize_item
|
||||||
from vdirsyncer.exceptions import UserError
|
from vdirsyncer.exceptions import UserError
|
||||||
|
from vdirsyncer.http import BasicAuthMethod
|
||||||
|
from vdirsyncer.http import DigestAuthMethod
|
||||||
|
from vdirsyncer.http import UsageLimitReached
|
||||||
|
from vdirsyncer.http import request
|
||||||
from vdirsyncer.storage.http import HttpStorage
|
from vdirsyncer.storage.http import HttpStorage
|
||||||
from vdirsyncer.storage.http import prepare_auth
|
from vdirsyncer.storage.http import prepare_auth
|
||||||
|
|
||||||
|
|
||||||
def test_list(monkeypatch):
|
@pytest.mark.asyncio
|
||||||
|
async def test_list(aio_connector):
|
||||||
collection_url = "http://127.0.0.1/calendar/collection.ics"
|
collection_url = "http://127.0.0.1/calendar/collection.ics"
|
||||||
|
|
||||||
items = [
|
items = [
|
||||||
|
|
@ -32,67 +41,68 @@ def test_list(monkeypatch):
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
responses = ["\n".join(["BEGIN:VCALENDAR"] + items + ["END:VCALENDAR"])] * 2
|
responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2
|
||||||
|
|
||||||
def get(self, method, url, *a, **kw):
|
def callback(url, headers, **kwargs):
|
||||||
assert method == "GET"
|
assert headers["User-Agent"].startswith("vdirsyncer/")
|
||||||
assert url == collection_url
|
|
||||||
r = Response()
|
|
||||||
r.status_code = 200
|
|
||||||
assert responses
|
assert responses
|
||||||
r._content = responses.pop().encode("utf-8")
|
|
||||||
r.headers["Content-Type"] = "text/calendar"
|
|
||||||
r.encoding = "ISO-8859-1"
|
|
||||||
return r
|
|
||||||
|
|
||||||
monkeypatch.setattr("requests.sessions.Session.request", get)
|
return CallbackResult(
|
||||||
|
status=200,
|
||||||
|
body=responses.pop().encode("utf-8"),
|
||||||
|
headers={"Content-Type": "text/calendar; charset=iso-8859-1"},
|
||||||
|
)
|
||||||
|
|
||||||
s = HttpStorage(url=collection_url)
|
with aioresponses() as m:
|
||||||
|
m.get(collection_url, callback=callback, repeat=True)
|
||||||
|
|
||||||
found_items = {}
|
s = HttpStorage(url=collection_url, connector=aio_connector)
|
||||||
|
|
||||||
for href, etag in s.list():
|
found_items = {}
|
||||||
item, etag2 = s.get(href)
|
|
||||||
assert item.uid is not None
|
|
||||||
assert etag2 == etag
|
|
||||||
found_items[normalize_item(item)] = href
|
|
||||||
|
|
||||||
expected = {
|
async for href, etag in s.list():
|
||||||
normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
|
item, etag2 = await s.get(href)
|
||||||
}
|
assert item.uid is not None
|
||||||
|
assert etag2 == etag
|
||||||
|
found_items[normalize_item(item)] = href
|
||||||
|
|
||||||
assert set(found_items) == expected
|
expected = {
|
||||||
|
normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
|
||||||
|
}
|
||||||
|
|
||||||
for href, etag in s.list():
|
assert set(found_items) == expected
|
||||||
item, etag2 = s.get(href)
|
|
||||||
assert item.uid is not None
|
async for href, etag in s.list():
|
||||||
assert etag2 == etag
|
item, etag2 = await s.get(href)
|
||||||
assert found_items[normalize_item(item)] == href
|
assert item.uid is not None
|
||||||
|
assert etag2 == etag
|
||||||
|
assert found_items[normalize_item(item)] == href
|
||||||
|
|
||||||
|
|
||||||
def test_readonly_param():
|
def test_readonly_param(aio_connector):
|
||||||
|
"""The ``readonly`` param cannot be ``False``."""
|
||||||
|
|
||||||
url = "http://example.com/"
|
url = "http://example.com/"
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
HttpStorage(url=url, read_only=False)
|
HttpStorage(url=url, read_only=False, connector=aio_connector)
|
||||||
|
|
||||||
a = HttpStorage(url=url, read_only=True).read_only
|
a = HttpStorage(url=url, read_only=True, connector=aio_connector)
|
||||||
b = HttpStorage(url=url, read_only=None).read_only
|
b = HttpStorage(url=url, read_only=None, connector=aio_connector)
|
||||||
assert a is b is True
|
|
||||||
|
assert a.read_only is b.read_only is True
|
||||||
|
|
||||||
|
|
||||||
def test_prepare_auth():
|
def test_prepare_auth():
|
||||||
assert prepare_auth(None, "", "") is None
|
assert prepare_auth(None, "", "") is None
|
||||||
|
|
||||||
assert prepare_auth(None, "user", "pwd") == ("user", "pwd")
|
assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd")
|
||||||
assert prepare_auth("basic", "user", "pwd") == ("user", "pwd")
|
assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd")
|
||||||
|
|
||||||
with pytest.raises(ValueError) as excinfo:
|
with pytest.raises(ValueError) as excinfo:
|
||||||
assert prepare_auth("basic", "", "pwd")
|
assert prepare_auth("basic", "", "pwd")
|
||||||
assert "you need to specify username and password" in str(excinfo.value).lower()
|
assert "you need to specify username and password" in str(excinfo.value).lower()
|
||||||
|
|
||||||
from requests.auth import HTTPDigestAuth
|
assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod)
|
||||||
|
|
||||||
assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)
|
|
||||||
|
|
||||||
with pytest.raises(ValueError) as excinfo:
|
with pytest.raises(ValueError) as excinfo:
|
||||||
prepare_auth("ladida", "user", "pwd")
|
prepare_auth("ladida", "user", "pwd")
|
||||||
|
|
@ -100,24 +110,54 @@ def test_prepare_auth():
|
||||||
assert "unknown authentication method" in str(excinfo.value).lower()
|
assert "unknown authentication method" in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
def test_prepare_auth_guess(monkeypatch):
|
def test_prepare_auth_guess():
|
||||||
import requests_toolbelt.auth.guess
|
# guess auth is currently not supported
|
||||||
|
|
||||||
assert isinstance(
|
|
||||||
prepare_auth("guess", "user", "pwd"), requests_toolbelt.auth.guess.GuessAuth
|
|
||||||
)
|
|
||||||
|
|
||||||
monkeypatch.delattr(requests_toolbelt.auth.guess, "GuessAuth")
|
|
||||||
|
|
||||||
with pytest.raises(UserError) as excinfo:
|
with pytest.raises(UserError) as excinfo:
|
||||||
prepare_auth("guess", "user", "pwd")
|
prepare_auth("guess", "usr", "pwd")
|
||||||
|
|
||||||
assert "requests_toolbelt is too old" in str(excinfo.value).lower()
|
assert "not supported" in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
def test_verify_false_disallowed():
|
def test_verify_false_disallowed(aio_connector):
|
||||||
with pytest.raises(ValueError) as excinfo:
|
with pytest.raises(ValueError) as excinfo:
|
||||||
HttpStorage(url="http://example.com", verify=False)
|
HttpStorage(url="http://example.com", verify=False, connector=aio_connector)
|
||||||
|
|
||||||
assert "forbidden" in str(excinfo.value).lower()
|
assert "must be a path to a pem-file." in str(excinfo.value).lower()
|
||||||
assert "consider setting verify_fingerprint" in str(excinfo.value).lower()
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_403_usage_limit_exceeded(aio_connector):
|
||||||
|
url = "http://127.0.0.1/test_403"
|
||||||
|
error_body = {
|
||||||
|
"error": {
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"domain": "usageLimits",
|
||||||
|
"message": "Calendar usage limits exceeded.",
|
||||||
|
"reason": "quotaExceeded",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"code": 403,
|
||||||
|
"message": "Calendar usage limits exceeded.",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
||||||
|
with aioresponses() as m:
|
||||||
|
m.get(url, status=403, payload=error_body, repeat=True)
|
||||||
|
with pytest.raises(UsageLimitReached):
|
||||||
|
await request("GET", url, session)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_403_without_usage_limits_domain(aio_connector):
|
||||||
|
"""A 403 JSON error without the Google 'usageLimits' domain should not be
|
||||||
|
treated as UsageLimitReached and should surface as ClientResponseError.
|
||||||
|
"""
|
||||||
|
url = "http://127.0.0.1/test_403_no_usage_limits"
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
||||||
|
with aioresponses() as m:
|
||||||
|
m.get(url, status=403, repeat=True)
|
||||||
|
with pytest.raises(aiohttp.ClientResponseError):
|
||||||
|
await request("GET", url, session)
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,16 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
from requests import Response
|
from aioresponses import CallbackResult
|
||||||
|
from aioresponses import aioresponses
|
||||||
|
|
||||||
import vdirsyncer.storage.http
|
import vdirsyncer.storage.http
|
||||||
from . import StorageTests
|
|
||||||
from vdirsyncer.storage.base import Storage
|
from vdirsyncer.storage.base import Storage
|
||||||
from vdirsyncer.storage.singlefile import SingleFileStorage
|
from vdirsyncer.storage.singlefile import SingleFileStorage
|
||||||
|
|
||||||
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class CombinedStorage(Storage):
|
class CombinedStorage(Storage):
|
||||||
"""A subclass of HttpStorage to make testing easier. It supports writes via
|
"""A subclass of HttpStorage to make testing easier. It supports writes via
|
||||||
|
|
@ -14,32 +19,33 @@ class CombinedStorage(Storage):
|
||||||
_repr_attributes = ("url", "path")
|
_repr_attributes = ("url", "path")
|
||||||
storage_name = "http_and_singlefile"
|
storage_name = "http_and_singlefile"
|
||||||
|
|
||||||
def __init__(self, url, path, **kwargs):
|
def __init__(self, url, path, *, connector, **kwargs):
|
||||||
if kwargs.get("collection", None) is not None:
|
if kwargs.get("collection") is not None:
|
||||||
raise ValueError()
|
raise ValueError
|
||||||
|
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
self.url = url
|
self.url = url
|
||||||
self.path = path
|
self.path = path
|
||||||
self._reader = vdirsyncer.storage.http.HttpStorage(url=url)
|
self._reader = vdirsyncer.storage.http.HttpStorage(url=url, connector=connector)
|
||||||
self._reader._ignore_uids = False
|
self._reader._ignore_uids = False
|
||||||
self._writer = SingleFileStorage(path=path)
|
self._writer = SingleFileStorage(path=path)
|
||||||
|
|
||||||
def list(self, *a, **kw):
|
async def list(self, *a, **kw):
|
||||||
return self._reader.list(*a, **kw)
|
async for item in self._reader.list(*a, **kw):
|
||||||
|
yield item
|
||||||
|
|
||||||
def get(self, *a, **kw):
|
async def get(self, *a, **kw):
|
||||||
self.list()
|
await aiostream.stream.list(self.list())
|
||||||
return self._reader.get(*a, **kw)
|
return await self._reader.get(*a, **kw)
|
||||||
|
|
||||||
def upload(self, *a, **kw):
|
async def upload(self, *a, **kw):
|
||||||
return self._writer.upload(*a, **kw)
|
return await self._writer.upload(*a, **kw)
|
||||||
|
|
||||||
def update(self, *a, **kw):
|
async def update(self, *a, **kw):
|
||||||
return self._writer.update(*a, **kw)
|
return await self._writer.update(*a, **kw)
|
||||||
|
|
||||||
def delete(self, *a, **kw):
|
async def delete(self, *a, **kw):
|
||||||
return self._writer.delete(*a, **kw)
|
return await self._writer.delete(*a, **kw)
|
||||||
|
|
||||||
|
|
||||||
class TestHttpStorage(StorageTests):
|
class TestHttpStorage(StorageTests):
|
||||||
|
|
@ -51,28 +57,37 @@ class TestHttpStorage(StorageTests):
|
||||||
def setup_tmpdir(self, tmpdir, monkeypatch):
|
def setup_tmpdir(self, tmpdir, monkeypatch):
|
||||||
self.tmpfile = str(tmpdir.ensure("collection.txt"))
|
self.tmpfile = str(tmpdir.ensure("collection.txt"))
|
||||||
|
|
||||||
def _request(method, url, *args, **kwargs):
|
def callback(url, headers, **kwargs):
|
||||||
assert method == "GET"
|
"""Read our tmpfile at request time.
|
||||||
assert url == "http://localhost:123/collection.txt"
|
|
||||||
assert "vdirsyncer" in kwargs["headers"]["User-Agent"]
|
|
||||||
r = Response()
|
|
||||||
r.status_code = 200
|
|
||||||
try:
|
|
||||||
with open(self.tmpfile, "rb") as f:
|
|
||||||
r._content = f.read()
|
|
||||||
except OSError:
|
|
||||||
r._content = b""
|
|
||||||
|
|
||||||
r.headers["Content-Type"] = "text/calendar"
|
We can't just read this during test setup since the file get written to
|
||||||
r.encoding = "utf-8"
|
during test execution.
|
||||||
return r
|
|
||||||
|
|
||||||
monkeypatch.setattr(vdirsyncer.storage.http, "request", _request)
|
It might make sense to actually run a server serving the local file.
|
||||||
|
"""
|
||||||
|
assert headers["User-Agent"].startswith("vdirsyncer/")
|
||||||
|
|
||||||
|
with open(self.tmpfile) as f:
|
||||||
|
body = f.read()
|
||||||
|
|
||||||
|
return CallbackResult(
|
||||||
|
status=200,
|
||||||
|
body=body,
|
||||||
|
headers={"Content-Type": "text/calendar; charset=utf-8"},
|
||||||
|
)
|
||||||
|
|
||||||
|
with aioresponses() as m:
|
||||||
|
m.get("http://localhost:123/collection.txt", callback=callback, repeat=True)
|
||||||
|
yield
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self):
|
def get_storage_args(self, aio_connector):
|
||||||
def inner(collection=None):
|
async def inner(collection=None):
|
||||||
assert collection is None
|
assert collection is None
|
||||||
return {"url": "http://localhost:123/collection.txt", "path": self.tmpfile}
|
return {
|
||||||
|
"url": "http://localhost:123/collection.txt",
|
||||||
|
"path": self.tmpfile,
|
||||||
|
"connector": aio_connector,
|
||||||
|
}
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,19 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from . import StorageTests
|
|
||||||
from vdirsyncer.storage.memory import MemoryStorage
|
from vdirsyncer.storage.memory import MemoryStorage
|
||||||
|
|
||||||
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestMemoryStorage(StorageTests):
|
class TestMemoryStorage(StorageTests):
|
||||||
|
|
||||||
storage_class = MemoryStorage
|
storage_class = MemoryStorage
|
||||||
supports_collections = False
|
supports_collections = False
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self):
|
def get_storage_args(self):
|
||||||
return lambda **kw: kw
|
async def inner(**args):
|
||||||
|
return args
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,20 +1,22 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from . import StorageTests
|
|
||||||
from vdirsyncer.storage.singlefile import SingleFileStorage
|
from vdirsyncer.storage.singlefile import SingleFileStorage
|
||||||
|
|
||||||
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestSingleFileStorage(StorageTests):
|
class TestSingleFileStorage(StorageTests):
|
||||||
|
|
||||||
storage_class = SingleFileStorage
|
storage_class = SingleFileStorage
|
||||||
supports_metadata = False
|
supports_metadata = False
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, tmpdir):
|
def get_storage_args(self, tmpdir):
|
||||||
def inner(collection="test"):
|
async def inner(collection="test"):
|
||||||
rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
|
rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
rv = self.storage_class.create_collection(**rv)
|
rv = await self.storage_class.create_collection(**rv)
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import io
|
import io
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
|
|
@ -7,7 +9,6 @@ from vdirsyncer import cli
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.cli.config import Config
|
from vdirsyncer.cli.config import Config
|
||||||
|
|
||||||
|
|
||||||
invalid = object()
|
invalid = object()
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -25,7 +26,7 @@ def read_config(tmpdir, monkeypatch):
|
||||||
|
|
||||||
|
|
||||||
def test_read_config(read_config):
|
def test_read_config(read_config):
|
||||||
errors, c = read_config(
|
_errors, c = read_config(
|
||||||
"""
|
"""
|
||||||
[general]
|
[general]
|
||||||
status_path = "/tmp/status/"
|
status_path = "/tmp/status/"
|
||||||
|
|
@ -221,3 +222,62 @@ def test_validate_collections_param():
|
||||||
x([["c", None, "b"]])
|
x([["c", None, "b"]])
|
||||||
x([["c", "a", None]])
|
x([["c", "a", None]])
|
||||||
x([["c", None, None]])
|
x([["c", None, None]])
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_implicit_value(read_config):
|
||||||
|
expected_message = "`implicit` parameter must be 'create' or absent"
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair my_pair]
|
||||||
|
a = "my_a"
|
||||||
|
b = "my_b"
|
||||||
|
collections = null
|
||||||
|
implicit = "invalid"
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_a/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_b/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert expected_message in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_implicit_create_only(read_config):
|
||||||
|
"""Test that implicit create works."""
|
||||||
|
errors, c = read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair my_pair]
|
||||||
|
a = "my_a"
|
||||||
|
b = "my_b"
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
implicit = "create"
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_a/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_b/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert not errors
|
||||||
|
pair = c.pairs["my_pair"]
|
||||||
|
assert pair.implicit == "create"
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import json
|
import json
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
|
|
@ -151,7 +153,7 @@ def test_discover_direct_path(tmpdir, runner):
|
||||||
def test_null_collection_with_named_collection(tmpdir, runner):
|
def test_null_collection_with_named_collection(tmpdir, runner):
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
[pair foobar]
|
[pair foobar]
|
||||||
a = "foo"
|
a = "foo"
|
||||||
b = "bar"
|
b = "bar"
|
||||||
|
|
@ -159,15 +161,13 @@ def test_null_collection_with_named_collection(tmpdir, runner):
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{base}/foo/"
|
path = "{tmpdir!s}/foo/"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
|
|
||||||
[storage bar]
|
[storage bar]
|
||||||
type = "singlefile"
|
type = "singlefile"
|
||||||
path = "{base}/bar.txt"
|
path = "{tmpdir!s}/bar.txt"
|
||||||
""".format(
|
"""
|
||||||
base=str(tmpdir)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -191,7 +191,7 @@ def test_null_collection_with_named_collection(tmpdir, runner):
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"a_requires,b_requires",
|
("a_requires", "b_requires"),
|
||||||
[
|
[
|
||||||
(True, True),
|
(True, True),
|
||||||
(True, False),
|
(True, False),
|
||||||
|
|
@ -206,7 +206,13 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
|
||||||
def __init__(self, require_collection, **kw):
|
def __init__(self, require_collection, **kw):
|
||||||
if require_collection:
|
if require_collection:
|
||||||
assert not kw.get("collection")
|
assert not kw.get("collection")
|
||||||
raise exceptions.CollectionRequired()
|
raise exceptions.CollectionRequired
|
||||||
|
|
||||||
|
async def get(self, href: str):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def list(self) -> list[tuple]:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
from vdirsyncer.cli.utils import storage_names
|
from vdirsyncer.cli.utils import storage_names
|
||||||
|
|
||||||
|
|
@ -214,7 +220,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
|
||||||
|
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
[pair foobar]
|
[pair foobar]
|
||||||
a = "foo"
|
a = "foo"
|
||||||
b = "bar"
|
b = "bar"
|
||||||
|
|
@ -222,14 +228,12 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "test"
|
type = "test"
|
||||||
require_collection = {a}
|
require_collection = {json.dumps(a_requires)}
|
||||||
|
|
||||||
[storage bar]
|
[storage bar]
|
||||||
type = "test"
|
type = "test"
|
||||||
require_collection = {b}
|
require_collection = {json.dumps(b_requires)}
|
||||||
""".format(
|
"""
|
||||||
a=json.dumps(a_requires), b=json.dumps(b_requires)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -239,3 +243,45 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
|
||||||
assert (
|
assert (
|
||||||
"One or more storages don't support `collections = null`." in result.output
|
"One or more storages don't support `collections = null`." in result.output
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_showconfig(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["from a"]
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
result = runner.invoke(["showconfig"])
|
||||||
|
assert not result.exception
|
||||||
|
assert json.loads(result.output) == {
|
||||||
|
"storages": [
|
||||||
|
{
|
||||||
|
"type": "filesystem",
|
||||||
|
"path": f"{tmpdir}/foo/",
|
||||||
|
"fileext": ".txt",
|
||||||
|
"instance_name": "foo",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "filesystem",
|
||||||
|
"path": f"{tmpdir}/bar/",
|
||||||
|
"fileext": ".txt",
|
||||||
|
"instance_name": "bar",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,27 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
|
|
||||||
def test_get_password_from_command(tmpdir, runner):
|
def test_get_password_from_command(tmpdir, runner):
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
[pair foobar]
|
[pair foobar]
|
||||||
a = "foo"
|
a = "foo"
|
||||||
b = "bar"
|
b = "bar"
|
||||||
collections = ["a", "b", "c"]
|
collections = ["a", "b", "c"]
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "filesystem"
|
type.fetch = ["shell", "echo filesystem"]
|
||||||
path = "{base}/foo/"
|
path = "{tmpdir!s}/foo/"
|
||||||
fileext.fetch = ["command", "echo", ".txt"]
|
fileext.fetch = ["command", "echo", ".txt"]
|
||||||
|
|
||||||
[storage bar]
|
[storage bar]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{base}/bar/"
|
path = "{tmpdir!s}/bar/"
|
||||||
fileext.fetch = ["prompt", "Fileext for bar"]
|
fileext.fetch = ["prompt", "Fileext for bar"]
|
||||||
""".format(
|
"""
|
||||||
base=str(tmpdir)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
@ -39,7 +41,7 @@ def test_basic(storage, runner, collection):
|
||||||
assert not result.exception
|
assert not result.exception
|
||||||
assert "No UID" in result.output
|
assert "No UID" in result.output
|
||||||
assert "'toobroken.txt' is malformed beyond repair" in result.output
|
assert "'toobroken.txt' is malformed beyond repair" in result.output
|
||||||
(new_fname,) = [x for x in storage.listdir() if "toobroken" not in str(x)]
|
(new_fname,) = (x for x in storage.listdir() if "toobroken" not in str(x))
|
||||||
assert "UID:" in new_fname.read()
|
assert "UID:" in new_fname.read()
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -56,7 +58,7 @@ def test_repair_uids(storage, runner, repair_uids):
|
||||||
else:
|
else:
|
||||||
opt = ["--no-repair-unsafe-uid"]
|
opt = ["--no-repair-unsafe-uid"]
|
||||||
|
|
||||||
result = runner.invoke(["repair"] + opt + ["foo"], input="y")
|
result = runner.invoke(["repair", *opt, "foo"], input="y")
|
||||||
assert not result.exception
|
assert not result.exception
|
||||||
|
|
||||||
if repair_uids:
|
if repair_uids:
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
@ -50,41 +52,6 @@ def test_sync_inexistant_pair(tmpdir, runner):
|
||||||
assert "pair foo does not exist." in result.output.lower()
|
assert "pair foo does not exist." in result.output.lower()
|
||||||
|
|
||||||
|
|
||||||
def test_debug_connections(tmpdir, runner):
|
|
||||||
runner.write_with_general(
|
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
[pair my_pair]
|
|
||||||
a = "my_a"
|
|
||||||
b = "my_b"
|
|
||||||
collections = null
|
|
||||||
|
|
||||||
[storage my_a]
|
|
||||||
type = "filesystem"
|
|
||||||
path = "{0}/path_a/"
|
|
||||||
fileext = ".txt"
|
|
||||||
|
|
||||||
[storage my_b]
|
|
||||||
type = "filesystem"
|
|
||||||
path = "{0}/path_b/"
|
|
||||||
fileext = ".txt"
|
|
||||||
"""
|
|
||||||
).format(str(tmpdir))
|
|
||||||
)
|
|
||||||
|
|
||||||
tmpdir.mkdir("path_a")
|
|
||||||
tmpdir.mkdir("path_b")
|
|
||||||
|
|
||||||
result = runner.invoke(["discover"])
|
|
||||||
assert not result.exception
|
|
||||||
|
|
||||||
result = runner.invoke(["-vdebug", "sync", "--max-workers=3"])
|
|
||||||
assert "using 3 maximal workers" in result.output.lower()
|
|
||||||
|
|
||||||
result = runner.invoke(["-vdebug", "sync"])
|
|
||||||
assert "using 1 maximal workers" in result.output.lower()
|
|
||||||
|
|
||||||
|
|
||||||
def test_empty_storage(tmpdir, runner):
|
def test_empty_storage(tmpdir, runner):
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
|
|
@ -123,9 +90,7 @@ def test_empty_storage(tmpdir, runner):
|
||||||
result = runner.invoke(["sync"])
|
result = runner.invoke(["sync"])
|
||||||
lines = result.output.splitlines()
|
lines = result.output.splitlines()
|
||||||
assert lines[0] == "Syncing my_pair"
|
assert lines[0] == "Syncing my_pair"
|
||||||
assert lines[1].startswith(
|
assert lines[1].startswith('error: my_pair: Storage "my_b" was completely emptied.')
|
||||||
"error: my_pair: " 'Storage "my_b" was completely emptied.'
|
|
||||||
)
|
|
||||||
assert result.exception
|
assert result.exception
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -313,27 +278,24 @@ def test_multiple_pairs(tmpdir, runner):
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_create_collections(collections, tmpdir, runner):
|
def test_create_collections(collections, tmpdir, runner):
|
||||||
|
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
[pair foobar]
|
[pair foobar]
|
||||||
a = "foo"
|
a = "foo"
|
||||||
b = "bar"
|
b = "bar"
|
||||||
collections = {colls}
|
collections = {json.dumps(list(collections))}
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{base}/foo/"
|
path = "{tmpdir!s}/foo/"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
|
|
||||||
[storage bar]
|
[storage bar]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{base}/bar/"
|
path = "{tmpdir!s}/bar/"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
""".format(
|
"""
|
||||||
base=str(tmpdir), colls=json.dumps(list(collections))
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -351,7 +313,7 @@ def test_create_collections(collections, tmpdir, runner):
|
||||||
def test_ident_conflict(tmpdir, runner):
|
def test_ident_conflict(tmpdir, runner):
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
[pair foobar]
|
[pair foobar]
|
||||||
a = "foo"
|
a = "foo"
|
||||||
b = "bar"
|
b = "bar"
|
||||||
|
|
@ -359,16 +321,14 @@ def test_ident_conflict(tmpdir, runner):
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{base}/foo/"
|
path = "{tmpdir!s}/foo/"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
|
|
||||||
[storage bar]
|
[storage bar]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{base}/bar/"
|
path = "{tmpdir!s}/bar/"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
""".format(
|
"""
|
||||||
base=str(tmpdir)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -388,20 +348,17 @@ def test_ident_conflict(tmpdir, runner):
|
||||||
'error: foobar: Storage "foo" contains multiple items with the '
|
'error: foobar: Storage "foo" contains multiple items with the '
|
||||||
"same UID or even content"
|
"same UID or even content"
|
||||||
) in result.output
|
) in result.output
|
||||||
assert (
|
assert sorted(
|
||||||
sorted(
|
[
|
||||||
[
|
"one.txt" in result.output,
|
||||||
"one.txt" in result.output,
|
"two.txt" in result.output,
|
||||||
"two.txt" in result.output,
|
"three.txt" in result.output,
|
||||||
"three.txt" in result.output,
|
]
|
||||||
]
|
) == [False, True, True]
|
||||||
)
|
|
||||||
== [False, True, True]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"existing,missing",
|
("existing", "missing"),
|
||||||
[
|
[
|
||||||
("foo", "bar"),
|
("foo", "bar"),
|
||||||
("bar", "foo"),
|
("bar", "foo"),
|
||||||
|
|
@ -410,7 +367,7 @@ def test_ident_conflict(tmpdir, runner):
|
||||||
def test_unknown_storage(tmpdir, runner, existing, missing):
|
def test_unknown_storage(tmpdir, runner, existing, missing):
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
[pair foobar]
|
[pair foobar]
|
||||||
a = "foo"
|
a = "foo"
|
||||||
b = "bar"
|
b = "bar"
|
||||||
|
|
@ -418,11 +375,9 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
|
||||||
|
|
||||||
[storage {existing}]
|
[storage {existing}]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{base}/{existing}/"
|
path = "{tmpdir!s}/{existing}/"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
""".format(
|
"""
|
||||||
base=str(tmpdir), existing=existing
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -432,10 +387,8 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
|
||||||
assert result.exception
|
assert result.exception
|
||||||
|
|
||||||
assert (
|
assert (
|
||||||
"Storage '{missing}' not found. "
|
f"Storage '{missing}' not found. "
|
||||||
"These are the configured storages: ['{existing}']".format(
|
f"These are the configured storages: ['{existing}']"
|
||||||
missing=missing, existing=existing
|
|
||||||
)
|
|
||||||
) in result.output
|
) in result.output
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -444,36 +397,34 @@ def test_no_configured_pairs(tmpdir, runner, cmd):
|
||||||
runner.write_with_general("")
|
runner.write_with_general("")
|
||||||
|
|
||||||
result = runner.invoke([cmd])
|
result = runner.invoke([cmd])
|
||||||
assert result.output == "critical: Nothing to do.\n"
|
assert result.output == ""
|
||||||
assert result.exception.code == 5
|
assert not result.exception
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"resolution,expect_foo,expect_bar",
|
("resolution", "expect_foo", "expect_bar"),
|
||||||
[(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
|
[(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
|
||||||
)
|
)
|
||||||
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
|
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
|
||||||
runner.write_with_general(
|
runner.write_with_general(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
[pair foobar]
|
[pair foobar]
|
||||||
a = "foo"
|
a = "foo"
|
||||||
b = "bar"
|
b = "bar"
|
||||||
collections = null
|
collections = null
|
||||||
conflict_resolution = {val}
|
conflict_resolution = {json.dumps(resolution)}
|
||||||
|
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
path = "{base}/foo"
|
path = "{tmpdir!s}/foo"
|
||||||
|
|
||||||
[storage bar]
|
[storage bar]
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
fileext = ".txt"
|
fileext = ".txt"
|
||||||
path = "{base}/bar"
|
path = "{tmpdir!s}/bar"
|
||||||
""".format(
|
"""
|
||||||
base=str(tmpdir), val=json.dumps(resolution)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -565,13 +516,11 @@ def test_fetch_only_necessary_params(tmpdir, runner):
|
||||||
fetch_script = tmpdir.join("fetch_script")
|
fetch_script = tmpdir.join("fetch_script")
|
||||||
fetch_script.write(
|
fetch_script.write(
|
||||||
dedent(
|
dedent(
|
||||||
"""
|
f"""
|
||||||
set -e
|
set -e
|
||||||
touch "{}"
|
touch "{fetched_file!s}"
|
||||||
echo ".txt"
|
echo ".txt"
|
||||||
""".format(
|
"""
|
||||||
str(fetched_file)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -602,9 +551,7 @@ def test_fetch_only_necessary_params(tmpdir, runner):
|
||||||
type = "filesystem"
|
type = "filesystem"
|
||||||
path = "{path}"
|
path = "{path}"
|
||||||
fileext.fetch = ["command", "sh", "{script}"]
|
fileext.fetch = ["command", "sh", "{script}"]
|
||||||
""".format(
|
""".format(path=str(tmpdir.mkdir("bogus")), script=str(fetch_script))
|
||||||
path=str(tmpdir.mkdir("bogus")), script=str(fetch_script)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,7 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.cli.utils import handle_cli_error
|
from vdirsyncer.cli.utils import handle_cli_error
|
||||||
from vdirsyncer.cli.utils import storage_instance_from_config
|
from vdirsyncer.cli.utils import storage_instance_from_config
|
||||||
|
|
@ -10,16 +14,18 @@ def test_handle_cli_error(capsys):
|
||||||
except BaseException:
|
except BaseException:
|
||||||
handle_cli_error()
|
handle_cli_error()
|
||||||
|
|
||||||
out, err = capsys.readouterr()
|
_out, err = capsys.readouterr()
|
||||||
assert "returned something vdirsyncer doesn't understand" in err
|
assert "returned something vdirsyncer doesn't understand" in err
|
||||||
assert "ayy lmao" in err
|
assert "ayy lmao" in err
|
||||||
|
|
||||||
|
|
||||||
def test_storage_instance_from_config(monkeypatch):
|
@pytest.mark.asyncio
|
||||||
def lol(**kw):
|
async def test_storage_instance_from_config(monkeypatch, aio_connector):
|
||||||
assert kw == {"foo": "bar", "baz": 1}
|
class Dummy:
|
||||||
return "OK"
|
def __init__(self, **kw):
|
||||||
|
assert kw == {"foo": "bar", "baz": 1}
|
||||||
|
|
||||||
monkeypatch.setitem(storage_names._storages, "lol", lol)
|
monkeypatch.setitem(storage_names._storages, "lol", Dummy)
|
||||||
config = {"type": "lol", "foo": "bar", "baz": 1}
|
config = {"type": "lol", "foo": "bar", "baz": 1}
|
||||||
assert storage_instance_from_config(config) == "OK"
|
storage = await storage_instance_from_config(config, connector=aio_connector)
|
||||||
|
assert isinstance(storage, Dummy)
|
||||||
|
|
|
||||||
28
tests/system/conftest.py
Normal file
28
tests/system/conftest.py
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import ssl
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trustme
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def ca():
|
||||||
|
return trustme.CA()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def localhost_cert(ca):
|
||||||
|
return ca.issue_cert("localhost")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def httpserver_ssl_context(localhost_cert):
|
||||||
|
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||||
|
|
||||||
|
crt = localhost_cert.cert_chain_pems[0]
|
||||||
|
key = localhost_cert.private_key_pem
|
||||||
|
with crt.tempfile() as crt_file, key.tempfile() as key_file:
|
||||||
|
context.load_cert_chain(crt_file, key_file)
|
||||||
|
|
||||||
|
return context
|
||||||
|
|
@ -1,9 +1,12 @@
|
||||||
import logging
|
from __future__ import annotations
|
||||||
import sys
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
import click_log
|
import click_log
|
||||||
import pytest
|
import pytest
|
||||||
import requests
|
from cryptography import x509
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
|
||||||
from vdirsyncer import http
|
from vdirsyncer import http
|
||||||
from vdirsyncer import utils
|
from vdirsyncer import utils
|
||||||
|
|
@ -19,50 +22,90 @@ def test_get_storage_init_args():
|
||||||
from vdirsyncer.storage.memory import MemoryStorage
|
from vdirsyncer.storage.memory import MemoryStorage
|
||||||
|
|
||||||
all, required = utils.get_storage_init_args(MemoryStorage)
|
all, required = utils.get_storage_init_args(MemoryStorage)
|
||||||
assert all == {"fileext", "collection", "read_only", "instance_name"}
|
assert all == {"fileext", "collection", "read_only", "instance_name", "no_delete"}
|
||||||
assert not required
|
assert not required
|
||||||
|
|
||||||
|
|
||||||
def test_request_ssl():
|
@pytest.mark.asyncio
|
||||||
with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
|
async def test_request_ssl():
|
||||||
http.request("GET", "https://self-signed.badssl.com/")
|
async with aiohttp.ClientSession() as session:
|
||||||
assert "certificate verify failed" in str(excinfo.value)
|
with pytest.raises(
|
||||||
|
aiohttp.ClientConnectorCertificateError,
|
||||||
http.request("GET", "https://self-signed.badssl.com/", verify=False)
|
match="certificate verify failed",
|
||||||
|
):
|
||||||
|
await http.request(
|
||||||
|
"GET",
|
||||||
|
"https://self-signed.badssl.com/",
|
||||||
|
session=session,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _fingerprints_broken():
|
@pytest.mark.xfail(reason="feature not implemented")
|
||||||
from pkg_resources import parse_version as ver
|
@pytest.mark.asyncio
|
||||||
|
async def test_request_unsafe_ssl():
|
||||||
broken_urllib3 = ver(requests.__version__) <= ver("2.5.1")
|
async with aiohttp.ClientSession() as session:
|
||||||
return broken_urllib3
|
await http.request(
|
||||||
|
"GET",
|
||||||
|
"https://self-signed.badssl.com/",
|
||||||
|
verify=False,
|
||||||
|
session=session,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(
|
def fingerprint_of_cert(cert, hash=hashes.SHA256) -> str:
|
||||||
_fingerprints_broken(), reason="https://github.com/shazow/urllib3/issues/529"
|
return x509.load_pem_x509_certificate(cert.bytes()).fingerprint(hash()).hex()
|
||||||
)
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"fingerprint",
|
|
||||||
[
|
|
||||||
"94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA",
|
|
||||||
"19:90:F7:23:94:F2:EF:AB:2B:64:2D:57:3D:25:95:2D",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_request_ssl_fingerprints(httpsserver, fingerprint):
|
|
||||||
httpsserver.serve_content("") # we need to serve something
|
|
||||||
|
|
||||||
http.request("GET", httpsserver.url, verify=False, verify_fingerprint=fingerprint)
|
|
||||||
with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
|
|
||||||
http.request("GET", httpsserver.url, verify_fingerprint=fingerprint)
|
|
||||||
|
|
||||||
with pytest.raises(requests.exceptions.ConnectionError) as excinfo:
|
@pytest.mark.parametrize("hash_algorithm", [hashes.SHA256])
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_request_ssl_leaf_fingerprint(
|
||||||
|
httpserver,
|
||||||
|
localhost_cert,
|
||||||
|
hash_algorithm,
|
||||||
|
aio_session,
|
||||||
|
):
|
||||||
|
fingerprint = fingerprint_of_cert(localhost_cert.cert_chain_pems[0], hash_algorithm)
|
||||||
|
bogus = "".join(reversed(fingerprint))
|
||||||
|
|
||||||
|
# We have to serve something:
|
||||||
|
httpserver.expect_request("/").respond_with_data("OK")
|
||||||
|
url = f"https://127.0.0.1:{httpserver.port}/"
|
||||||
|
|
||||||
|
ssl = http.prepare_verify(None, fingerprint)
|
||||||
|
await http.request("GET", url, ssl=ssl, session=aio_session)
|
||||||
|
|
||||||
|
ssl = http.prepare_verify(None, bogus)
|
||||||
|
with pytest.raises(aiohttp.ServerFingerprintMismatch):
|
||||||
|
await http.request("GET", url, ssl=ssl, session=aio_session)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.xfail(reason="Not implemented")
|
||||||
|
@pytest.mark.parametrize("hash_algorithm", [hashes.SHA256])
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_request_ssl_ca_fingerprints(httpserver, ca, hash_algorithm, aio_session):
|
||||||
|
fingerprint = fingerprint_of_cert(ca.cert_pem)
|
||||||
|
bogus = "".join(reversed(fingerprint))
|
||||||
|
|
||||||
|
# We have to serve something:
|
||||||
|
httpserver.expect_request("/").respond_with_data("OK")
|
||||||
|
url = f"https://127.0.0.1:{httpserver.port}/"
|
||||||
|
|
||||||
|
await http.request(
|
||||||
|
"GET",
|
||||||
|
url,
|
||||||
|
verify=False,
|
||||||
|
verify_fingerprint=fingerprint,
|
||||||
|
session=aio_session,
|
||||||
|
)
|
||||||
|
|
||||||
|
with pytest.raises(aiohttp.ServerFingerprintMismatch):
|
||||||
http.request(
|
http.request(
|
||||||
"GET",
|
"GET",
|
||||||
httpsserver.url,
|
url,
|
||||||
verify=False,
|
verify=False,
|
||||||
verify_fingerprint="".join(reversed(fingerprint)),
|
verify_fingerprint=bogus,
|
||||||
|
session=aio_session,
|
||||||
)
|
)
|
||||||
assert "Fingerprints did not match" in str(excinfo.value)
|
|
||||||
|
|
||||||
|
|
||||||
def test_open_graphical_browser(monkeypatch):
|
def test_open_graphical_browser(monkeypatch):
|
||||||
|
|
@ -70,10 +113,7 @@ def test_open_graphical_browser(monkeypatch):
|
||||||
|
|
||||||
# Just assert that this internal attribute still exists and behaves the way
|
# Just assert that this internal attribute still exists and behaves the way
|
||||||
# expected
|
# expected
|
||||||
if sys.version_info < (3, 7):
|
assert webbrowser._tryorder is None
|
||||||
iter(webbrowser._tryorder)
|
|
||||||
else:
|
|
||||||
assert webbrowser._tryorder is None
|
|
||||||
|
|
||||||
monkeypatch.setattr("webbrowser._tryorder", [])
|
monkeypatch.setattr("webbrowser._tryorder", [])
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from vdirsyncer.cli.config import _resolve_conflict_via_command
|
from vdirsyncer.cli.config import _resolve_conflict_via_command
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,15 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer.cli.discover import expand_collections
|
from vdirsyncer.cli.discover import expand_collections
|
||||||
|
|
||||||
|
|
||||||
missing = object()
|
missing = object()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"shortcuts,expected",
|
("shortcuts", "expected"),
|
||||||
[
|
[
|
||||||
(
|
(
|
||||||
["from a"],
|
["from a"],
|
||||||
|
|
@ -132,35 +134,38 @@ missing = object()
|
||||||
),
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_expand_collections(shortcuts, expected):
|
@pytest.mark.asyncio
|
||||||
|
async def test_expand_collections(shortcuts, expected):
|
||||||
config_a = {"type": "fooboo", "storage_side": "a"}
|
config_a = {"type": "fooboo", "storage_side": "a"}
|
||||||
|
|
||||||
config_b = {"type": "fooboo", "storage_side": "b"}
|
config_b = {"type": "fooboo", "storage_side": "b"}
|
||||||
|
|
||||||
def get_discovered_a():
|
async def get_discovered_a():
|
||||||
return {
|
return {
|
||||||
"c1": {"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
|
"c1": {"type": "fooboo", "custom_arg": "a1", "collection": "c1"},
|
||||||
"c2": {"type": "fooboo", "custom_arg": "a2", "collection": "c2"},
|
"c2": {"type": "fooboo", "custom_arg": "a2", "collection": "c2"},
|
||||||
"a3": {"type": "fooboo", "custom_arg": "a3", "collection": "a3"},
|
"a3": {"type": "fooboo", "custom_arg": "a3", "collection": "a3"},
|
||||||
}
|
}
|
||||||
|
|
||||||
def get_discovered_b():
|
async def get_discovered_b():
|
||||||
return {
|
return {
|
||||||
"c1": {"type": "fooboo", "custom_arg": "b1", "collection": "c1"},
|
"c1": {"type": "fooboo", "custom_arg": "b1", "collection": "c1"},
|
||||||
"c2": {"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
|
"c2": {"type": "fooboo", "custom_arg": "b2", "collection": "c2"},
|
||||||
"b3": {"type": "fooboo", "custom_arg": "b3", "collection": "b3"},
|
"b3": {"type": "fooboo", "custom_arg": "b3", "collection": "b3"},
|
||||||
}
|
}
|
||||||
|
|
||||||
assert (
|
async def handle_not_found(config, collection):
|
||||||
sorted(
|
return missing
|
||||||
|
|
||||||
|
assert sorted(
|
||||||
|
await aiostream.stream.list(
|
||||||
expand_collections(
|
expand_collections(
|
||||||
shortcuts,
|
shortcuts,
|
||||||
config_a,
|
config_a,
|
||||||
config_b,
|
config_b,
|
||||||
get_discovered_a,
|
get_discovered_a,
|
||||||
get_discovered_b,
|
get_discovered_b,
|
||||||
lambda config, collection: missing,
|
handle_not_found,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
== sorted(expected)
|
) == sorted(expected)
|
||||||
)
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
|
@ -6,8 +8,8 @@ import pytest
|
||||||
from hypothesis import given
|
from hypothesis import given
|
||||||
|
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.cli.fetchparams import expand_fetch_params
|
|
||||||
from vdirsyncer.cli.fetchparams import STRATEGIES
|
from vdirsyncer.cli.fetchparams import STRATEGIES
|
||||||
|
from vdirsyncer.cli.fetchparams import expand_fetch_params
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
|
|
@ -106,7 +108,7 @@ def test_failed_strategy(monkeypatch, value_cache):
|
||||||
|
|
||||||
def strategy(x):
|
def strategy(x):
|
||||||
calls.append(x)
|
calls.append(x)
|
||||||
raise KeyboardInterrupt()
|
raise KeyboardInterrupt
|
||||||
|
|
||||||
monkeypatch.setitem(STRATEGIES, "mystrategy", strategy)
|
monkeypatch.setitem(STRATEGIES, "mystrategy", strategy)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,13 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
|
||||||
import hypothesis.strategies as st
|
import hypothesis.strategies as st
|
||||||
from hypothesis import assume
|
from hypothesis import assume
|
||||||
from hypothesis import given
|
from hypothesis import given
|
||||||
|
|
||||||
from vdirsyncer.sync.status import SqliteStatus
|
from vdirsyncer.sync.status import SqliteStatus
|
||||||
|
|
||||||
|
|
||||||
status_dict_strategy = st.dictionaries(
|
status_dict_strategy = st.dictionaries(
|
||||||
st.text(),
|
st.text(),
|
||||||
st.tuples(
|
st.tuples(
|
||||||
|
|
@ -23,13 +26,13 @@ def test_legacy_status(status_dict):
|
||||||
hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()}
|
hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()}
|
||||||
hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()}
|
hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()}
|
||||||
assume(len(hrefs_a) == len(status_dict) == len(hrefs_b))
|
assume(len(hrefs_a) == len(status_dict) == len(hrefs_b))
|
||||||
status = SqliteStatus()
|
with contextlib.closing(SqliteStatus()) as status:
|
||||||
status.load_legacy_status(status_dict)
|
status.load_legacy_status(status_dict)
|
||||||
assert dict(status.to_legacy_status()) == status_dict
|
assert dict(status.to_legacy_status()) == status_dict
|
||||||
|
|
||||||
for ident, (meta_a, meta_b) in status_dict.items():
|
for ident, (meta_a, meta_b) in status_dict.items():
|
||||||
ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
|
ident_a, meta2_a = status.get_by_href_a(meta_a["href"])
|
||||||
ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
|
ident_b, meta2_b = status.get_by_href_b(meta_b["href"])
|
||||||
assert meta2_a.to_status() == meta_a
|
assert meta2_a.to_status() == meta_a
|
||||||
assert meta2_b.to_status() == meta_b
|
assert meta2_b.to_status() == meta_b
|
||||||
assert ident_a == ident_b == ident
|
assert ident_a == ident_b == ident
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,21 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import contextlib
|
||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import hypothesis.strategies as st
|
import hypothesis.strategies as st
|
||||||
import pytest
|
import pytest
|
||||||
from hypothesis import assume
|
from hypothesis import assume
|
||||||
from hypothesis.stateful import Bundle
|
from hypothesis.stateful import Bundle
|
||||||
from hypothesis.stateful import rule
|
|
||||||
from hypothesis.stateful import RuleBasedStateMachine
|
from hypothesis.stateful import RuleBasedStateMachine
|
||||||
|
from hypothesis.stateful import rule
|
||||||
|
|
||||||
from tests import blow_up
|
from tests import blow_up
|
||||||
from tests import uid_strategy
|
from tests import uid_strategy
|
||||||
from vdirsyncer.storage.memory import _random_string
|
|
||||||
from vdirsyncer.storage.memory import MemoryStorage
|
from vdirsyncer.storage.memory import MemoryStorage
|
||||||
|
from vdirsyncer.storage.memory import _random_string
|
||||||
from vdirsyncer.sync import sync as _sync
|
from vdirsyncer.sync import sync as _sync
|
||||||
from vdirsyncer.sync.exceptions import BothReadOnly
|
from vdirsyncer.sync.exceptions import BothReadOnly
|
||||||
from vdirsyncer.sync.exceptions import IdentConflict
|
from vdirsyncer.sync.exceptions import IdentConflict
|
||||||
|
|
@ -21,13 +26,12 @@ from vdirsyncer.sync.status import SqliteStatus
|
||||||
from vdirsyncer.vobject import Item
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
|
|
||||||
def sync(a, b, status, *args, **kwargs):
|
async def sync(a, b, status, *args, **kwargs) -> None:
|
||||||
new_status = SqliteStatus(":memory:")
|
with contextlib.closing(SqliteStatus(":memory:")) as new_status:
|
||||||
new_status.load_legacy_status(status)
|
new_status.load_legacy_status(status)
|
||||||
rv = _sync(a, b, new_status, *args, **kwargs)
|
await _sync(a, b, new_status, *args, **kwargs)
|
||||||
status.clear()
|
status.clear()
|
||||||
status.update(new_status.to_legacy_status())
|
status.update(new_status.to_legacy_status())
|
||||||
return rv
|
|
||||||
|
|
||||||
|
|
||||||
def empty_storage(x):
|
def empty_storage(x):
|
||||||
|
|
@ -38,45 +42,49 @@ def items(s):
|
||||||
return {x[1].raw for x in s.items.values()}
|
return {x[1].raw for x in s.items.values()}
|
||||||
|
|
||||||
|
|
||||||
def test_irrelevant_status():
|
@pytest.mark.asyncio
|
||||||
|
async def test_irrelevant_status():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {"1": ("1", 1234, "1.ics", 2345)}
|
status = {"1": ("1", 1234, "1.ics", 2345)}
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert not status
|
assert not status
|
||||||
assert not items(a)
|
assert not items(a)
|
||||||
assert not items(b)
|
assert not items(b)
|
||||||
|
|
||||||
|
|
||||||
def test_missing_status():
|
@pytest.mark.asyncio
|
||||||
|
async def test_missing_status():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
item = Item("asdf")
|
item = Item("asdf")
|
||||||
a.upload(item)
|
await a.upload(item)
|
||||||
b.upload(item)
|
await b.upload(item)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert len(status) == 1
|
assert len(status) == 1
|
||||||
assert items(a) == items(b) == {item.raw}
|
assert items(a) == items(b) == {item.raw}
|
||||||
|
|
||||||
|
|
||||||
def test_missing_status_and_different_items():
|
@pytest.mark.asyncio
|
||||||
|
async def test_missing_status_and_different_items():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
|
|
||||||
status = {}
|
status = {}
|
||||||
item1 = Item("UID:1\nhaha")
|
item1 = Item("UID:1\nhaha")
|
||||||
item2 = Item("UID:1\nhoho")
|
item2 = Item("UID:1\nhoho")
|
||||||
a.upload(item1)
|
await a.upload(item1)
|
||||||
b.upload(item2)
|
await b.upload(item2)
|
||||||
with pytest.raises(SyncConflict):
|
with pytest.raises(SyncConflict):
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert not status
|
assert not status
|
||||||
sync(a, b, status, conflict_resolution="a wins")
|
await sync(a, b, status, conflict_resolution="a wins")
|
||||||
assert items(a) == items(b) == {item1.raw}
|
assert items(a) == items(b) == {item1.raw}
|
||||||
|
|
||||||
|
|
||||||
def test_read_only_and_prefetch():
|
@pytest.mark.asyncio
|
||||||
|
async def test_read_only_and_prefetch():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
b.read_only = True
|
b.read_only = True
|
||||||
|
|
@ -84,147 +92,156 @@ def test_read_only_and_prefetch():
|
||||||
status = {}
|
status = {}
|
||||||
item1 = Item("UID:1\nhaha")
|
item1 = Item("UID:1\nhaha")
|
||||||
item2 = Item("UID:2\nhoho")
|
item2 = Item("UID:2\nhoho")
|
||||||
a.upload(item1)
|
await a.upload(item1)
|
||||||
a.upload(item2)
|
await a.upload(item2)
|
||||||
|
|
||||||
sync(a, b, status, force_delete=True)
|
await sync(a, b, status, force_delete=True)
|
||||||
sync(a, b, status, force_delete=True)
|
await sync(a, b, status, force_delete=True)
|
||||||
|
|
||||||
assert not items(a) and not items(b)
|
assert not items(a)
|
||||||
|
assert not items(b)
|
||||||
|
|
||||||
|
|
||||||
def test_partial_sync_error():
|
@pytest.mark.asyncio
|
||||||
|
async def test_partial_sync_error():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
|
|
||||||
a.upload(Item("UID:0"))
|
await a.upload(Item("UID:0"))
|
||||||
b.read_only = True
|
b.read_only = True
|
||||||
|
|
||||||
with pytest.raises(PartialSync):
|
with pytest.raises(PartialSync):
|
||||||
sync(a, b, status, partial_sync="error")
|
await sync(a, b, status, partial_sync="error")
|
||||||
|
|
||||||
|
|
||||||
def test_partial_sync_ignore():
|
@pytest.mark.asyncio
|
||||||
|
async def test_partial_sync_ignore():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
|
|
||||||
item0 = Item("UID:0\nhehe")
|
item0 = Item("UID:0\nhehe")
|
||||||
a.upload(item0)
|
await a.upload(item0)
|
||||||
b.upload(item0)
|
await b.upload(item0)
|
||||||
|
|
||||||
b.read_only = True
|
b.read_only = True
|
||||||
|
|
||||||
item1 = Item("UID:1\nhaha")
|
item1 = Item("UID:1\nhaha")
|
||||||
a.upload(item1)
|
await a.upload(item1)
|
||||||
|
|
||||||
sync(a, b, status, partial_sync="ignore")
|
await sync(a, b, status, partial_sync="ignore")
|
||||||
sync(a, b, status, partial_sync="ignore")
|
await sync(a, b, status, partial_sync="ignore")
|
||||||
|
|
||||||
assert items(a) == {item0.raw, item1.raw}
|
assert items(a) == {item0.raw, item1.raw}
|
||||||
assert items(b) == {item0.raw}
|
assert items(b) == {item0.raw}
|
||||||
|
|
||||||
|
|
||||||
def test_partial_sync_ignore2():
|
@pytest.mark.asyncio
|
||||||
|
async def test_partial_sync_ignore2():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
|
|
||||||
href, etag = a.upload(Item("UID:0"))
|
href, etag = await a.upload(Item("UID:0"))
|
||||||
a.read_only = True
|
a.read_only = True
|
||||||
|
|
||||||
sync(a, b, status, partial_sync="ignore", force_delete=True)
|
await sync(a, b, status, partial_sync="ignore", force_delete=True)
|
||||||
assert items(b) == items(a) == {"UID:0"}
|
assert items(b) == items(a) == {"UID:0"}
|
||||||
|
|
||||||
b.items.clear()
|
b.items.clear()
|
||||||
sync(a, b, status, partial_sync="ignore", force_delete=True)
|
await sync(a, b, status, partial_sync="ignore", force_delete=True)
|
||||||
sync(a, b, status, partial_sync="ignore", force_delete=True)
|
await sync(a, b, status, partial_sync="ignore", force_delete=True)
|
||||||
assert items(a) == {"UID:0"}
|
assert items(a) == {"UID:0"}
|
||||||
assert not b.items
|
assert not b.items
|
||||||
|
|
||||||
a.read_only = False
|
a.read_only = False
|
||||||
a.update(href, Item("UID:0\nupdated"), etag)
|
await a.update(href, Item("UID:0\nupdated"), etag)
|
||||||
a.read_only = True
|
a.read_only = True
|
||||||
sync(a, b, status, partial_sync="ignore", force_delete=True)
|
await sync(a, b, status, partial_sync="ignore", force_delete=True)
|
||||||
assert items(b) == items(a) == {"UID:0\nupdated"}
|
assert items(b) == items(a) == {"UID:0\nupdated"}
|
||||||
|
|
||||||
|
|
||||||
def test_upload_and_update():
|
@pytest.mark.asyncio
|
||||||
|
async def test_upload_and_update():
|
||||||
a = MemoryStorage(fileext=".a")
|
a = MemoryStorage(fileext=".a")
|
||||||
b = MemoryStorage(fileext=".b")
|
b = MemoryStorage(fileext=".b")
|
||||||
status = {}
|
status = {}
|
||||||
|
|
||||||
item = Item("UID:1") # new item 1 in a
|
item = Item("UID:1") # new item 1 in a
|
||||||
a.upload(item)
|
await a.upload(item)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(b) == items(a) == {item.raw}
|
assert items(b) == items(a) == {item.raw}
|
||||||
|
|
||||||
item = Item("UID:1\nASDF:YES") # update of item 1 in b
|
item = Item("UID:1\nASDF:YES") # update of item 1 in b
|
||||||
b.update("1.b", item, b.get("1.b")[1])
|
await b.update("1.b", item, (await b.get("1.b"))[1])
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(b) == items(a) == {item.raw}
|
assert items(b) == items(a) == {item.raw}
|
||||||
|
|
||||||
item2 = Item("UID:2") # new item 2 in b
|
item2 = Item("UID:2") # new item 2 in b
|
||||||
b.upload(item2)
|
await b.upload(item2)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(b) == items(a) == {item.raw, item2.raw}
|
assert items(b) == items(a) == {item.raw, item2.raw}
|
||||||
|
|
||||||
item2 = Item("UID:2\nASDF:YES") # update of item 2 in a
|
item2 = Item("UID:2\nASDF:YES") # update of item 2 in a
|
||||||
a.update("2.a", item2, a.get("2.a")[1])
|
await a.update("2.a", item2, (await a.get("2.a"))[1])
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(b) == items(a) == {item.raw, item2.raw}
|
assert items(b) == items(a) == {item.raw, item2.raw}
|
||||||
|
|
||||||
|
|
||||||
def test_deletion():
|
@pytest.mark.asyncio
|
||||||
|
async def test_deletion():
|
||||||
a = MemoryStorage(fileext=".a")
|
a = MemoryStorage(fileext=".a")
|
||||||
b = MemoryStorage(fileext=".b")
|
b = MemoryStorage(fileext=".b")
|
||||||
status = {}
|
status = {}
|
||||||
|
|
||||||
item = Item("UID:1")
|
item = Item("UID:1")
|
||||||
a.upload(item)
|
await a.upload(item)
|
||||||
item2 = Item("UID:2")
|
item2 = Item("UID:2")
|
||||||
a.upload(item2)
|
await a.upload(item2)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
b.delete("1.b", b.get("1.b")[1])
|
await b.delete("1.b", (await b.get("1.b"))[1])
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(a) == items(b) == {item2.raw}
|
assert items(a) == items(b) == {item2.raw}
|
||||||
|
|
||||||
a.upload(item)
|
await a.upload(item)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(a) == items(b) == {item.raw, item2.raw}
|
assert items(a) == items(b) == {item.raw, item2.raw}
|
||||||
a.delete("1.a", a.get("1.a")[1])
|
await a.delete("1.a", (await a.get("1.a"))[1])
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(a) == items(b) == {item2.raw}
|
assert items(a) == items(b) == {item2.raw}
|
||||||
|
|
||||||
|
|
||||||
def test_insert_hash():
|
@pytest.mark.asyncio
|
||||||
|
async def test_insert_hash():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
|
|
||||||
item = Item("UID:1")
|
item = Item("UID:1")
|
||||||
href, etag = a.upload(item)
|
href, etag = await a.upload(item)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
for d in status["1"]:
|
for d in status["1"]:
|
||||||
del d["hash"]
|
del d["hash"]
|
||||||
|
|
||||||
a.update(href, Item("UID:1\nHAHA:YES"), etag)
|
await a.update(href, Item("UID:1\nHAHA:YES"), etag)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert "hash" in status["1"][0] and "hash" in status["1"][1]
|
assert "hash" in status["1"][0]
|
||||||
|
assert "hash" in status["1"][1]
|
||||||
|
|
||||||
|
|
||||||
def test_already_synced():
|
@pytest.mark.asyncio
|
||||||
|
async def test_already_synced():
|
||||||
a = MemoryStorage(fileext=".a")
|
a = MemoryStorage(fileext=".a")
|
||||||
b = MemoryStorage(fileext=".b")
|
b = MemoryStorage(fileext=".b")
|
||||||
item = Item("UID:1")
|
item = Item("UID:1")
|
||||||
a.upload(item)
|
await a.upload(item)
|
||||||
b.upload(item)
|
await b.upload(item)
|
||||||
status = {
|
status = {
|
||||||
"1": (
|
"1": (
|
||||||
{"href": "1.a", "hash": item.hash, "etag": a.get("1.a")[1]},
|
{"href": "1.a", "hash": item.hash, "etag": (await a.get("1.a"))[1]},
|
||||||
{"href": "1.b", "hash": item.hash, "etag": b.get("1.b")[1]},
|
{"href": "1.b", "hash": item.hash, "etag": (await b.get("1.b"))[1]},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
old_status = deepcopy(status)
|
old_status = deepcopy(status)
|
||||||
|
|
@ -233,69 +250,73 @@ def test_already_synced():
|
||||||
)
|
)
|
||||||
|
|
||||||
for _ in (1, 2):
|
for _ in (1, 2):
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert status == old_status
|
assert status == old_status
|
||||||
assert items(a) == items(b) == {item.raw}
|
assert items(a) == items(b) == {item.raw}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("winning_storage", "ab")
|
@pytest.mark.parametrize("winning_storage", "ab")
|
||||||
def test_conflict_resolution_both_etags_new(winning_storage):
|
@pytest.mark.asyncio
|
||||||
|
async def test_conflict_resolution_both_etags_new(winning_storage):
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
item = Item("UID:1")
|
item = Item("UID:1")
|
||||||
href_a, etag_a = a.upload(item)
|
href_a, etag_a = await a.upload(item)
|
||||||
href_b, etag_b = b.upload(item)
|
href_b, etag_b = await b.upload(item)
|
||||||
status = {}
|
status = {}
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert status
|
assert status
|
||||||
item_a = Item("UID:1\nitem a")
|
item_a = Item("UID:1\nitem a")
|
||||||
item_b = Item("UID:1\nitem b")
|
item_b = Item("UID:1\nitem b")
|
||||||
a.update(href_a, item_a, etag_a)
|
await a.update(href_a, item_a, etag_a)
|
||||||
b.update(href_b, item_b, etag_b)
|
await b.update(href_b, item_b, etag_b)
|
||||||
with pytest.raises(SyncConflict):
|
with pytest.raises(SyncConflict):
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
sync(a, b, status, conflict_resolution=f"{winning_storage} wins")
|
await sync(a, b, status, conflict_resolution=f"{winning_storage} wins")
|
||||||
assert (
|
assert (
|
||||||
items(a) == items(b) == {item_a.raw if winning_storage == "a" else item_b.raw}
|
items(a) == items(b) == {item_a.raw if winning_storage == "a" else item_b.raw}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_updated_and_deleted():
|
@pytest.mark.asyncio
|
||||||
|
async def test_updated_and_deleted():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
href_a, etag_a = a.upload(Item("UID:1"))
|
href_a, etag_a = await a.upload(Item("UID:1"))
|
||||||
status = {}
|
status = {}
|
||||||
sync(a, b, status, force_delete=True)
|
await sync(a, b, status, force_delete=True)
|
||||||
|
|
||||||
((href_b, etag_b),) = b.list()
|
((href_b, etag_b),) = await aiostream.stream.list(b.list())
|
||||||
b.delete(href_b, etag_b)
|
await b.delete(href_b, etag_b)
|
||||||
updated = Item("UID:1\nupdated")
|
updated = Item("UID:1\nupdated")
|
||||||
a.update(href_a, updated, etag_a)
|
await a.update(href_a, updated, etag_a)
|
||||||
sync(a, b, status, force_delete=True)
|
await sync(a, b, status, force_delete=True)
|
||||||
|
|
||||||
assert items(a) == items(b) == {updated.raw}
|
assert items(a) == items(b) == {updated.raw}
|
||||||
|
|
||||||
|
|
||||||
def test_conflict_resolution_invalid_mode():
|
@pytest.mark.asyncio
|
||||||
|
async def test_conflict_resolution_invalid_mode():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
item_a = Item("UID:1\nitem a")
|
item_a = Item("UID:1\nitem a")
|
||||||
item_b = Item("UID:1\nitem b")
|
item_b = Item("UID:1\nitem b")
|
||||||
a.upload(item_a)
|
await a.upload(item_a)
|
||||||
b.upload(item_b)
|
await b.upload(item_b)
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
sync(a, b, {}, conflict_resolution="yolo")
|
await sync(a, b, {}, conflict_resolution="yolo")
|
||||||
|
|
||||||
|
|
||||||
def test_conflict_resolution_new_etags_without_changes():
|
@pytest.mark.asyncio
|
||||||
|
async def test_conflict_resolution_new_etags_without_changes():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
item = Item("UID:1")
|
item = Item("UID:1")
|
||||||
href_a, etag_a = a.upload(item)
|
href_a, etag_a = await a.upload(item)
|
||||||
href_b, etag_b = b.upload(item)
|
href_b, etag_b = await b.upload(item)
|
||||||
status = {"1": (href_a, "BOGUS_a", href_b, "BOGUS_b")}
|
status = {"1": (href_a, "BOGUS_a", href_b, "BOGUS_b")}
|
||||||
|
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
((ident, (status_a, status_b)),) = status.items()
|
((ident, (status_a, status_b)),) = status.items()
|
||||||
assert ident == "1"
|
assert ident == "1"
|
||||||
|
|
@ -305,7 +326,8 @@ def test_conflict_resolution_new_etags_without_changes():
|
||||||
assert status_b["etag"] == etag_b
|
assert status_b["etag"] == etag_b
|
||||||
|
|
||||||
|
|
||||||
def test_uses_get_multi(monkeypatch):
|
@pytest.mark.asyncio
|
||||||
|
async def test_uses_get_multi(monkeypatch):
|
||||||
def breakdown(*a, **kw):
|
def breakdown(*a, **kw):
|
||||||
raise AssertionError("Expected use of get_multi")
|
raise AssertionError("Expected use of get_multi")
|
||||||
|
|
||||||
|
|
@ -313,11 +335,11 @@ def test_uses_get_multi(monkeypatch):
|
||||||
|
|
||||||
old_get = MemoryStorage.get
|
old_get = MemoryStorage.get
|
||||||
|
|
||||||
def get_multi(self, hrefs):
|
async def get_multi(self, hrefs):
|
||||||
hrefs = list(hrefs)
|
hrefs = list(hrefs)
|
||||||
get_multi_calls.append(hrefs)
|
get_multi_calls.append(hrefs)
|
||||||
for href in hrefs:
|
for href in hrefs:
|
||||||
item, etag = old_get(self, href)
|
item, etag = await old_get(self, href)
|
||||||
yield href, item, etag
|
yield href, item, etag
|
||||||
|
|
||||||
monkeypatch.setattr(MemoryStorage, "get", breakdown)
|
monkeypatch.setattr(MemoryStorage, "get", breakdown)
|
||||||
|
|
@ -326,72 +348,77 @@ def test_uses_get_multi(monkeypatch):
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
item = Item("UID:1")
|
item = Item("UID:1")
|
||||||
expected_href, etag = a.upload(item)
|
expected_href, _etag = await a.upload(item)
|
||||||
|
|
||||||
sync(a, b, {})
|
await sync(a, b, {})
|
||||||
assert get_multi_calls == [[expected_href]]
|
assert get_multi_calls == [[expected_href]]
|
||||||
|
|
||||||
|
|
||||||
def test_empty_storage_dataloss():
|
@pytest.mark.asyncio
|
||||||
|
async def test_empty_storage_dataloss():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
a.upload(Item("UID:1"))
|
await a.upload(Item("UID:1"))
|
||||||
a.upload(Item("UID:2"))
|
await a.upload(Item("UID:2"))
|
||||||
status = {}
|
status = {}
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
with pytest.raises(StorageEmpty):
|
with pytest.raises(StorageEmpty):
|
||||||
sync(MemoryStorage(), b, status)
|
await sync(MemoryStorage(), b, status)
|
||||||
|
|
||||||
with pytest.raises(StorageEmpty):
|
with pytest.raises(StorageEmpty):
|
||||||
sync(a, MemoryStorage(), status)
|
await sync(a, MemoryStorage(), status)
|
||||||
|
|
||||||
|
|
||||||
def test_no_uids():
|
@pytest.mark.asyncio
|
||||||
|
async def test_no_uids():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
a.upload(Item("ASDF"))
|
await a.upload(Item("ASDF"))
|
||||||
b.upload(Item("FOOBAR"))
|
await b.upload(Item("FOOBAR"))
|
||||||
status = {}
|
status = {}
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert items(a) == items(b) == {"ASDF", "FOOBAR"}
|
assert items(a) == items(b) == {"ASDF", "FOOBAR"}
|
||||||
|
|
||||||
|
|
||||||
def test_changed_uids():
|
@pytest.mark.asyncio
|
||||||
|
async def test_changed_uids():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
href_a, etag_a = a.upload(Item("UID:A-ONE"))
|
href_a, etag_a = await a.upload(Item("UID:A-ONE"))
|
||||||
href_b, etag_b = b.upload(Item("UID:B-ONE"))
|
_href_b, _etag_b = await b.upload(Item("UID:B-ONE"))
|
||||||
status = {}
|
status = {}
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
a.update(href_a, Item("UID:A-TWO"), etag_a)
|
await a.update(href_a, Item("UID:A-TWO"), etag_a)
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
|
|
||||||
def test_both_readonly():
|
@pytest.mark.asyncio
|
||||||
|
async def test_both_readonly():
|
||||||
a = MemoryStorage(read_only=True)
|
a = MemoryStorage(read_only=True)
|
||||||
b = MemoryStorage(read_only=True)
|
b = MemoryStorage(read_only=True)
|
||||||
assert a.read_only
|
assert a.read_only
|
||||||
assert b.read_only
|
assert b.read_only
|
||||||
status = {}
|
status = {}
|
||||||
with pytest.raises(BothReadOnly):
|
with pytest.raises(BothReadOnly):
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
|
|
||||||
def test_partial_sync_revert():
|
@pytest.mark.asyncio
|
||||||
|
async def test_partial_sync_revert():
|
||||||
a = MemoryStorage(instance_name="a")
|
a = MemoryStorage(instance_name="a")
|
||||||
b = MemoryStorage(instance_name="b")
|
b = MemoryStorage(instance_name="b")
|
||||||
status = {}
|
status = {}
|
||||||
a.upload(Item("UID:1"))
|
await a.upload(Item("UID:1"))
|
||||||
b.upload(Item("UID:2"))
|
await b.upload(Item("UID:2"))
|
||||||
b.read_only = True
|
b.read_only = True
|
||||||
|
|
||||||
sync(a, b, status, partial_sync="revert")
|
await sync(a, b, status, partial_sync="revert")
|
||||||
assert len(status) == 2
|
assert len(status) == 2
|
||||||
assert items(a) == {"UID:1", "UID:2"}
|
assert items(a) == {"UID:1", "UID:2"}
|
||||||
assert items(b) == {"UID:2"}
|
assert items(b) == {"UID:2"}
|
||||||
|
|
||||||
sync(a, b, status, partial_sync="revert")
|
await sync(a, b, status, partial_sync="revert")
|
||||||
assert len(status) == 1
|
assert len(status) == 1
|
||||||
assert items(a) == {"UID:2"}
|
assert items(a) == {"UID:2"}
|
||||||
assert items(b) == {"UID:2"}
|
assert items(b) == {"UID:2"}
|
||||||
|
|
@ -399,37 +426,39 @@ def test_partial_sync_revert():
|
||||||
# Check that updates get reverted
|
# Check that updates get reverted
|
||||||
a.items[next(iter(a.items))] = ("foo", Item("UID:2\nupdated"))
|
a.items[next(iter(a.items))] = ("foo", Item("UID:2\nupdated"))
|
||||||
assert items(a) == {"UID:2\nupdated"}
|
assert items(a) == {"UID:2\nupdated"}
|
||||||
sync(a, b, status, partial_sync="revert")
|
await sync(a, b, status, partial_sync="revert")
|
||||||
assert len(status) == 1
|
assert len(status) == 1
|
||||||
assert items(a) == {"UID:2\nupdated"}
|
assert items(a) == {"UID:2\nupdated"}
|
||||||
sync(a, b, status, partial_sync="revert")
|
await sync(a, b, status, partial_sync="revert")
|
||||||
assert items(a) == {"UID:2"}
|
assert items(a) == {"UID:2"}
|
||||||
|
|
||||||
# Check that deletions get reverted
|
# Check that deletions get reverted
|
||||||
a.items.clear()
|
a.items.clear()
|
||||||
sync(a, b, status, partial_sync="revert", force_delete=True)
|
await sync(a, b, status, partial_sync="revert", force_delete=True)
|
||||||
sync(a, b, status, partial_sync="revert", force_delete=True)
|
await sync(a, b, status, partial_sync="revert", force_delete=True)
|
||||||
assert items(a) == {"UID:2"}
|
assert items(a) == {"UID:2"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("sync_inbetween", (True, False))
|
@pytest.mark.parametrize("sync_inbetween", [True, False])
|
||||||
def test_ident_conflict(sync_inbetween):
|
@pytest.mark.asyncio
|
||||||
|
async def test_ident_conflict(sync_inbetween):
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
href_a, etag_a = a.upload(Item("UID:aaa"))
|
href_a, etag_a = await a.upload(Item("UID:aaa"))
|
||||||
href_b, etag_b = a.upload(Item("UID:bbb"))
|
href_b, etag_b = await a.upload(Item("UID:bbb"))
|
||||||
if sync_inbetween:
|
if sync_inbetween:
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
a.update(href_a, Item("UID:xxx"), etag_a)
|
await a.update(href_a, Item("UID:xxx"), etag_a)
|
||||||
a.update(href_b, Item("UID:xxx"), etag_b)
|
await a.update(href_b, Item("UID:xxx"), etag_b)
|
||||||
|
|
||||||
with pytest.raises(IdentConflict):
|
with pytest.raises(IdentConflict):
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
|
|
||||||
def test_moved_href():
|
@pytest.mark.asyncio
|
||||||
|
async def test_moved_href():
|
||||||
"""
|
"""
|
||||||
Concrete application: ppl_ stores contact aliases in filenames, which means
|
Concrete application: ppl_ stores contact aliases in filenames, which means
|
||||||
item's hrefs get changed. Vdirsyncer doesn't synchronize this data, but
|
item's hrefs get changed. Vdirsyncer doesn't synchronize this data, but
|
||||||
|
|
@ -440,8 +469,8 @@ def test_moved_href():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
href, etag = a.upload(Item("UID:haha"))
|
_href, _etag = await a.upload(Item("UID:haha"))
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
b.items["lol"] = b.items.pop("haha")
|
b.items["lol"] = b.items.pop("haha")
|
||||||
|
|
||||||
|
|
@ -451,7 +480,7 @@ def test_moved_href():
|
||||||
# No actual sync actions
|
# No actual sync actions
|
||||||
a.delete = a.update = a.upload = b.delete = b.update = b.upload = blow_up
|
a.delete = a.update = a.upload = b.delete = b.update = b.upload = blow_up
|
||||||
|
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert len(status) == 1
|
assert len(status) == 1
|
||||||
assert items(a) == items(b) == {"UID:haha"}
|
assert items(a) == items(b) == {"UID:haha"}
|
||||||
assert status["haha"][1]["href"] == "lol"
|
assert status["haha"][1]["href"] == "lol"
|
||||||
|
|
@ -460,12 +489,13 @@ def test_moved_href():
|
||||||
# Further sync should be a noop. Not even prefetching should occur.
|
# Further sync should be a noop. Not even prefetching should occur.
|
||||||
b.get_multi = blow_up
|
b.get_multi = blow_up
|
||||||
|
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert old_status == status
|
assert old_status == status
|
||||||
assert items(a) == items(b) == {"UID:haha"}
|
assert items(a) == items(b) == {"UID:haha"}
|
||||||
|
|
||||||
|
|
||||||
def test_bogus_etag_change():
|
@pytest.mark.asyncio
|
||||||
|
async def test_bogus_etag_change():
|
||||||
"""Assert that sync algorithm is resilient against etag changes if content
|
"""Assert that sync algorithm is resilient against etag changes if content
|
||||||
didn\'t change.
|
didn\'t change.
|
||||||
|
|
||||||
|
|
@ -475,27 +505,33 @@ def test_bogus_etag_change():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
href_a, etag_a = a.upload(Item("UID:ASDASD"))
|
href_a, etag_a = await a.upload(Item("UID:ASDASD"))
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert len(status) == len(list(a.list())) == len(list(b.list())) == 1
|
assert (
|
||||||
|
len(status)
|
||||||
|
== len(await aiostream.stream.list(a.list()))
|
||||||
|
== len(await aiostream.stream.list(b.list()))
|
||||||
|
== 1
|
||||||
|
)
|
||||||
|
|
||||||
((href_b, etag_b),) = b.list()
|
((href_b, etag_b),) = await aiostream.stream.list(b.list())
|
||||||
a.update(href_a, Item("UID:ASDASD"), etag_a)
|
await a.update(href_a, Item("UID:ASDASD"), etag_a)
|
||||||
b.update(href_b, Item("UID:ASDASD\nACTUALCHANGE:YES"), etag_b)
|
await b.update(href_b, Item("UID:ASDASD\nACTUALCHANGE:YES"), etag_b)
|
||||||
|
|
||||||
b.delete = b.update = b.upload = blow_up
|
b.delete = b.update = b.upload = blow_up
|
||||||
|
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
assert len(status) == 1
|
assert len(status) == 1
|
||||||
assert items(a) == items(b) == {"UID:ASDASD\nACTUALCHANGE:YES"}
|
assert items(a) == items(b) == {"UID:ASDASD\nACTUALCHANGE:YES"}
|
||||||
|
|
||||||
|
|
||||||
def test_unicode_hrefs():
|
@pytest.mark.asyncio
|
||||||
|
async def test_unicode_hrefs():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
href, etag = a.upload(Item("UID:äää"))
|
_href, _etag = await a.upload(Item("UID:äää"))
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
||||||
|
|
||||||
class ActionIntentionallyFailed(Exception):
|
class ActionIntentionallyFailed(Exception):
|
||||||
|
|
@ -503,7 +539,7 @@ class ActionIntentionallyFailed(Exception):
|
||||||
|
|
||||||
|
|
||||||
def action_failure(*a, **kw):
|
def action_failure(*a, **kw):
|
||||||
raise ActionIntentionallyFailed()
|
raise ActionIntentionallyFailed
|
||||||
|
|
||||||
|
|
||||||
class SyncMachine(RuleBasedStateMachine):
|
class SyncMachine(RuleBasedStateMachine):
|
||||||
|
|
@ -511,12 +547,13 @@ class SyncMachine(RuleBasedStateMachine):
|
||||||
Storage = Bundle("storage")
|
Storage = Bundle("storage")
|
||||||
|
|
||||||
@rule(target=Storage, flaky_etags=st.booleans(), null_etag_on_upload=st.booleans())
|
@rule(target=Storage, flaky_etags=st.booleans(), null_etag_on_upload=st.booleans())
|
||||||
|
@pytest.mark.asyncio
|
||||||
def newstorage(self, flaky_etags, null_etag_on_upload):
|
def newstorage(self, flaky_etags, null_etag_on_upload):
|
||||||
s = MemoryStorage()
|
s = MemoryStorage()
|
||||||
if flaky_etags:
|
if flaky_etags:
|
||||||
|
|
||||||
def get(href):
|
async def get(href):
|
||||||
old_etag, item = s.items[href]
|
_old_etag, item = s.items[href]
|
||||||
etag = _random_string()
|
etag = _random_string()
|
||||||
s.items[href] = etag, item
|
s.items[href] = etag, item
|
||||||
return item, etag
|
return item, etag
|
||||||
|
|
@ -526,8 +563,15 @@ class SyncMachine(RuleBasedStateMachine):
|
||||||
if null_etag_on_upload:
|
if null_etag_on_upload:
|
||||||
_old_upload = s.upload
|
_old_upload = s.upload
|
||||||
_old_update = s.update
|
_old_update = s.update
|
||||||
s.upload = lambda item: (_old_upload(item)[0], "NULL")
|
|
||||||
s.update = lambda h, i, e: _old_update(h, i, e) and "NULL"
|
async def upload(item):
|
||||||
|
return (await _old_upload(item))[0], "NULL"
|
||||||
|
|
||||||
|
async def update(href, item, etag):
|
||||||
|
return await _old_update(href, item, etag) and "NULL"
|
||||||
|
|
||||||
|
s.upload = upload
|
||||||
|
s.update = update
|
||||||
|
|
||||||
return s
|
return s
|
||||||
|
|
||||||
|
|
@ -547,11 +591,11 @@ class SyncMachine(RuleBasedStateMachine):
|
||||||
_old_upload = s.upload
|
_old_upload = s.upload
|
||||||
_old_update = s.update
|
_old_update = s.update
|
||||||
|
|
||||||
def upload(item):
|
async def upload(item):
|
||||||
return _old_upload(item)[0], None
|
return (await _old_upload(item))[0], None
|
||||||
|
|
||||||
def update(href, item, etag):
|
async def update(href, item, etag):
|
||||||
_old_update(href, item, etag)
|
return await _old_update(href, item, etag)
|
||||||
|
|
||||||
s.upload = upload
|
s.upload = upload
|
||||||
s.update = update
|
s.update = update
|
||||||
|
|
@ -590,66 +634,71 @@ class SyncMachine(RuleBasedStateMachine):
|
||||||
with_error_callback,
|
with_error_callback,
|
||||||
partial_sync,
|
partial_sync,
|
||||||
):
|
):
|
||||||
assume(a is not b)
|
async def inner():
|
||||||
old_items_a = items(a)
|
assume(a is not b)
|
||||||
old_items_b = items(b)
|
old_items_a = items(a)
|
||||||
|
old_items_b = items(b)
|
||||||
|
|
||||||
a.instance_name = "a"
|
a.instance_name = "a"
|
||||||
b.instance_name = "b"
|
b.instance_name = "b"
|
||||||
|
|
||||||
errors = []
|
errors = []
|
||||||
|
|
||||||
if with_error_callback:
|
error_callback = errors.append if with_error_callback else None
|
||||||
error_callback = errors.append
|
|
||||||
else:
|
|
||||||
error_callback = None
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# If one storage is read-only, double-sync because changes don't
|
# If one storage is read-only, double-sync because changes don't
|
||||||
# get reverted immediately.
|
# get reverted immediately.
|
||||||
for _ in range(2 if a.read_only or b.read_only else 1):
|
for _ in range(2 if a.read_only or b.read_only else 1):
|
||||||
sync(
|
await sync(
|
||||||
a,
|
a,
|
||||||
b,
|
b,
|
||||||
status,
|
status,
|
||||||
force_delete=force_delete,
|
force_delete=force_delete,
|
||||||
conflict_resolution=conflict_resolution,
|
conflict_resolution=conflict_resolution,
|
||||||
error_callback=error_callback,
|
error_callback=error_callback,
|
||||||
partial_sync=partial_sync,
|
partial_sync=partial_sync,
|
||||||
|
)
|
||||||
|
|
||||||
|
for e in errors:
|
||||||
|
raise e
|
||||||
|
except PartialSync:
|
||||||
|
assert partial_sync == "error"
|
||||||
|
except ActionIntentionallyFailed:
|
||||||
|
pass
|
||||||
|
except BothReadOnly:
|
||||||
|
assert a.read_only
|
||||||
|
assert b.read_only
|
||||||
|
assume(False)
|
||||||
|
except StorageEmpty:
|
||||||
|
if force_delete:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
not_a = not await aiostream.stream.list(a.list())
|
||||||
|
not_b = not await aiostream.stream.list(b.list())
|
||||||
|
assert not_a or not_b
|
||||||
|
else:
|
||||||
|
items_a = items(a)
|
||||||
|
items_b = items(b)
|
||||||
|
|
||||||
|
assert items_a == items_b or partial_sync == "ignore"
|
||||||
|
assert items_a == old_items_a or not a.read_only
|
||||||
|
assert items_b == old_items_b or not b.read_only
|
||||||
|
|
||||||
|
assert (
|
||||||
|
set(a.items) | set(b.items) == set(status)
|
||||||
|
or partial_sync == "ignore"
|
||||||
)
|
)
|
||||||
|
|
||||||
for e in errors:
|
asyncio.run(inner())
|
||||||
raise e
|
|
||||||
except PartialSync:
|
|
||||||
assert partial_sync == "error"
|
|
||||||
except ActionIntentionallyFailed:
|
|
||||||
pass
|
|
||||||
except BothReadOnly:
|
|
||||||
assert a.read_only and b.read_only
|
|
||||||
assume(False)
|
|
||||||
except StorageEmpty:
|
|
||||||
if force_delete:
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
assert not list(a.list()) or not list(b.list())
|
|
||||||
else:
|
|
||||||
items_a = items(a)
|
|
||||||
items_b = items(b)
|
|
||||||
|
|
||||||
assert items_a == items_b or partial_sync == "ignore"
|
|
||||||
assert items_a == old_items_a or not a.read_only
|
|
||||||
assert items_b == old_items_b or not b.read_only
|
|
||||||
|
|
||||||
assert (
|
|
||||||
set(a.items) | set(b.items) == set(status) or partial_sync == "ignore"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
TestSyncMachine = SyncMachine.TestCase
|
TestSyncMachine = SyncMachine.TestCase
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("error_callback", [True, False])
|
@pytest.mark.parametrize("error_callback", [True, False])
|
||||||
def test_rollback(error_callback):
|
@pytest.mark.asyncio
|
||||||
|
async def test_rollback(error_callback):
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
|
|
@ -662,7 +711,7 @@ def test_rollback(error_callback):
|
||||||
if error_callback:
|
if error_callback:
|
||||||
errors = []
|
errors = []
|
||||||
|
|
||||||
sync(
|
await sync(
|
||||||
a,
|
a,
|
||||||
b,
|
b,
|
||||||
status=status,
|
status=status,
|
||||||
|
|
@ -677,16 +726,22 @@ def test_rollback(error_callback):
|
||||||
assert status["1"]
|
assert status["1"]
|
||||||
else:
|
else:
|
||||||
with pytest.raises(ActionIntentionallyFailed):
|
with pytest.raises(ActionIntentionallyFailed):
|
||||||
sync(a, b, status=status, conflict_resolution="a wins")
|
await sync(a, b, status=status, conflict_resolution="a wins")
|
||||||
|
|
||||||
|
|
||||||
def test_duplicate_hrefs():
|
@pytest.mark.asyncio
|
||||||
|
async def test_duplicate_hrefs():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
a.list = lambda: [("a", "a")] * 3
|
|
||||||
|
async def fake_list():
|
||||||
|
for item in [("a", "a")] * 3:
|
||||||
|
yield item
|
||||||
|
|
||||||
|
a.list = fake_list
|
||||||
a.items["a"] = ("a", Item("UID:a"))
|
a.items["a"] = ("a", Item("UID:a"))
|
||||||
|
|
||||||
status = {}
|
status = {}
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
with pytest.raises(AssertionError):
|
with pytest.raises(AssertionError):
|
||||||
sync(a, b, status)
|
await sync(a, b, status)
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,116 +1,140 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
|
||||||
import hypothesis.strategies as st
|
import hypothesis.strategies as st
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
from hypothesis import example
|
from hypothesis import example
|
||||||
from hypothesis import given
|
from hypothesis import given
|
||||||
|
|
||||||
from tests import blow_up
|
from tests import blow_up
|
||||||
from vdirsyncer.exceptions import UserError
|
from vdirsyncer.exceptions import UserError
|
||||||
|
from vdirsyncer.metasync import MetaSyncConflict
|
||||||
from vdirsyncer.metasync import logger
|
from vdirsyncer.metasync import logger
|
||||||
from vdirsyncer.metasync import metasync
|
from vdirsyncer.metasync import metasync
|
||||||
from vdirsyncer.metasync import MetaSyncConflict
|
|
||||||
from vdirsyncer.storage.base import normalize_meta_value
|
from vdirsyncer.storage.base import normalize_meta_value
|
||||||
from vdirsyncer.storage.memory import MemoryStorage
|
from vdirsyncer.storage.memory import MemoryStorage
|
||||||
|
|
||||||
|
|
||||||
def test_irrelevant_status():
|
@pytest.mark.asyncio
|
||||||
|
async def test_irrelevant_status():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {"foo": "bar"}
|
status = {"foo": "bar"}
|
||||||
|
|
||||||
metasync(a, b, status, keys=())
|
await metasync(a, b, status, keys=())
|
||||||
assert not status
|
assert not status
|
||||||
|
|
||||||
|
|
||||||
def test_basic(monkeypatch):
|
@pytest.mark.asyncio
|
||||||
|
async def test_basic(monkeypatch):
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
|
|
||||||
a.set_meta("foo", "bar")
|
await a.set_meta("foo", None)
|
||||||
metasync(a, b, status, keys=["foo"])
|
await metasync(a, b, status, keys=["foo"])
|
||||||
assert a.get_meta("foo") == b.get_meta("foo") == "bar"
|
assert await a.get_meta("foo") is None
|
||||||
|
assert await b.get_meta("foo") is None
|
||||||
|
|
||||||
a.set_meta("foo", "baz")
|
await a.set_meta("foo", "bar")
|
||||||
metasync(a, b, status, keys=["foo"])
|
await metasync(a, b, status, keys=["foo"])
|
||||||
assert a.get_meta("foo") == b.get_meta("foo") == "baz"
|
assert await a.get_meta("foo") == await b.get_meta("foo") == "bar"
|
||||||
|
|
||||||
|
await a.set_meta("foo", "baz")
|
||||||
|
await metasync(a, b, status, keys=["foo"])
|
||||||
|
assert await a.get_meta("foo") == await b.get_meta("foo") == "baz"
|
||||||
|
|
||||||
monkeypatch.setattr(a, "set_meta", blow_up)
|
monkeypatch.setattr(a, "set_meta", blow_up)
|
||||||
monkeypatch.setattr(b, "set_meta", blow_up)
|
monkeypatch.setattr(b, "set_meta", blow_up)
|
||||||
metasync(a, b, status, keys=["foo"])
|
await metasync(a, b, status, keys=["foo"])
|
||||||
assert a.get_meta("foo") == b.get_meta("foo") == "baz"
|
assert await a.get_meta("foo") == await b.get_meta("foo") == "baz"
|
||||||
monkeypatch.undo()
|
monkeypatch.undo()
|
||||||
monkeypatch.undo()
|
monkeypatch.undo()
|
||||||
|
|
||||||
b.set_meta("foo", None)
|
await b.set_meta("foo", None)
|
||||||
metasync(a, b, status, keys=["foo"])
|
await metasync(a, b, status, keys=["foo"])
|
||||||
assert not a.get_meta("foo") and not b.get_meta("foo")
|
assert not await a.get_meta("foo")
|
||||||
|
assert not await b.get_meta("foo")
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest_asyncio.fixture
|
||||||
def conflict_state(request):
|
async def conflict_state(request):
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
a.set_meta("foo", "bar")
|
await a.set_meta("foo", "bar")
|
||||||
b.set_meta("foo", "baz")
|
await b.set_meta("foo", "baz")
|
||||||
|
|
||||||
def cleanup():
|
async def do_cleanup():
|
||||||
assert a.get_meta("foo") == "bar"
|
assert await a.get_meta("foo") == "bar"
|
||||||
assert b.get_meta("foo") == "baz"
|
assert await b.get_meta("foo") == "baz"
|
||||||
assert not status
|
assert not status
|
||||||
|
|
||||||
request.addfinalizer(cleanup)
|
request.addfinalizer(lambda: asyncio.run(do_cleanup()))
|
||||||
|
|
||||||
return a, b, status
|
return a, b, status
|
||||||
|
|
||||||
|
|
||||||
def test_conflict(conflict_state):
|
@pytest_asyncio.fixture
|
||||||
|
async def test_conflict(conflict_state):
|
||||||
a, b, status = conflict_state
|
a, b, status = conflict_state
|
||||||
|
|
||||||
with pytest.raises(MetaSyncConflict):
|
with pytest.raises(MetaSyncConflict):
|
||||||
metasync(a, b, status, keys=["foo"])
|
await metasync(a, b, status, keys=["foo"])
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_conflict_resolution(conflict_state):
|
@pytest.mark.asyncio
|
||||||
|
async def test_invalid_conflict_resolution(conflict_state):
|
||||||
a, b, status = conflict_state
|
a, b, status = conflict_state
|
||||||
|
|
||||||
with pytest.raises(UserError) as excinfo:
|
with pytest.raises(UserError) as excinfo:
|
||||||
metasync(a, b, status, keys=["foo"], conflict_resolution="foo")
|
await metasync(a, b, status, keys=["foo"], conflict_resolution="foo")
|
||||||
|
|
||||||
assert "Invalid conflict resolution setting" in str(excinfo.value)
|
assert "Invalid conflict resolution setting" in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch):
|
@pytest.mark.asyncio
|
||||||
|
async def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch):
|
||||||
a, b, status = conflict_state
|
a, b, status = conflict_state
|
||||||
warnings = []
|
warnings = []
|
||||||
monkeypatch.setattr(logger, "warning", warnings.append)
|
monkeypatch.setattr(logger, "warning", warnings.append)
|
||||||
|
|
||||||
with pytest.raises(MetaSyncConflict):
|
with pytest.raises(MetaSyncConflict):
|
||||||
metasync(a, b, status, keys=["foo"], conflict_resolution=lambda *a, **kw: None)
|
await metasync(
|
||||||
|
a,
|
||||||
|
b,
|
||||||
|
status,
|
||||||
|
keys=["foo"],
|
||||||
|
conflict_resolution=lambda *a, **kw: None,
|
||||||
|
)
|
||||||
|
|
||||||
assert warnings == ["Custom commands don't work on metasync."]
|
assert warnings == ["Custom commands don't work on metasync."]
|
||||||
|
|
||||||
|
|
||||||
def test_conflict_same_content():
|
@pytest.mark.asyncio
|
||||||
|
async def test_conflict_same_content():
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
a.set_meta("foo", "bar")
|
await a.set_meta("foo", "bar")
|
||||||
b.set_meta("foo", "bar")
|
await b.set_meta("foo", "bar")
|
||||||
|
|
||||||
metasync(a, b, status, keys=["foo"])
|
await metasync(a, b, status, keys=["foo"])
|
||||||
assert a.get_meta("foo") == b.get_meta("foo") == status["foo"] == "bar"
|
assert await a.get_meta("foo") == await b.get_meta("foo") == status["foo"] == "bar"
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("wins", "ab")
|
@pytest.mark.parametrize("wins", "ab")
|
||||||
def test_conflict_x_wins(wins):
|
@pytest.mark.asyncio
|
||||||
|
async def test_conflict_x_wins(wins):
|
||||||
a = MemoryStorage()
|
a = MemoryStorage()
|
||||||
b = MemoryStorage()
|
b = MemoryStorage()
|
||||||
status = {}
|
status = {}
|
||||||
a.set_meta("foo", "bar")
|
await a.set_meta("foo", "bar")
|
||||||
b.set_meta("foo", "baz")
|
await b.set_meta("foo", "baz")
|
||||||
|
|
||||||
metasync(
|
await metasync(
|
||||||
a,
|
a,
|
||||||
b,
|
b,
|
||||||
status,
|
status,
|
||||||
|
|
@ -119,8 +143,8 @@ def test_conflict_x_wins(wins):
|
||||||
)
|
)
|
||||||
|
|
||||||
assert (
|
assert (
|
||||||
a.get_meta("foo")
|
await a.get_meta("foo")
|
||||||
== b.get_meta("foo")
|
== await b.get_meta("foo")
|
||||||
== status["foo"]
|
== status["foo"]
|
||||||
== ("bar" if wins == "a" else "baz")
|
== ("bar" if wins == "a" else "baz")
|
||||||
)
|
)
|
||||||
|
|
@ -148,7 +172,8 @@ metadata = st.dictionaries(keys, values)
|
||||||
keys={"0"},
|
keys={"0"},
|
||||||
conflict_resolution="a wins",
|
conflict_resolution="a wins",
|
||||||
)
|
)
|
||||||
def test_fuzzing(a, b, status, keys, conflict_resolution):
|
@pytest.mark.asyncio
|
||||||
|
async def test_fuzzing(a, b, status, keys, conflict_resolution):
|
||||||
def _get_storage(m, instance_name):
|
def _get_storage(m, instance_name):
|
||||||
s = MemoryStorage(instance_name=instance_name)
|
s = MemoryStorage(instance_name=instance_name)
|
||||||
s.metadata = m
|
s.metadata = m
|
||||||
|
|
@ -159,13 +184,13 @@ def test_fuzzing(a, b, status, keys, conflict_resolution):
|
||||||
|
|
||||||
winning_storage = a if conflict_resolution == "a wins" else b
|
winning_storage = a if conflict_resolution == "a wins" else b
|
||||||
expected_values = {
|
expected_values = {
|
||||||
key: winning_storage.get_meta(key) for key in keys if key not in status
|
key: await winning_storage.get_meta(key) for key in keys if key not in status
|
||||||
}
|
}
|
||||||
|
|
||||||
metasync(a, b, status, keys=keys, conflict_resolution=conflict_resolution)
|
await metasync(a, b, status, keys=keys, conflict_resolution=conflict_resolution)
|
||||||
|
|
||||||
for key in keys:
|
for key in keys:
|
||||||
s = status.get(key, "")
|
s = status.get(key)
|
||||||
assert a.get_meta(key) == b.get_meta(key) == s
|
assert await a.get_meta(key) == await b.get_meta(key) == s
|
||||||
if expected_values.get(key, "") and s:
|
if expected_values.get(key) and s:
|
||||||
assert s == expected_values[key]
|
assert s == expected_values[key]
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,9 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
from hypothesis import given
|
|
||||||
from hypothesis import HealthCheck
|
from hypothesis import HealthCheck
|
||||||
|
from hypothesis import given
|
||||||
from hypothesis import settings
|
from hypothesis import settings
|
||||||
|
|
||||||
from tests import uid_strategy
|
from tests import uid_strategy
|
||||||
|
|
@ -14,43 +17,48 @@ from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
@given(uid=uid_strategy)
|
@given(uid=uid_strategy)
|
||||||
# Using the random module for UIDs:
|
# Using the random module for UIDs:
|
||||||
@settings(suppress_health_check=HealthCheck.all())
|
@settings(suppress_health_check=list(HealthCheck))
|
||||||
def test_repair_uids(uid):
|
@pytest.mark.asyncio
|
||||||
|
async def test_repair_uids(uid):
|
||||||
s = MemoryStorage()
|
s = MemoryStorage()
|
||||||
s.items = {
|
s.items = {
|
||||||
"one": ("asdf", Item(f"BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD")),
|
"one": ("asdf", Item(f"BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD")),
|
||||||
"two": ("asdf", Item(f"BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD")),
|
"two": ("asdf", Item(f"BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD")),
|
||||||
}
|
}
|
||||||
|
|
||||||
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
|
uid1, uid2 = [(await s.get(href))[0].uid async for href, etag in s.list()]
|
||||||
assert uid1 == uid2
|
assert uid1 == uid2
|
||||||
|
|
||||||
repair_storage(s, repair_unsafe_uid=False)
|
await repair_storage(s, repair_unsafe_uid=False)
|
||||||
|
|
||||||
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
|
uid1, uid2 = [
|
||||||
|
(await s.get(href))[0].uid
|
||||||
|
for href, etag in await aiostream.stream.list(s.list())
|
||||||
|
]
|
||||||
assert uid1 != uid2
|
assert uid1 != uid2
|
||||||
|
|
||||||
|
|
||||||
@given(uid=uid_strategy.filter(lambda x: not href_safe(x)))
|
@given(uid=uid_strategy.filter(lambda x: not href_safe(x)))
|
||||||
# Using the random module for UIDs:
|
# Using the random module for UIDs:
|
||||||
@settings(suppress_health_check=HealthCheck.all())
|
@settings(suppress_health_check=list(HealthCheck))
|
||||||
def test_repair_unsafe_uids(uid):
|
@pytest.mark.asyncio
|
||||||
|
async def test_repair_unsafe_uids(uid):
|
||||||
s = MemoryStorage()
|
s = MemoryStorage()
|
||||||
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
|
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
|
||||||
href, etag = s.upload(item)
|
href, _etag = await s.upload(item)
|
||||||
assert s.get(href)[0].uid == uid
|
assert (await s.get(href))[0].uid == uid
|
||||||
assert not href_safe(uid)
|
assert not href_safe(uid)
|
||||||
|
|
||||||
repair_storage(s, repair_unsafe_uid=True)
|
await repair_storage(s, repair_unsafe_uid=True)
|
||||||
|
|
||||||
new_href = list(s.list())[0][0]
|
new_href = (await aiostream.stream.list(s.list()))[0][0]
|
||||||
assert href_safe(new_href)
|
assert href_safe(new_href)
|
||||||
newuid = s.get(new_href)[0].uid
|
newuid = (await s.get(new_href))[0].uid
|
||||||
assert href_safe(newuid)
|
assert href_safe(newuid)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"uid,href", [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
|
("uid", "href"), [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")]
|
||||||
)
|
)
|
||||||
def test_repair_unsafe_href(uid, href):
|
def test_repair_unsafe_href(uid, href):
|
||||||
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
|
item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD")
|
||||||
|
|
|
||||||
136
tests/unit/test_retry.py
Normal file
136
tests/unit/test_retry.py
Normal file
|
|
@ -0,0 +1,136 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from unittest.mock import AsyncMock
|
||||||
|
from unittest.mock import Mock
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer.http import UsageLimitReached
|
||||||
|
from vdirsyncer.http import request
|
||||||
|
|
||||||
|
|
||||||
|
async def _create_mock_response(status: int, body: str | dict):
|
||||||
|
raw_body = body
|
||||||
|
text_body = json.dumps(body) if isinstance(body, dict) else body
|
||||||
|
|
||||||
|
mock_response = AsyncMock()
|
||||||
|
mock_response.status = status
|
||||||
|
mock_response.ok = 200 <= status < 300
|
||||||
|
mock_response.reason = "OK" if mock_response.ok else "Forbidden"
|
||||||
|
mock_response.headers = (
|
||||||
|
{"Content-Type": "application/json"}
|
||||||
|
if isinstance(raw_body, dict)
|
||||||
|
else {"Content-Type": "text/plain"}
|
||||||
|
)
|
||||||
|
mock_response.text.return_value = text_body
|
||||||
|
if isinstance(raw_body, dict):
|
||||||
|
mock_response.json.return_value = raw_body
|
||||||
|
else:
|
||||||
|
mock_response.json.side_effect = ValueError("Not JSON")
|
||||||
|
mock_response.raise_for_status = Mock(
|
||||||
|
side_effect=(
|
||||||
|
aiohttp.ClientResponseError(
|
||||||
|
request_info=AsyncMock(),
|
||||||
|
history=(),
|
||||||
|
status=status,
|
||||||
|
message=mock_response.reason,
|
||||||
|
headers=mock_response.headers,
|
||||||
|
)
|
||||||
|
if not mock_response.ok
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return mock_response
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_request_retry_on_usage_limit():
|
||||||
|
url = "http://example.com/api"
|
||||||
|
max_retries = 5 # As configured in the @retry decorator
|
||||||
|
|
||||||
|
mock_session = AsyncMock()
|
||||||
|
|
||||||
|
# Simulate (max_retries - 1) 403 errors and then a 200 OK
|
||||||
|
mock_session.request.side_effect = [
|
||||||
|
await _create_mock_response(
|
||||||
|
403,
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
for _ in range(max_retries - 1)
|
||||||
|
] + [await _create_mock_response(200, "OK")]
|
||||||
|
|
||||||
|
async with (
|
||||||
|
aiohttp.ClientSession()
|
||||||
|
): # Dummy session. Will be replaced by mock_session at call
|
||||||
|
response = await request("GET", url, mock_session)
|
||||||
|
|
||||||
|
assert response.status == 200
|
||||||
|
assert mock_session.request.call_count == max_retries
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_request_retry_exceeds_max_attempts():
|
||||||
|
url = "http://example.com/api"
|
||||||
|
max_retries = 5 # As configured in the @retry decorator
|
||||||
|
|
||||||
|
mock_session = AsyncMock()
|
||||||
|
# Simulate max_retries 403 errors and then a 200 OK
|
||||||
|
mock_session.request.side_effect = [
|
||||||
|
await _create_mock_response(
|
||||||
|
403,
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
for _ in range(max_retries)
|
||||||
|
]
|
||||||
|
|
||||||
|
async with (
|
||||||
|
aiohttp.ClientSession()
|
||||||
|
): # Dummy session. Will be replaced by mock_session at call
|
||||||
|
with pytest.raises(UsageLimitReached):
|
||||||
|
await request("GET", url, mock_session)
|
||||||
|
assert mock_session.request.call_count == max_retries
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_request_no_retry_on_generic_403_json():
|
||||||
|
url = "http://example.com/api"
|
||||||
|
|
||||||
|
mock_session = AsyncMock()
|
||||||
|
# Generic non-Google 403 error payload (e.g., GitHub-style)
|
||||||
|
mock_session.request.side_effect = [
|
||||||
|
await _create_mock_response(403, {"message": "API rate limit exceeded"})
|
||||||
|
]
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession():
|
||||||
|
with pytest.raises(aiohttp.ClientResponseError):
|
||||||
|
await request("GET", url, mock_session)
|
||||||
|
# Should not retry because it's not the Google quotaExceeded shape
|
||||||
|
assert mock_session.request.call_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_request_no_retry_on_generic_403_text():
|
||||||
|
url = "http://example.com/api"
|
||||||
|
|
||||||
|
mock_session = AsyncMock()
|
||||||
|
# Plain-text 403 body mentioning rate limits, but not structured as Google error
|
||||||
|
mock_session.request.side_effect = [
|
||||||
|
await _create_mock_response(403, "Rate limit exceeded")
|
||||||
|
]
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession():
|
||||||
|
with pytest.raises(aiohttp.ClientResponseError):
|
||||||
|
await request("GET", url, mock_session)
|
||||||
|
# Should not retry because the JSON shape is not Google quotaExceeded
|
||||||
|
assert mock_session.request.call_count == 1
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
import hypothesis.strategies as st
|
import hypothesis.strategies as st
|
||||||
|
|
@ -5,17 +7,16 @@ import pytest
|
||||||
from hypothesis import assume
|
from hypothesis import assume
|
||||||
from hypothesis import given
|
from hypothesis import given
|
||||||
from hypothesis.stateful import Bundle
|
from hypothesis.stateful import Bundle
|
||||||
from hypothesis.stateful import rule
|
|
||||||
from hypothesis.stateful import RuleBasedStateMachine
|
from hypothesis.stateful import RuleBasedStateMachine
|
||||||
|
from hypothesis.stateful import rule
|
||||||
|
|
||||||
import vdirsyncer.vobject as vobject
|
import vdirsyncer.vobject as vobject
|
||||||
from tests import BARE_EVENT_TEMPLATE
|
from tests import BARE_EVENT_TEMPLATE
|
||||||
from tests import EVENT_TEMPLATE
|
from tests import EVENT_TEMPLATE
|
||||||
from tests import EVENT_WITH_TIMEZONE_TEMPLATE
|
from tests import EVENT_WITH_TIMEZONE_TEMPLATE
|
||||||
|
from tests import VCARD_TEMPLATE
|
||||||
from tests import normalize_item
|
from tests import normalize_item
|
||||||
from tests import uid_strategy
|
from tests import uid_strategy
|
||||||
from tests import VCARD_TEMPLATE
|
|
||||||
|
|
||||||
|
|
||||||
_simple_split = [
|
_simple_split = [
|
||||||
VCARD_TEMPLATE.format(r=123, uid=123),
|
VCARD_TEMPLATE.format(r=123, uid=123),
|
||||||
|
|
@ -24,7 +25,7 @@ _simple_split = [
|
||||||
]
|
]
|
||||||
|
|
||||||
_simple_joined = "\r\n".join(
|
_simple_joined = "\r\n".join(
|
||||||
["BEGIN:VADDRESSBOOK"] + _simple_split + ["END:VADDRESSBOOK\r\n"]
|
["BEGIN:VADDRESSBOOK", *_simple_split, "END:VADDRESSBOOK\r\n"]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -123,7 +124,7 @@ def test_split_collection_timezones():
|
||||||
"END:VTIMEZONE"
|
"END:VTIMEZONE"
|
||||||
)
|
)
|
||||||
|
|
||||||
full = "\r\n".join(["BEGIN:VCALENDAR"] + items + [timezone, "END:VCALENDAR"])
|
full = "\r\n".join(["BEGIN:VCALENDAR", *items, timezone, "END:VCALENDAR"])
|
||||||
|
|
||||||
given = {normalize_item(item) for item in vobject.split_collection(full)}
|
given = {normalize_item(item) for item in vobject.split_collection(full)}
|
||||||
expected = {
|
expected = {
|
||||||
|
|
@ -153,7 +154,7 @@ def test_hash_item():
|
||||||
|
|
||||||
|
|
||||||
def test_multiline_uid(benchmark):
|
def test_multiline_uid(benchmark):
|
||||||
a = "BEGIN:FOO\r\n" "UID:123456789abcd\r\n" " efgh\r\n" "END:FOO\r\n"
|
a = "BEGIN:FOO\r\nUID:123456789abcd\r\n efgh\r\nEND:FOO\r\n"
|
||||||
assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh"
|
assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh"
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -236,6 +237,31 @@ def test_broken_item():
|
||||||
assert item.parsed is None
|
assert item.parsed is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_mismatched_end():
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
vobject._Component.parse(
|
||||||
|
[
|
||||||
|
"BEGIN:FOO",
|
||||||
|
"END:BAR",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "Got END:BAR, expected END:FOO at line 2" in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_missing_end():
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
vobject._Component.parse(
|
||||||
|
[
|
||||||
|
"BEGIN:FOO",
|
||||||
|
"BEGIN:BAR",
|
||||||
|
"END:BAR",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "Missing END for component(s): FOO" in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
def test_multiple_items():
|
def test_multiple_items():
|
||||||
with pytest.raises(ValueError) as excinfo:
|
with pytest.raises(ValueError) as excinfo:
|
||||||
vobject._Component.parse(
|
vobject._Component.parse(
|
||||||
|
|
@ -273,7 +299,7 @@ def test_input_types():
|
||||||
|
|
||||||
value_strategy = st.text(
|
value_strategy = st.text(
|
||||||
st.characters(
|
st.characters(
|
||||||
blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), blacklist_characters=":="
|
exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), exclude_characters=":="
|
||||||
),
|
),
|
||||||
min_size=1,
|
min_size=1,
|
||||||
).filter(lambda x: x.strip() == x)
|
).filter(lambda x: x.strip() == x)
|
||||||
|
|
@ -309,7 +335,8 @@ class VobjectMachine(RuleBasedStateMachine):
|
||||||
assert key in c
|
assert key in c
|
||||||
assert c.get(key) == value
|
assert c.get(key) == value
|
||||||
dump = "\r\n".join(c.dump_lines())
|
dump = "\r\n".join(c.dump_lines())
|
||||||
assert key in dump and value in dump
|
assert key in dump
|
||||||
|
assert value in dump
|
||||||
|
|
||||||
@rule(
|
@rule(
|
||||||
c=Parsed,
|
c=Parsed,
|
||||||
|
|
@ -339,6 +366,16 @@ class VobjectMachine(RuleBasedStateMachine):
|
||||||
TestVobjectMachine = VobjectMachine.TestCase
|
TestVobjectMachine = VobjectMachine.TestCase
|
||||||
|
|
||||||
|
|
||||||
|
def test_dupe_consecutive_keys():
|
||||||
|
state = VobjectMachine()
|
||||||
|
unparsed_0 = state.get_unparsed_lines(encoded=False, joined=False)
|
||||||
|
parsed_0 = state.parse(unparsed=unparsed_0)
|
||||||
|
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
|
||||||
|
state.add_prop_raw(c=parsed_0, key="0", params=[], value="0")
|
||||||
|
state.add_prop(c=parsed_0, key="0", value="1")
|
||||||
|
state.teardown()
|
||||||
|
|
||||||
|
|
||||||
def test_component_contains():
|
def test_component_contains():
|
||||||
item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"])
|
item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"])
|
||||||
|
|
||||||
|
|
@ -346,4 +383,4 @@ def test_component_contains():
|
||||||
assert "BAZ" not in item
|
assert "BAZ" not in item
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
42 in item # noqa: B015
|
42 in item # noqa: B015, this check raises.
|
||||||
|
|
|
||||||
|
|
@ -2,13 +2,14 @@
|
||||||
Vdirsyncer synchronizes calendars and contacts.
|
Vdirsyncer synchronizes calendars and contacts.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
|
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
|
||||||
BUGTRACKER_HOME = PROJECT_HOME + "/issues"
|
BUGTRACKER_HOME = PROJECT_HOME + "/issues"
|
||||||
DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable"
|
DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from .version import version as __version__ # noqa
|
from .version import version as __version__
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
raise ImportError(
|
raise ImportError(
|
||||||
"Failed to find (autogenerated) version.py. "
|
"Failed to find (autogenerated) version.py. "
|
||||||
|
|
@ -16,12 +17,14 @@ except ImportError: # pragma: no cover
|
||||||
"use the PyPI ones."
|
"use the PyPI ones."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
__all__ = ["__version__"]
|
||||||
|
|
||||||
def _check_python_version(): # pragma: no cover
|
|
||||||
|
def _check_python_version():
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
if sys.version_info < (3, 7, 0):
|
if sys.version_info < (3, 9, 0): # noqa: UP036
|
||||||
print("vdirsyncer requires at least Python 3.7.")
|
print("vdirsyncer requires at least Python 3.9.")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
from vdirsyncer.cli import app
|
from vdirsyncer.cli import app
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,17 +1,24 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import functools
|
import functools
|
||||||
|
import json
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
import click
|
import click
|
||||||
import click_log
|
import click_log
|
||||||
|
|
||||||
from .. import __version__
|
from vdirsyncer import BUGTRACKER_HOME
|
||||||
from .. import BUGTRACKER_HOME
|
from vdirsyncer import __version__
|
||||||
|
|
||||||
|
|
||||||
cli_logger = logging.getLogger(__name__)
|
cli_logger = logging.getLogger(__name__)
|
||||||
click_log.basic_config("vdirsyncer")
|
click_log.basic_config("vdirsyncer")
|
||||||
|
|
||||||
|
# add short option for the help option
|
||||||
|
click_context_settings = {"help_option_names": ["-h", "--help"]}
|
||||||
|
|
||||||
|
|
||||||
class AppContext:
|
class AppContext:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
|
@ -37,13 +44,13 @@ def catch_errors(f):
|
||||||
return inner
|
return inner
|
||||||
|
|
||||||
|
|
||||||
@click.group()
|
@click.group(context_settings=click_context_settings)
|
||||||
@click_log.simple_verbosity_option("vdirsyncer")
|
@click_log.simple_verbosity_option("vdirsyncer")
|
||||||
@click.version_option(version=__version__)
|
@click.version_option(version=__version__)
|
||||||
@click.option("--config", "-c", metavar="FILE", help="Config file to use.")
|
@click.option("--config", "-c", metavar="FILE", help="Config file to use.")
|
||||||
@pass_context
|
@pass_context
|
||||||
@catch_errors
|
@catch_errors
|
||||||
def app(ctx, config):
|
def app(ctx, config: str):
|
||||||
"""
|
"""
|
||||||
Synchronize calendars and contacts
|
Synchronize calendars and contacts
|
||||||
"""
|
"""
|
||||||
|
|
@ -52,7 +59,7 @@ def app(ctx, config):
|
||||||
cli_logger.warning(
|
cli_logger.warning(
|
||||||
"Vdirsyncer currently does not support Windows. "
|
"Vdirsyncer currently does not support Windows. "
|
||||||
"You will likely encounter bugs. "
|
"You will likely encounter bugs. "
|
||||||
"See {}/535 for more information.".format(BUGTRACKER_HOME)
|
f"See {BUGTRACKER_HOME}/535 for more information."
|
||||||
)
|
)
|
||||||
|
|
||||||
if not ctx.config:
|
if not ctx.config:
|
||||||
|
|
@ -61,36 +68,6 @@ def app(ctx, config):
|
||||||
ctx.config = load_config(config)
|
ctx.config = load_config(config)
|
||||||
|
|
||||||
|
|
||||||
main = app
|
|
||||||
|
|
||||||
|
|
||||||
def max_workers_callback(ctx, param, value):
|
|
||||||
if value == 0 and logging.getLogger("vdirsyncer").level == logging.DEBUG:
|
|
||||||
value = 1
|
|
||||||
|
|
||||||
cli_logger.debug(f"Using {value} maximal workers.")
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
def max_workers_option(default=0):
|
|
||||||
help = "Use at most this many connections. "
|
|
||||||
if default == 0:
|
|
||||||
help += (
|
|
||||||
'The default is 0, which means "as many as necessary". '
|
|
||||||
"With -vdebug enabled, the default is 1."
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
help += f"The default is {default}."
|
|
||||||
|
|
||||||
return click.option(
|
|
||||||
"--max-workers",
|
|
||||||
default=default,
|
|
||||||
type=click.IntRange(min=0, max=None),
|
|
||||||
callback=max_workers_callback,
|
|
||||||
help=help,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def collections_arg_callback(ctx, param, value):
|
def collections_arg_callback(ctx, param, value):
|
||||||
"""
|
"""
|
||||||
Expand the various CLI shortforms ("pair, pair/collection") to an iterable
|
Expand the various CLI shortforms ("pair, pair/collection") to an iterable
|
||||||
|
|
@ -125,10 +102,9 @@ collections_arg = click.argument(
|
||||||
"to be deleted from both sides."
|
"to be deleted from both sides."
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
@max_workers_option()
|
|
||||||
@pass_context
|
@pass_context
|
||||||
@catch_errors
|
@catch_errors
|
||||||
def sync(ctx, collections, force_delete, max_workers):
|
def sync(ctx, collections, force_delete):
|
||||||
"""
|
"""
|
||||||
Synchronize the given collections or pairs. If no arguments are given, all
|
Synchronize the given collections or pairs. If no arguments are given, all
|
||||||
will be synchronized.
|
will be synchronized.
|
||||||
|
|
@ -149,54 +125,75 @@ def sync(ctx, collections, force_delete, max_workers):
|
||||||
# Sync only "first_collection" from the pair "bob"
|
# Sync only "first_collection" from the pair "bob"
|
||||||
vdirsyncer sync bob/first_collection
|
vdirsyncer sync bob/first_collection
|
||||||
"""
|
"""
|
||||||
from .tasks import prepare_pair, sync_collection
|
from .tasks import prepare_pair
|
||||||
from .utils import WorkerQueue
|
from .tasks import sync_collection
|
||||||
|
|
||||||
wq = WorkerQueue(max_workers)
|
async def main(collection_names):
|
||||||
|
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
||||||
with wq.join():
|
tasks = []
|
||||||
for pair_name, collections in collections:
|
for pair_name, collections in collection_names:
|
||||||
wq.put(
|
async for collection, config in prepare_pair(
|
||||||
functools.partial(
|
|
||||||
prepare_pair,
|
|
||||||
pair_name=pair_name,
|
pair_name=pair_name,
|
||||||
collections=collections,
|
collections=collections,
|
||||||
config=ctx.config,
|
config=ctx.config,
|
||||||
force_delete=force_delete,
|
connector=conn,
|
||||||
callback=sync_collection,
|
):
|
||||||
)
|
tasks.append(
|
||||||
)
|
sync_collection(
|
||||||
wq.spawn_worker()
|
collection=collection,
|
||||||
|
general=config,
|
||||||
|
force_delete=force_delete,
|
||||||
|
connector=conn,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# `return_exceptions=True` ensures that the event loop lives long enough for
|
||||||
|
# backoffs to be able to finish
|
||||||
|
gathered = await asyncio.gather(*tasks, return_exceptions=True)
|
||||||
|
# but now we need to manually check for and propogate a single failure after
|
||||||
|
# allowing all tasks to finish in order to keep exit status non-zero
|
||||||
|
failures = [e for e in gathered if isinstance(e, BaseException)]
|
||||||
|
if failures:
|
||||||
|
raise failures[0]
|
||||||
|
|
||||||
|
asyncio.run(main(collections))
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
@app.command()
|
||||||
@collections_arg
|
@collections_arg
|
||||||
@max_workers_option()
|
|
||||||
@pass_context
|
@pass_context
|
||||||
@catch_errors
|
@catch_errors
|
||||||
def metasync(ctx, collections, max_workers):
|
def metasync(ctx, collections):
|
||||||
"""
|
"""
|
||||||
Synchronize metadata of the given collections or pairs.
|
Synchronize metadata of the given collections or pairs.
|
||||||
|
|
||||||
See the `sync` command for usage.
|
See the `sync` command for usage.
|
||||||
"""
|
"""
|
||||||
from .tasks import prepare_pair, metasync_collection
|
from .tasks import metasync_collection
|
||||||
from .utils import WorkerQueue
|
from .tasks import prepare_pair
|
||||||
|
|
||||||
wq = WorkerQueue(max_workers)
|
async def main(collection_names):
|
||||||
|
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
||||||
with wq.join():
|
for pair_name, collections in collection_names:
|
||||||
for pair_name, collections in collections:
|
collections = prepare_pair(
|
||||||
wq.put(
|
|
||||||
functools.partial(
|
|
||||||
prepare_pair,
|
|
||||||
pair_name=pair_name,
|
pair_name=pair_name,
|
||||||
collections=collections,
|
collections=collections,
|
||||||
config=ctx.config,
|
config=ctx.config,
|
||||||
callback=metasync_collection,
|
connector=conn,
|
||||||
)
|
)
|
||||||
)
|
|
||||||
wq.spawn_worker()
|
await asyncio.gather(
|
||||||
|
*[
|
||||||
|
metasync_collection(
|
||||||
|
collection=collection,
|
||||||
|
general=config,
|
||||||
|
connector=conn,
|
||||||
|
)
|
||||||
|
async for collection, config in collections
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
asyncio.run(main(collections))
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
@app.command()
|
||||||
|
|
@ -209,33 +206,28 @@ def metasync(ctx, collections, max_workers):
|
||||||
"for debugging. This is slow and may crash for broken servers."
|
"for debugging. This is slow and may crash for broken servers."
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
@max_workers_option(default=1)
|
|
||||||
@pass_context
|
@pass_context
|
||||||
@catch_errors
|
@catch_errors
|
||||||
def discover(ctx, pairs, max_workers, list):
|
def discover(ctx, pairs, list):
|
||||||
"""
|
"""
|
||||||
Refresh collection cache for the given pairs.
|
Refresh collection cache for the given pairs.
|
||||||
"""
|
"""
|
||||||
from .tasks import discover_collections
|
from .tasks import discover_collections
|
||||||
from .utils import WorkerQueue
|
|
||||||
|
|
||||||
config = ctx.config
|
config = ctx.config
|
||||||
wq = WorkerQueue(max_workers)
|
|
||||||
|
|
||||||
with wq.join():
|
async def main():
|
||||||
for pair_name in pairs or config.pairs:
|
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
||||||
pair = config.get_pair(pair_name)
|
for pair_name in pairs or config.pairs:
|
||||||
|
await discover_collections(
|
||||||
wq.put(
|
|
||||||
functools.partial(
|
|
||||||
discover_collections,
|
|
||||||
status_path=config.general["status_path"],
|
status_path=config.general["status_path"],
|
||||||
pair=pair,
|
pair=config.get_pair(pair_name),
|
||||||
from_cache=False,
|
from_cache=False,
|
||||||
list_collections=list,
|
list_collections=list,
|
||||||
|
connector=conn,
|
||||||
)
|
)
|
||||||
)
|
|
||||||
wq.spawn_worker()
|
asyncio.run(main())
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
@app.command()
|
||||||
|
|
@ -274,4 +266,27 @@ def repair(ctx, collection, repair_unsafe_uid):
|
||||||
"turn off other client's synchronization features."
|
"turn off other client's synchronization features."
|
||||||
)
|
)
|
||||||
click.confirm("Do you want to continue?", abort=True)
|
click.confirm("Do you want to continue?", abort=True)
|
||||||
repair_collection(ctx.config, collection, repair_unsafe_uid=repair_unsafe_uid)
|
|
||||||
|
async def main():
|
||||||
|
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
||||||
|
await repair_collection(
|
||||||
|
ctx.config,
|
||||||
|
collection,
|
||||||
|
repair_unsafe_uid=repair_unsafe_uid,
|
||||||
|
connector=conn,
|
||||||
|
)
|
||||||
|
|
||||||
|
asyncio.run(main())
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
|
||||||
|
@pass_context
|
||||||
|
@catch_errors
|
||||||
|
def showconfig(ctx: AppContext):
|
||||||
|
"""Show the current configuration.
|
||||||
|
|
||||||
|
This is mostly intended to be used by scripts or other integrations.
|
||||||
|
If you need additional information in this dump, please reach out.
|
||||||
|
"""
|
||||||
|
config = {"storages": list(ctx.config.storages.values())}
|
||||||
|
click.echo(json.dumps(config, indent=2))
|
||||||
|
|
|
||||||
|
|
@ -1,19 +1,23 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import string
|
import string
|
||||||
|
from collections.abc import Generator
|
||||||
from configparser import RawConfigParser
|
from configparser import RawConfigParser
|
||||||
|
from functools import cached_property
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
|
from typing import IO
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from click_threading import get_ui_worker
|
from vdirsyncer import PROJECT_HOME
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.utils import expand_path
|
||||||
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
from .. import exceptions
|
|
||||||
from .. import PROJECT_HOME
|
|
||||||
from ..utils import cached_property
|
|
||||||
from ..utils import expand_path
|
|
||||||
from .fetchparams import expand_fetch_params
|
from .fetchparams import expand_fetch_params
|
||||||
from .utils import storage_class_from_config
|
from .utils import storage_class_from_config
|
||||||
|
|
||||||
|
|
||||||
GENERAL_ALL = frozenset(["status_path"])
|
GENERAL_ALL = frozenset(["status_path"])
|
||||||
GENERAL_REQUIRED = frozenset(["status_path"])
|
GENERAL_REQUIRED = frozenset(["status_path"])
|
||||||
SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, "_"))
|
SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, "_"))
|
||||||
|
|
@ -24,16 +28,16 @@ def validate_section_name(name, section_type):
|
||||||
if invalid:
|
if invalid:
|
||||||
chars_display = "".join(sorted(SECTION_NAME_CHARS))
|
chars_display = "".join(sorted(SECTION_NAME_CHARS))
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
'The {}-section "{}" contains invalid characters. Only '
|
f'The {section_type}-section "{name}" contains invalid characters. Only '
|
||||||
"the following characters are allowed for storage and "
|
"the following characters are allowed for storage and "
|
||||||
"pair names:\n{}".format(section_type, name, chars_display)
|
f"pair names:\n{chars_display}"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _validate_general_section(general_config):
|
def _validate_general_section(general_config: dict[str, str]):
|
||||||
invalid = set(general_config) - GENERAL_ALL
|
invalid = set(general_config) - GENERAL_ALL
|
||||||
missing = GENERAL_REQUIRED - set(general_config)
|
missing = GENERAL_REQUIRED - set(general_config)
|
||||||
problems = []
|
problems: list[str] = []
|
||||||
|
|
||||||
if invalid:
|
if invalid:
|
||||||
problems.append(
|
problems.append(
|
||||||
|
|
@ -48,7 +52,7 @@ def _validate_general_section(general_config):
|
||||||
if problems:
|
if problems:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Invalid general section. Copy the example "
|
"Invalid general section. Copy the example "
|
||||||
"config from the repository and edit it: {}".format(PROJECT_HOME),
|
f"config from the repository and edit it: {PROJECT_HOME}",
|
||||||
problems=problems,
|
problems=problems,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -89,23 +93,31 @@ def _validate_collections_param(collections):
|
||||||
raise ValueError("Duplicate value.")
|
raise ValueError("Duplicate value.")
|
||||||
collection_names.add(collection_name)
|
collection_names.add(collection_name)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise ValueError(
|
raise ValueError(f"`collections` parameter, position {i}: {e!s}")
|
||||||
"`collections` parameter, position {i}: {e}".format(i=i, e=str(e))
|
|
||||||
)
|
|
||||||
|
def _validate_implicit_param(implicit):
|
||||||
|
if implicit is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
if implicit != "create":
|
||||||
|
raise ValueError("`implicit` parameter must be 'create' or absent.")
|
||||||
|
|
||||||
|
|
||||||
class _ConfigReader:
|
class _ConfigReader:
|
||||||
def __init__(self, f):
|
def __init__(self, f: IO[Any]):
|
||||||
self._file = f
|
self._file: IO[Any] = f
|
||||||
self._parser = c = RawConfigParser()
|
self._parser = c = RawConfigParser()
|
||||||
c.read_file(f)
|
c.read_file(f)
|
||||||
self._seen_names = set()
|
self._seen_names: set = set()
|
||||||
|
|
||||||
self._general = {}
|
self._general: dict[str, str] = {}
|
||||||
self._pairs = {}
|
self._pairs: dict[str, dict[str, str]] = {}
|
||||||
self._storages = {}
|
self._storages: dict[str, dict[str, str]] = {}
|
||||||
|
|
||||||
def _parse_section(self, section_type, name, options):
|
def _parse_section(
|
||||||
|
self, section_type: str, name: str, options: dict[str, Any]
|
||||||
|
) -> None:
|
||||||
validate_section_name(name, section_type)
|
validate_section_name(name, section_type)
|
||||||
if name in self._seen_names:
|
if name in self._seen_names:
|
||||||
raise ValueError(f'Name "{name}" already used.')
|
raise ValueError(f'Name "{name}" already used.')
|
||||||
|
|
@ -122,7 +134,9 @@ class _ConfigReader:
|
||||||
else:
|
else:
|
||||||
raise ValueError("Unknown section type.")
|
raise ValueError("Unknown section type.")
|
||||||
|
|
||||||
def parse(self):
|
def parse(
|
||||||
|
self,
|
||||||
|
) -> tuple[dict[str, str], dict[str, dict[str, str]], dict[str, dict[str, str]]]:
|
||||||
for section in self._parser.sections():
|
for section in self._parser.sections():
|
||||||
if " " in section:
|
if " " in section:
|
||||||
section_type, name = section.split(" ", 1)
|
section_type, name = section.split(" ", 1)
|
||||||
|
|
@ -136,7 +150,7 @@ class _ConfigReader:
|
||||||
dict(_parse_options(self._parser.items(section), section=section)),
|
dict(_parse_options(self._parser.items(section), section=section)),
|
||||||
)
|
)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise exceptions.UserError('Section "{}": {}'.format(section, str(e)))
|
raise exceptions.UserError(f'Section "{section}": {e!s}')
|
||||||
|
|
||||||
_validate_general_section(self._general)
|
_validate_general_section(self._general)
|
||||||
if getattr(self._file, "name", None):
|
if getattr(self._file, "name", None):
|
||||||
|
|
@ -148,22 +162,29 @@ class _ConfigReader:
|
||||||
return self._general, self._pairs, self._storages
|
return self._general, self._pairs, self._storages
|
||||||
|
|
||||||
|
|
||||||
def _parse_options(items, section=None):
|
def _parse_options(
|
||||||
|
items: list[tuple[str, str]], section: str | None = None
|
||||||
|
) -> Generator[tuple[str, dict[str, str]], None, None]:
|
||||||
for key, value in items:
|
for key, value in items:
|
||||||
try:
|
try:
|
||||||
yield key, json.loads(value)
|
yield key, json.loads(value)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise ValueError('Section "{}", option "{}": {}'.format(section, key, e))
|
raise ValueError(f'Section "{section}", option "{key}": {e}')
|
||||||
|
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
def __init__(self, general, pairs, storages):
|
def __init__(
|
||||||
|
self,
|
||||||
|
general: dict[str, str],
|
||||||
|
pairs: dict[str, dict[str, str]],
|
||||||
|
storages: dict[str, dict[str, str]],
|
||||||
|
) -> None:
|
||||||
self.general = general
|
self.general = general
|
||||||
self.storages = storages
|
self.storages = storages
|
||||||
for name, options in storages.items():
|
for name, options in storages.items():
|
||||||
options["instance_name"] = name
|
options["instance_name"] = name
|
||||||
|
|
||||||
self.pairs = {}
|
self.pairs: dict[str, PairConfig] = {}
|
||||||
for name, options in pairs.items():
|
for name, options in pairs.items():
|
||||||
try:
|
try:
|
||||||
self.pairs[name] = PairConfig(self, name, options)
|
self.pairs[name] = PairConfig(self, name, options)
|
||||||
|
|
@ -171,12 +192,12 @@ class Config:
|
||||||
raise exceptions.UserError(f"Pair {name}: {e}")
|
raise exceptions.UserError(f"Pair {name}: {e}")
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_fileobject(cls, f):
|
def from_fileobject(cls, f: IO[Any]):
|
||||||
reader = _ConfigReader(f)
|
reader = _ConfigReader(f)
|
||||||
return cls(*reader.parse())
|
return cls(*reader.parse())
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_filename_or_environment(cls, fname=None):
|
def from_filename_or_environment(cls, fname: str | None = None):
|
||||||
if fname is None:
|
if fname is None:
|
||||||
fname = os.environ.get("VDIRSYNCER_CONFIG", None)
|
fname = os.environ.get("VDIRSYNCER_CONFIG", None)
|
||||||
if fname is None:
|
if fname is None:
|
||||||
|
|
@ -191,24 +212,20 @@ class Config:
|
||||||
with open(fname) as f:
|
with open(fname) as f:
|
||||||
return cls.from_fileobject(f)
|
return cls.from_fileobject(f)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(f"Error during reading config {fname}: {e}")
|
||||||
"Error during reading config {}: {}".format(fname, e)
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_storage_args(self, storage_name):
|
def get_storage_args(self, storage_name: str):
|
||||||
try:
|
try:
|
||||||
args = self.storages[storage_name]
|
args = self.storages[storage_name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Storage {!r} not found. "
|
f"Storage {storage_name!r} not found. "
|
||||||
"These are the configured storages: {}".format(
|
f"These are the configured storages: {list(self.storages)}"
|
||||||
storage_name, list(self.storages)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
return expand_fetch_params(args)
|
return expand_fetch_params(args)
|
||||||
|
|
||||||
def get_pair(self, pair_name):
|
def get_pair(self, pair_name: str) -> PairConfig:
|
||||||
try:
|
try:
|
||||||
return self.pairs[pair_name]
|
return self.pairs[pair_name]
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
|
|
@ -216,14 +233,15 @@ class Config:
|
||||||
|
|
||||||
|
|
||||||
class PairConfig:
|
class PairConfig:
|
||||||
def __init__(self, full_config, name, options):
|
def __init__(self, full_config: Config, name: str, options: dict[str, str]):
|
||||||
self._config = full_config
|
self._config: Config = full_config
|
||||||
self.name = name
|
self.name: str = name
|
||||||
self.name_a = options.pop("a")
|
self.name_a: str = options.pop("a")
|
||||||
self.name_b = options.pop("b")
|
self.name_b: str = options.pop("b")
|
||||||
|
self.implicit = options.pop("implicit", None)
|
||||||
|
|
||||||
self._partial_sync = options.pop("partial_sync", None)
|
self._partial_sync: str | None = options.pop("partial_sync", None)
|
||||||
self.metadata = options.pop("metadata", None) or ()
|
self.metadata: str | tuple[()] = options.pop("metadata", ())
|
||||||
|
|
||||||
self.conflict_resolution = self._process_conflict_resolution_param(
|
self.conflict_resolution = self._process_conflict_resolution_param(
|
||||||
options.pop("conflict_resolution", None)
|
options.pop("conflict_resolution", None)
|
||||||
|
|
@ -239,14 +257,17 @@ class PairConfig:
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
_validate_collections_param(self.collections)
|
_validate_collections_param(self.collections)
|
||||||
|
_validate_implicit_param(self.implicit)
|
||||||
|
|
||||||
if options:
|
if options:
|
||||||
raise ValueError("Unknown options: {}".format(", ".join(options)))
|
raise ValueError("Unknown options: {}".format(", ".join(options)))
|
||||||
|
|
||||||
def _process_conflict_resolution_param(self, conflict_resolution):
|
def _process_conflict_resolution_param(
|
||||||
|
self, conflict_resolution: str | list[str] | None
|
||||||
|
):
|
||||||
if conflict_resolution in (None, "a wins", "b wins"):
|
if conflict_resolution in (None, "a wins", "b wins"):
|
||||||
return conflict_resolution
|
return conflict_resolution
|
||||||
elif (
|
if (
|
||||||
isinstance(conflict_resolution, list)
|
isinstance(conflict_resolution, list)
|
||||||
and len(conflict_resolution) > 1
|
and len(conflict_resolution) > 1
|
||||||
and conflict_resolution[0] == "command"
|
and conflict_resolution[0] == "command"
|
||||||
|
|
@ -257,15 +278,10 @@ class PairConfig:
|
||||||
b_name = self.config_b["instance_name"]
|
b_name = self.config_b["instance_name"]
|
||||||
command = conflict_resolution[1:]
|
command = conflict_resolution[1:]
|
||||||
|
|
||||||
def inner():
|
return _resolve_conflict_via_command(a, b, command, a_name, b_name)
|
||||||
return _resolve_conflict_via_command(a, b, command, a_name, b_name)
|
|
||||||
|
|
||||||
ui_worker = get_ui_worker()
|
|
||||||
return ui_worker.put(inner)
|
|
||||||
|
|
||||||
return resolve
|
return resolve
|
||||||
else:
|
raise ValueError("Invalid value for `conflict_resolution`.")
|
||||||
raise ValueError("Invalid value for `conflict_resolution`.")
|
|
||||||
|
|
||||||
# The following parameters are lazily evaluated because evaluating
|
# The following parameters are lazily evaluated because evaluating
|
||||||
# self.config_a would expand all `x.fetch` parameters. This is costly and
|
# self.config_a would expand all `x.fetch` parameters. This is costly and
|
||||||
|
|
@ -311,10 +327,10 @@ class PairConfig:
|
||||||
|
|
||||||
|
|
||||||
class CollectionConfig:
|
class CollectionConfig:
|
||||||
def __init__(self, pair, name, config_a, config_b):
|
def __init__(self, pair, name: str, config_a, config_b):
|
||||||
self.pair = pair
|
self.pair = pair
|
||||||
self._config = pair._config
|
self._config = pair._config
|
||||||
self.name = name
|
self.name: str = name
|
||||||
self.config_a = config_a
|
self.config_a = config_a
|
||||||
self.config_b = config_b
|
self.config_b = config_b
|
||||||
|
|
||||||
|
|
@ -323,14 +339,16 @@ class CollectionConfig:
|
||||||
load_config = Config.from_filename_or_environment
|
load_config = Config.from_filename_or_environment
|
||||||
|
|
||||||
|
|
||||||
def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=None):
|
def _resolve_conflict_via_command(
|
||||||
import tempfile
|
a, b, command, a_name, b_name, _check_call=None
|
||||||
|
) -> Item:
|
||||||
import shutil
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
|
||||||
if _check_call is None:
|
if _check_call is None:
|
||||||
from subprocess import check_call as _check_call
|
from subprocess import check_call as _check_call
|
||||||
|
|
||||||
from ..vobject import Item
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.")
|
dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.")
|
||||||
try:
|
try:
|
||||||
|
|
@ -343,7 +361,7 @@ def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=Non
|
||||||
f.write(b.raw)
|
f.write(b.raw)
|
||||||
|
|
||||||
command[0] = expand_path(command[0])
|
command[0] = expand_path(command[0])
|
||||||
_check_call(command + [a_tmp, b_tmp])
|
_check_call([*command, a_tmp, b_tmp])
|
||||||
|
|
||||||
with open(a_tmp) as f:
|
with open(a_tmp) as f:
|
||||||
new_a = f.read()
|
new_a = f.read()
|
||||||
|
|
@ -351,7 +369,7 @@ def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=Non
|
||||||
new_b = f.read()
|
new_b = f.read()
|
||||||
|
|
||||||
if new_a != new_b:
|
if new_a != new_b:
|
||||||
raise exceptions.UserError("The two files are not completely " "equal.")
|
raise exceptions.UserError("The two files are not completely equal.")
|
||||||
return Item(new_a)
|
return Item(new_a)
|
||||||
finally:
|
finally:
|
||||||
shutil.rmtree(dir)
|
shutil.rmtree(dir)
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,16 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from .. import exceptions
|
import aiohttp
|
||||||
from ..utils import cached_property
|
import aiostream
|
||||||
|
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
|
||||||
from .utils import handle_collection_not_found
|
from .utils import handle_collection_not_found
|
||||||
from .utils import handle_storage_init_error
|
from .utils import handle_storage_init_error
|
||||||
from .utils import load_status
|
from .utils import load_status
|
||||||
|
|
@ -12,7 +18,6 @@ from .utils import save_status
|
||||||
from .utils import storage_class_from_config
|
from .utils import storage_class_from_config
|
||||||
from .utils import storage_instance_from_config
|
from .utils import storage_instance_from_config
|
||||||
|
|
||||||
|
|
||||||
# Increase whenever upgrade potentially breaks discovery cache and collections
|
# Increase whenever upgrade potentially breaks discovery cache and collections
|
||||||
# should be re-discovered
|
# should be re-discovered
|
||||||
DISCOVERY_CACHE_VERSION = 1
|
DISCOVERY_CACHE_VERSION = 1
|
||||||
|
|
@ -35,7 +40,14 @@ def _get_collections_cache_key(pair):
|
||||||
return m.hexdigest()
|
return m.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
def collections_for_pair(status_path, pair, from_cache=True, list_collections=False):
|
async def collections_for_pair(
|
||||||
|
status_path,
|
||||||
|
pair,
|
||||||
|
from_cache=True,
|
||||||
|
list_collections=False,
|
||||||
|
*,
|
||||||
|
connector: aiohttp.TCPConnector,
|
||||||
|
):
|
||||||
"""Determine all configured collections for a given pair. Takes care of
|
"""Determine all configured collections for a given pair. Takes care of
|
||||||
shortcut expansion and result caching.
|
shortcut expansion and result caching.
|
||||||
|
|
||||||
|
|
@ -54,44 +66,58 @@ def collections_for_pair(status_path, pair, from_cache=True, list_collections=Fa
|
||||||
rv["collections"], pair.config_a, pair.config_b
|
rv["collections"], pair.config_a, pair.config_b
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
elif rv:
|
if rv:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Detected change in config file, "
|
"Detected change in config file, "
|
||||||
"please run `vdirsyncer discover {}`.".format(pair.name)
|
f"please run `vdirsyncer discover {pair.name}`."
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise exceptions.UserError(
|
|
||||||
"Please run `vdirsyncer discover {}` "
|
|
||||||
" before synchronization.".format(pair.name)
|
|
||||||
)
|
)
|
||||||
|
raise exceptions.UserError(
|
||||||
|
f"Please run `vdirsyncer discover {pair.name}` before synchronization."
|
||||||
|
)
|
||||||
|
|
||||||
logger.info("Discovering collections for pair {}".format(pair.name))
|
logger.info(f"Discovering collections for pair {pair.name}")
|
||||||
|
|
||||||
a_discovered = _DiscoverResult(pair.config_a)
|
a_discovered = DiscoverResult(pair.config_a, connector=connector)
|
||||||
b_discovered = _DiscoverResult(pair.config_b)
|
b_discovered = DiscoverResult(pair.config_b, connector=connector)
|
||||||
|
|
||||||
if list_collections:
|
if list_collections:
|
||||||
_print_collections(pair.config_a["instance_name"], a_discovered.get_self)
|
# TODO: We should gather data and THEN print, so it can be async.
|
||||||
_print_collections(pair.config_b["instance_name"], b_discovered.get_self)
|
await _print_collections(
|
||||||
|
pair.config_a["instance_name"],
|
||||||
|
a_discovered.get_self,
|
||||||
|
connector=connector,
|
||||||
|
)
|
||||||
|
await _print_collections(
|
||||||
|
pair.config_b["instance_name"],
|
||||||
|
b_discovered.get_self,
|
||||||
|
connector=connector,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _handle_collection_not_found(
|
||||||
|
config, collection, e=None, implicit_create=False
|
||||||
|
):
|
||||||
|
return await handle_collection_not_found(
|
||||||
|
config, collection, e=e, implicit_create=pair.implicit == "create"
|
||||||
|
)
|
||||||
|
|
||||||
# We have to use a list here because the special None/null value would get
|
# We have to use a list here because the special None/null value would get
|
||||||
# mangled to string (because JSON objects always have string keys).
|
# mangled to string (because JSON objects always have string keys).
|
||||||
rv = list(
|
rv = await aiostream.stream.list( # type: ignore[assignment]
|
||||||
expand_collections(
|
expand_collections(
|
||||||
shortcuts=pair.collections,
|
shortcuts=pair.collections,
|
||||||
config_a=pair.config_a,
|
config_a=pair.config_a,
|
||||||
config_b=pair.config_b,
|
config_b=pair.config_b,
|
||||||
get_a_discovered=a_discovered.get_self,
|
get_a_discovered=a_discovered.get_self,
|
||||||
get_b_discovered=b_discovered.get_self,
|
get_b_discovered=b_discovered.get_self,
|
||||||
_handle_collection_not_found=handle_collection_not_found,
|
_handle_collection_not_found=_handle_collection_not_found,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
_sanity_check_collections(rv)
|
await _sanity_check_collections(rv, connector=connector)
|
||||||
|
|
||||||
save_status(
|
save_status(
|
||||||
status_path,
|
base_path=status_path,
|
||||||
pair.name,
|
pair=pair.name,
|
||||||
data_type="collections",
|
data_type="collections",
|
||||||
data={
|
data={
|
||||||
"collections": list(
|
"collections": list(
|
||||||
|
|
@ -103,10 +129,14 @@ def collections_for_pair(status_path, pair, from_cache=True, list_collections=Fa
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
|
|
||||||
def _sanity_check_collections(collections):
|
async def _sanity_check_collections(collections, *, connector):
|
||||||
|
tasks = []
|
||||||
|
|
||||||
for _, (a_args, b_args) in collections:
|
for _, (a_args, b_args) in collections:
|
||||||
storage_instance_from_config(a_args)
|
tasks.append(storage_instance_from_config(a_args, connector=connector))
|
||||||
storage_instance_from_config(b_args)
|
tasks.append(storage_instance_from_config(b_args, connector=connector))
|
||||||
|
|
||||||
|
await asyncio.gather(*tasks)
|
||||||
|
|
||||||
|
|
||||||
def _compress_collections_cache(collections, config_a, config_b):
|
def _compress_collections_cache(collections, config_a, config_b):
|
||||||
|
|
@ -133,18 +163,30 @@ def _expand_collections_cache(collections, config_a, config_b):
|
||||||
yield name, (a, b)
|
yield name, (a, b)
|
||||||
|
|
||||||
|
|
||||||
class _DiscoverResult:
|
class DiscoverResult:
|
||||||
def __init__(self, config):
|
def __init__(self, config, *, connector):
|
||||||
self._cls, _ = storage_class_from_config(config)
|
self._cls, _ = storage_class_from_config(config)
|
||||||
self._config = config
|
|
||||||
|
|
||||||
def get_self(self):
|
if self._cls.__name__ in [
|
||||||
|
"CardDAVStorage",
|
||||||
|
"CalDAVStorage",
|
||||||
|
"GoogleCalendarStorage",
|
||||||
|
"GoogleContactsStorage",
|
||||||
|
]:
|
||||||
|
assert connector is not None
|
||||||
|
config["connector"] = connector
|
||||||
|
|
||||||
|
self._config = config
|
||||||
|
self._discovered = None
|
||||||
|
|
||||||
|
async def get_self(self):
|
||||||
|
if self._discovered is None:
|
||||||
|
self._discovered = await self._discover()
|
||||||
return self._discovered
|
return self._discovered
|
||||||
|
|
||||||
@cached_property
|
async def _discover(self):
|
||||||
def _discovered(self):
|
|
||||||
try:
|
try:
|
||||||
discovered = list(self._cls.discover(**self._config))
|
discovered = await aiostream.stream.list(self._cls.discover(**self._config))
|
||||||
except NotImplementedError:
|
except NotImplementedError:
|
||||||
return {}
|
return {}
|
||||||
except Exception:
|
except Exception:
|
||||||
|
|
@ -158,7 +200,7 @@ class _DiscoverResult:
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
|
|
||||||
def expand_collections(
|
async def expand_collections(
|
||||||
shortcuts,
|
shortcuts,
|
||||||
config_a,
|
config_a,
|
||||||
config_b,
|
config_b,
|
||||||
|
|
@ -173,9 +215,9 @@ def expand_collections(
|
||||||
|
|
||||||
for shortcut in shortcuts:
|
for shortcut in shortcuts:
|
||||||
if shortcut == "from a":
|
if shortcut == "from a":
|
||||||
collections = get_a_discovered()
|
collections = await get_a_discovered()
|
||||||
elif shortcut == "from b":
|
elif shortcut == "from b":
|
||||||
collections = get_b_discovered()
|
collections = await get_b_discovered()
|
||||||
else:
|
else:
|
||||||
collections = [shortcut]
|
collections = [shortcut]
|
||||||
|
|
||||||
|
|
@ -189,17 +231,23 @@ def expand_collections(
|
||||||
continue
|
continue
|
||||||
handled_collections.add(collection)
|
handled_collections.add(collection)
|
||||||
|
|
||||||
a_args = _collection_from_discovered(
|
a_args = await _collection_from_discovered(
|
||||||
get_a_discovered, collection_a, config_a, _handle_collection_not_found
|
get_a_discovered,
|
||||||
|
collection_a,
|
||||||
|
config_a,
|
||||||
|
_handle_collection_not_found,
|
||||||
)
|
)
|
||||||
b_args = _collection_from_discovered(
|
b_args = await _collection_from_discovered(
|
||||||
get_b_discovered, collection_b, config_b, _handle_collection_not_found
|
get_b_discovered,
|
||||||
|
collection_b,
|
||||||
|
config_b,
|
||||||
|
_handle_collection_not_found,
|
||||||
)
|
)
|
||||||
|
|
||||||
yield collection, (a_args, b_args)
|
yield collection, (a_args, b_args)
|
||||||
|
|
||||||
|
|
||||||
def _collection_from_discovered(
|
async def _collection_from_discovered(
|
||||||
get_discovered, collection, config, _handle_collection_not_found
|
get_discovered, collection, config, _handle_collection_not_found
|
||||||
):
|
):
|
||||||
if collection is None:
|
if collection is None:
|
||||||
|
|
@ -208,14 +256,19 @@ def _collection_from_discovered(
|
||||||
return args
|
return args
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return get_discovered()[collection]
|
return (await get_discovered())[collection]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return _handle_collection_not_found(config, collection)
|
return await _handle_collection_not_found(config, collection)
|
||||||
|
|
||||||
|
|
||||||
def _print_collections(instance_name, get_discovered):
|
async def _print_collections(
|
||||||
|
instance_name: str,
|
||||||
|
get_discovered,
|
||||||
|
*,
|
||||||
|
connector: aiohttp.TCPConnector,
|
||||||
|
):
|
||||||
try:
|
try:
|
||||||
discovered = get_discovered()
|
discovered = await get_discovered()
|
||||||
except exceptions.UserError:
|
except exceptions.UserError:
|
||||||
raise
|
raise
|
||||||
except Exception:
|
except Exception:
|
||||||
|
|
@ -226,28 +279,37 @@ def _print_collections(instance_name, get_discovered):
|
||||||
|
|
||||||
logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
|
logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Failed to discover collections for {}, use `-vdebug` "
|
f"Failed to discover collections for {instance_name}, use `-vdebug` "
|
||||||
"to see the full traceback.".format(instance_name)
|
"to see the full traceback."
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
logger.info(f"{instance_name}:")
|
logger.info(f"{instance_name}:")
|
||||||
|
tasks = []
|
||||||
for args in discovered.values():
|
for args in discovered.values():
|
||||||
collection = args["collection"]
|
tasks.append(_print_single_collection(args, instance_name, connector))
|
||||||
if collection is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
args["instance_name"] = instance_name
|
await asyncio.gather(*tasks)
|
||||||
try:
|
|
||||||
storage = storage_instance_from_config(args, create=False)
|
|
||||||
displayname = storage.get_meta("displayname")
|
|
||||||
except Exception:
|
|
||||||
displayname = ""
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
" - {}{}".format(
|
async def _print_single_collection(args, instance_name, connector):
|
||||||
json.dumps(collection),
|
collection = args["collection"]
|
||||||
f' ("{displayname}")'
|
if collection is None:
|
||||||
if displayname and displayname != collection
|
return
|
||||||
else "",
|
|
||||||
)
|
args["instance_name"] = instance_name
|
||||||
|
try:
|
||||||
|
storage = await storage_instance_from_config(
|
||||||
|
args,
|
||||||
|
create=False,
|
||||||
|
connector=connector,
|
||||||
)
|
)
|
||||||
|
displayname = await storage.get_meta("displayname")
|
||||||
|
except Exception:
|
||||||
|
displayname = ""
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
" - {}{}".format(
|
||||||
|
json.dumps(collection),
|
||||||
|
f' ("{displayname}")' if displayname and displayname != collection else "",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,14 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import click
|
import click
|
||||||
|
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.utils import expand_path
|
||||||
|
from vdirsyncer.utils import synchronized
|
||||||
|
|
||||||
from . import AppContext
|
from . import AppContext
|
||||||
from .. import exceptions
|
|
||||||
from ..utils import expand_path
|
|
||||||
from ..utils import synchronized
|
|
||||||
|
|
||||||
SUFFIX = ".fetch"
|
SUFFIX = ".fetch"
|
||||||
|
|
||||||
|
|
@ -30,16 +33,14 @@ def expand_fetch_params(config):
|
||||||
@synchronized()
|
@synchronized()
|
||||||
def _fetch_value(opts, key):
|
def _fetch_value(opts, key):
|
||||||
if not isinstance(opts, list):
|
if not isinstance(opts, list):
|
||||||
raise ValueError(
|
raise ValueError(f"Invalid value for {key}: Expected a list, found {opts!r}.")
|
||||||
"Invalid value for {}: Expected a list, found {!r}.".format(key, opts)
|
|
||||||
)
|
|
||||||
if not opts:
|
if not opts:
|
||||||
raise ValueError("Expected list of length > 0.")
|
raise ValueError("Expected list of length > 0.")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ctx = click.get_current_context().find_object(AppContext)
|
ctx = click.get_current_context().find_object(AppContext)
|
||||||
if ctx is None:
|
if ctx is None:
|
||||||
raise RuntimeError()
|
raise RuntimeError
|
||||||
password_cache = ctx.fetched_params
|
password_cache = ctx.fetched_params
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
password_cache = {}
|
password_cache = {}
|
||||||
|
|
@ -58,7 +59,7 @@ def _fetch_value(opts, key):
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise exceptions.UserError(f"Unknown strategy: {strategy}")
|
raise exceptions.UserError(f"Unknown strategy: {strategy}")
|
||||||
|
|
||||||
logger.debug("Fetching value for {} with {} strategy.".format(key, strategy))
|
logger.debug(f"Fetching value for {key} with {strategy} strategy.")
|
||||||
try:
|
try:
|
||||||
rv = strategy_fn(*opts[1:])
|
rv = strategy_fn(*opts[1:])
|
||||||
except (click.Abort, KeyboardInterrupt) as e:
|
except (click.Abort, KeyboardInterrupt) as e:
|
||||||
|
|
@ -67,24 +68,32 @@ def _fetch_value(opts, key):
|
||||||
else:
|
else:
|
||||||
if not rv:
|
if not rv:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Empty value for {}, this most likely "
|
f"Empty value for {key}, this most likely indicates an error."
|
||||||
"indicates an error.".format(key)
|
|
||||||
)
|
)
|
||||||
password_cache[cache_key] = rv
|
password_cache[cache_key] = rv
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
|
|
||||||
def _strategy_command(*command):
|
def _strategy_command(*command: str, shell: bool = False):
|
||||||
|
"""Execute a user-specified command and return its output."""
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
command = (expand_path(command[0]),) + command[1:]
|
# Normalize path of every path member.
|
||||||
|
# If there is no path specified then nothing will happen.
|
||||||
|
# Makes this a list to avoid it being exhausted on the first iteration.
|
||||||
|
expanded_command = list(map(expand_path, command))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
stdout = subprocess.check_output(command, universal_newlines=True)
|
stdout = subprocess.check_output(expanded_command, text=True, shell=shell)
|
||||||
return stdout.strip("\n")
|
return stdout.strip("\n")
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
raise exceptions.UserError(
|
cmd = " ".join(expanded_command)
|
||||||
"Failed to execute command: {}\n{}".format(" ".join(command), str(e))
|
raise exceptions.UserError(f"Failed to execute command: {cmd}\n{e!s}")
|
||||||
)
|
|
||||||
|
|
||||||
|
def _strategy_shell(*command: str):
|
||||||
|
"""Execute a user-specified command string in a shell and return its output."""
|
||||||
|
return _strategy_command(*command, shell=True)
|
||||||
|
|
||||||
|
|
||||||
def _strategy_prompt(text):
|
def _strategy_prompt(text):
|
||||||
|
|
@ -93,5 +102,6 @@ def _strategy_prompt(text):
|
||||||
|
|
||||||
STRATEGIES = {
|
STRATEGIES = {
|
||||||
"command": _strategy_command,
|
"command": _strategy_command,
|
||||||
|
"shell": _strategy_shell,
|
||||||
"prompt": _strategy_prompt,
|
"prompt": _strategy_prompt,
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,62 +1,64 @@
|
||||||
import functools
|
from __future__ import annotations
|
||||||
|
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from .. import exceptions
|
import aiohttp
|
||||||
from .. import sync
|
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer import sync
|
||||||
|
|
||||||
from .config import CollectionConfig
|
from .config import CollectionConfig
|
||||||
|
from .discover import DiscoverResult
|
||||||
from .discover import collections_for_pair
|
from .discover import collections_for_pair
|
||||||
from .discover import storage_class_from_config
|
|
||||||
from .discover import storage_instance_from_config
|
from .discover import storage_instance_from_config
|
||||||
|
from .utils import JobFailed
|
||||||
from .utils import cli_logger
|
from .utils import cli_logger
|
||||||
from .utils import get_status_name
|
from .utils import get_status_name
|
||||||
from .utils import handle_cli_error
|
from .utils import handle_cli_error
|
||||||
from .utils import JobFailed
|
|
||||||
from .utils import load_status
|
from .utils import load_status
|
||||||
from .utils import manage_sync_status
|
from .utils import manage_sync_status
|
||||||
from .utils import save_status
|
from .utils import save_status
|
||||||
|
|
||||||
|
|
||||||
def prepare_pair(wq, pair_name, collections, config, callback, **kwargs):
|
async def prepare_pair(pair_name, collections, config, *, connector):
|
||||||
pair = config.get_pair(pair_name)
|
pair = config.get_pair(pair_name)
|
||||||
|
|
||||||
all_collections = dict(
|
all_collections = dict(
|
||||||
collections_for_pair(status_path=config.general["status_path"], pair=pair)
|
await collections_for_pair(
|
||||||
|
status_path=config.general["status_path"],
|
||||||
|
pair=pair,
|
||||||
|
connector=connector,
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# spawn one worker less because we can reuse the current one
|
|
||||||
new_workers = -1
|
|
||||||
for collection_name in collections or all_collections:
|
for collection_name in collections or all_collections:
|
||||||
try:
|
try:
|
||||||
config_a, config_b = all_collections[collection_name]
|
config_a, config_b = all_collections[collection_name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Pair {}: Collection {} not found. These are the "
|
f"Pair {pair_name}: Collection {json.dumps(collection_name)} not found."
|
||||||
"configured collections:\n{}".format(
|
f"These are the configured collections:\n{list(all_collections)}"
|
||||||
pair_name, json.dumps(collection_name), list(all_collections)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
new_workers += 1
|
|
||||||
|
|
||||||
collection = CollectionConfig(pair, collection_name, config_a, config_b)
|
collection = CollectionConfig(pair, collection_name, config_a, config_b)
|
||||||
wq.put(
|
yield collection, config.general
|
||||||
functools.partial(
|
|
||||||
callback, collection=collection, general=config.general, **kwargs
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
for _ in range(new_workers):
|
|
||||||
wq.spawn_worker()
|
|
||||||
|
|
||||||
|
|
||||||
def sync_collection(wq, collection, general, force_delete):
|
async def sync_collection(
|
||||||
|
collection,
|
||||||
|
general,
|
||||||
|
force_delete,
|
||||||
|
*,
|
||||||
|
connector: aiohttp.TCPConnector,
|
||||||
|
):
|
||||||
pair = collection.pair
|
pair = collection.pair
|
||||||
status_name = get_status_name(pair.name, collection.name)
|
status_name = get_status_name(pair.name, collection.name)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
cli_logger.info(f"Syncing {status_name}")
|
cli_logger.info(f"Syncing {status_name}")
|
||||||
|
|
||||||
a = storage_instance_from_config(collection.config_a)
|
a = await storage_instance_from_config(collection.config_a, connector=connector)
|
||||||
b = storage_instance_from_config(collection.config_b)
|
b = await storage_instance_from_config(collection.config_b, connector=connector)
|
||||||
|
|
||||||
sync_failed = False
|
sync_failed = False
|
||||||
|
|
||||||
|
|
@ -68,7 +70,7 @@ def sync_collection(wq, collection, general, force_delete):
|
||||||
with manage_sync_status(
|
with manage_sync_status(
|
||||||
general["status_path"], pair.name, collection.name
|
general["status_path"], pair.name, collection.name
|
||||||
) as status:
|
) as status:
|
||||||
sync.sync(
|
await sync.sync(
|
||||||
a,
|
a,
|
||||||
b,
|
b,
|
||||||
status,
|
status,
|
||||||
|
|
@ -79,57 +81,61 @@ def sync_collection(wq, collection, general, force_delete):
|
||||||
)
|
)
|
||||||
|
|
||||||
if sync_failed:
|
if sync_failed:
|
||||||
raise JobFailed()
|
raise JobFailed
|
||||||
except JobFailed:
|
except JobFailed:
|
||||||
raise
|
raise
|
||||||
except BaseException:
|
except BaseException:
|
||||||
handle_cli_error(status_name)
|
handle_cli_error(status_name)
|
||||||
raise JobFailed()
|
raise JobFailed
|
||||||
|
|
||||||
|
|
||||||
def discover_collections(wq, pair, **kwargs):
|
async def discover_collections(pair, **kwargs):
|
||||||
rv = collections_for_pair(pair=pair, **kwargs)
|
rv = await collections_for_pair(pair=pair, **kwargs)
|
||||||
collections = list(c for c, (a, b) in rv)
|
collections = [c for c, (a, b) in rv]
|
||||||
if collections == [None]:
|
if collections == [None]:
|
||||||
collections = None
|
collections = None
|
||||||
cli_logger.info(
|
cli_logger.info(f"Saved for {pair.name}: collections = {json.dumps(collections)}")
|
||||||
"Saved for {}: collections = {}".format(pair.name, json.dumps(collections))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def repair_collection(config, collection, repair_unsafe_uid):
|
async def repair_collection(
|
||||||
from ..repair import repair_storage
|
config,
|
||||||
|
collection,
|
||||||
|
repair_unsafe_uid,
|
||||||
|
*,
|
||||||
|
connector: aiohttp.TCPConnector,
|
||||||
|
):
|
||||||
|
from vdirsyncer.repair import repair_storage
|
||||||
|
|
||||||
storage_name, collection = collection, None
|
storage_name, collection = collection, None
|
||||||
if "/" in storage_name:
|
if "/" in storage_name:
|
||||||
storage_name, collection = storage_name.split("/")
|
storage_name, collection = storage_name.split("/")
|
||||||
|
|
||||||
config = config.get_storage_args(storage_name)
|
config = config.get_storage_args(storage_name)
|
||||||
storage_type = config["type"]
|
# If storage type has a slash, ignore it and anything after it.
|
||||||
|
storage_type = config["type"].split("/")[0]
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
cli_logger.info("Discovering collections (skipping cache).")
|
cli_logger.info("Discovering collections (skipping cache).")
|
||||||
cls, config = storage_class_from_config(config)
|
get_discovered = DiscoverResult(config, connector=connector)
|
||||||
for config in cls.discover(**config):
|
discovered = await get_discovered.get_self()
|
||||||
|
for config in discovered.values():
|
||||||
if config["collection"] == collection:
|
if config["collection"] == collection:
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Couldn't find collection {} for storage {}.".format(
|
f"Couldn't find collection {collection} for storage {storage_name}."
|
||||||
collection, storage_name
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
config["type"] = storage_type
|
config["type"] = storage_type
|
||||||
storage = storage_instance_from_config(config)
|
storage = await storage_instance_from_config(config, connector=connector)
|
||||||
|
|
||||||
cli_logger.info(f"Repairing {storage_name}/{collection}")
|
cli_logger.info(f"Repairing {storage_name}/{collection}")
|
||||||
cli_logger.warning("Make sure no other program is talking to the server.")
|
cli_logger.warning("Make sure no other program is talking to the server.")
|
||||||
repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid)
|
await repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid)
|
||||||
|
|
||||||
|
|
||||||
def metasync_collection(wq, collection, general):
|
async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector):
|
||||||
from ..metasync import metasync
|
from vdirsyncer.metasync import metasync
|
||||||
|
|
||||||
pair = collection.pair
|
pair = collection.pair
|
||||||
status_name = get_status_name(pair.name, collection.name)
|
status_name = get_status_name(pair.name, collection.name)
|
||||||
|
|
@ -137,17 +143,17 @@ def metasync_collection(wq, collection, general):
|
||||||
try:
|
try:
|
||||||
cli_logger.info(f"Metasyncing {status_name}")
|
cli_logger.info(f"Metasyncing {status_name}")
|
||||||
|
|
||||||
status = (
|
status = load_status(
|
||||||
load_status(
|
general["status_path"],
|
||||||
general["status_path"], pair.name, collection.name, data_type="metadata"
|
pair.name,
|
||||||
)
|
collection.name,
|
||||||
or {}
|
data_type="metadata",
|
||||||
)
|
)
|
||||||
|
|
||||||
a = storage_instance_from_config(collection.config_a)
|
a = await storage_instance_from_config(collection.config_a, connector=connector)
|
||||||
b = storage_instance_from_config(collection.config_b)
|
b = await storage_instance_from_config(collection.config_b, connector=connector)
|
||||||
|
|
||||||
metasync(
|
await metasync(
|
||||||
a,
|
a,
|
||||||
b,
|
b,
|
||||||
status,
|
status,
|
||||||
|
|
@ -156,12 +162,12 @@ def metasync_collection(wq, collection, general):
|
||||||
)
|
)
|
||||||
except BaseException:
|
except BaseException:
|
||||||
handle_cli_error(status_name)
|
handle_cli_error(status_name)
|
||||||
raise JobFailed()
|
raise JobFailed
|
||||||
|
|
||||||
save_status(
|
save_status(
|
||||||
general["status_path"],
|
base_path=general["status_path"],
|
||||||
pair.name,
|
pair=pair.name,
|
||||||
collection.name,
|
|
||||||
data_type="metadata",
|
data_type="metadata",
|
||||||
data=status,
|
data=status,
|
||||||
|
collection=collection.name,
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,48 +1,48 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import errno
|
import errno
|
||||||
import importlib
|
import importlib
|
||||||
import itertools
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import queue
|
|
||||||
import sys
|
import sys
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
import click
|
import click
|
||||||
import click_threading
|
|
||||||
from atomicwrites import atomic_write
|
from vdirsyncer import BUGTRACKER_HOME
|
||||||
|
from vdirsyncer import DOCS_HOME
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.storage.base import Storage
|
||||||
|
from vdirsyncer.sync.exceptions import IdentConflict
|
||||||
|
from vdirsyncer.sync.exceptions import PartialSync
|
||||||
|
from vdirsyncer.sync.exceptions import StorageEmpty
|
||||||
|
from vdirsyncer.sync.exceptions import SyncConflict
|
||||||
|
from vdirsyncer.sync.status import SqliteStatus
|
||||||
|
from vdirsyncer.utils import atomic_write
|
||||||
|
from vdirsyncer.utils import expand_path
|
||||||
|
from vdirsyncer.utils import get_storage_init_args
|
||||||
|
|
||||||
from . import cli_logger
|
from . import cli_logger
|
||||||
from .. import BUGTRACKER_HOME
|
|
||||||
from .. import DOCS_HOME
|
|
||||||
from .. import exceptions
|
|
||||||
from ..sync.exceptions import IdentConflict
|
|
||||||
from ..sync.exceptions import PartialSync
|
|
||||||
from ..sync.exceptions import StorageEmpty
|
|
||||||
from ..sync.exceptions import SyncConflict
|
|
||||||
from ..sync.status import SqliteStatus
|
|
||||||
from ..utils import expand_path
|
|
||||||
from ..utils import get_storage_init_args
|
|
||||||
|
|
||||||
|
|
||||||
STATUS_PERMISSIONS = 0o600
|
STATUS_PERMISSIONS = 0o600
|
||||||
STATUS_DIR_PERMISSIONS = 0o700
|
STATUS_DIR_PERMISSIONS = 0o700
|
||||||
|
|
||||||
|
|
||||||
class _StorageIndex:
|
class _StorageIndex:
|
||||||
def __init__(self):
|
def __init__(self) -> None:
|
||||||
self._storages = dict(
|
self._storages: dict[str, str] = {
|
||||||
caldav="vdirsyncer.storage.dav.CalDAVStorage",
|
"caldav": "vdirsyncer.storage.dav.CalDAVStorage",
|
||||||
carddav="vdirsyncer.storage.dav.CardDAVStorage",
|
"carddav": "vdirsyncer.storage.dav.CardDAVStorage",
|
||||||
filesystem="vdirsyncer.storage.filesystem.FilesystemStorage",
|
"filesystem": "vdirsyncer.storage.filesystem.FilesystemStorage",
|
||||||
http="vdirsyncer.storage.http.HttpStorage",
|
"http": "vdirsyncer.storage.http.HttpStorage",
|
||||||
singlefile="vdirsyncer.storage.singlefile.SingleFileStorage",
|
"singlefile": "vdirsyncer.storage.singlefile.SingleFileStorage",
|
||||||
google_calendar="vdirsyncer.storage.google.GoogleCalendarStorage",
|
"google_calendar": "vdirsyncer.storage.google.GoogleCalendarStorage",
|
||||||
google_contacts="vdirsyncer.storage.google.GoogleContactsStorage",
|
"google_contacts": "vdirsyncer.storage.google.GoogleContactsStorage",
|
||||||
etesync_calendars="vdirsyncer.storage.etesync.EtesyncCalendars",
|
}
|
||||||
etesync_contacts="vdirsyncer.storage.etesync.EtesyncContacts",
|
|
||||||
)
|
|
||||||
|
|
||||||
def __getitem__(self, name):
|
def __getitem__(self, name: str) -> Storage:
|
||||||
item = self._storages[name]
|
item = self._storages[name]
|
||||||
if not isinstance(item, str):
|
if not isinstance(item, str):
|
||||||
return item
|
return item
|
||||||
|
|
@ -79,33 +79,27 @@ def handle_cli_error(status_name=None, e=None):
|
||||||
cli_logger.critical(e)
|
cli_logger.critical(e)
|
||||||
except StorageEmpty as e:
|
except StorageEmpty as e:
|
||||||
cli_logger.error(
|
cli_logger.error(
|
||||||
'{status_name}: Storage "{name}" was completely emptied. If you '
|
f'{status_name}: Storage "{e.empty_storage.instance_name}" was '
|
||||||
"want to delete ALL entries on BOTH sides, then use "
|
"completely emptied. If you want to delete ALL entries on BOTH sides,"
|
||||||
"`vdirsyncer sync --force-delete {status_name}`. "
|
f"then use `vdirsyncer sync --force-delete {status_name}`. "
|
||||||
"Otherwise delete the files for {status_name} in your status "
|
f"Otherwise delete the files for {status_name} in your status "
|
||||||
"directory.".format(
|
"directory."
|
||||||
name=e.empty_storage.instance_name, status_name=status_name
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
except PartialSync as e:
|
except PartialSync as e:
|
||||||
cli_logger.error(
|
cli_logger.error(
|
||||||
"{status_name}: Attempted change on {storage}, which is read-only"
|
f"{status_name}: Attempted change on {e.storage}, which is read-only"
|
||||||
". Set `partial_sync` in your pair section to `ignore` to ignore "
|
". Set `partial_sync` in your pair section to `ignore` to ignore "
|
||||||
"those changes, or `revert` to revert them on the other side.".format(
|
"those changes, or `revert` to revert them on the other side."
|
||||||
status_name=status_name, storage=e.storage
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
except SyncConflict as e:
|
except SyncConflict as e:
|
||||||
cli_logger.error(
|
cli_logger.error(
|
||||||
"{status_name}: One item changed on both sides. Resolve this "
|
f"{status_name}: One item changed on both sides. Resolve this "
|
||||||
"conflict manually, or by setting the `conflict_resolution` "
|
"conflict manually, or by setting the `conflict_resolution` "
|
||||||
"parameter in your config file.\n"
|
"parameter in your config file.\n"
|
||||||
"See also {docs}/config.html#pair-section\n"
|
f"See also {DOCS_HOME}/config.html#pair-section\n"
|
||||||
"Item ID: {e.ident}\n"
|
f"Item ID: {e.ident}\n"
|
||||||
"Item href on side A: {e.href_a}\n"
|
f"Item href on side A: {e.href_a}\n"
|
||||||
"Item href on side B: {e.href_b}\n".format(
|
f"Item href on side B: {e.href_b}\n"
|
||||||
status_name=status_name, e=e, docs=DOCS_HOME
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
except IdentConflict as e:
|
except IdentConflict as e:
|
||||||
cli_logger.error(
|
cli_logger.error(
|
||||||
|
|
@ -126,17 +120,17 @@ def handle_cli_error(status_name=None, e=None):
|
||||||
pass
|
pass
|
||||||
except exceptions.PairNotFound as e:
|
except exceptions.PairNotFound as e:
|
||||||
cli_logger.error(
|
cli_logger.error(
|
||||||
"Pair {pair_name} does not exist. Please check your "
|
f"Pair {e.pair_name} does not exist. Please check your "
|
||||||
"configuration file and make sure you've typed the pair name "
|
"configuration file and make sure you've typed the pair name "
|
||||||
"correctly".format(pair_name=e.pair_name)
|
"correctly"
|
||||||
)
|
)
|
||||||
except exceptions.InvalidResponse as e:
|
except exceptions.InvalidResponse as e:
|
||||||
cli_logger.error(
|
cli_logger.error(
|
||||||
"The server returned something vdirsyncer doesn't understand. "
|
"The server returned something vdirsyncer doesn't understand. "
|
||||||
"Error message: {!r}\n"
|
f"Error message: {e!r}\n"
|
||||||
"While this is most likely a serverside problem, the vdirsyncer "
|
"While this is most likely a serverside problem, the vdirsyncer "
|
||||||
"devs are generally interested in such bugs. Please report it in "
|
"devs are generally interested in such bugs. Please report it in "
|
||||||
"the issue tracker at {}".format(e, BUGTRACKER_HOME)
|
f"the issue tracker at {BUGTRACKER_HOME}"
|
||||||
)
|
)
|
||||||
except exceptions.CollectionRequired:
|
except exceptions.CollectionRequired:
|
||||||
cli_logger.error(
|
cli_logger.error(
|
||||||
|
|
@ -159,32 +153,40 @@ def handle_cli_error(status_name=None, e=None):
|
||||||
cli_logger.debug("".join(tb))
|
cli_logger.debug("".join(tb))
|
||||||
|
|
||||||
|
|
||||||
def get_status_name(pair, collection):
|
def get_status_name(pair: str, collection: str | None) -> str:
|
||||||
if collection is None:
|
if collection is None:
|
||||||
return pair
|
return pair
|
||||||
return pair + "/" + collection
|
return pair + "/" + collection
|
||||||
|
|
||||||
|
|
||||||
def get_status_path(base_path, pair, collection=None, data_type=None):
|
def get_status_path(
|
||||||
|
base_path: str,
|
||||||
|
pair: str,
|
||||||
|
collection: str | None = None,
|
||||||
|
data_type: str | None = None,
|
||||||
|
) -> str:
|
||||||
assert data_type is not None
|
assert data_type is not None
|
||||||
status_name = get_status_name(pair, collection)
|
status_name = get_status_name(pair, collection)
|
||||||
path = expand_path(os.path.join(base_path, status_name))
|
path = expand_path(os.path.join(base_path, status_name))
|
||||||
if os.path.isfile(path) and data_type == "items":
|
if os.path.isfile(path) and data_type == "items":
|
||||||
new_path = path + ".items"
|
new_path = path + ".items"
|
||||||
# XXX: Legacy migration
|
# XXX: Legacy migration
|
||||||
cli_logger.warning(
|
cli_logger.warning(f"Migrating statuses: Renaming {path} to {new_path}")
|
||||||
"Migrating statuses: Renaming {} to {}".format(path, new_path)
|
|
||||||
)
|
|
||||||
os.rename(path, new_path)
|
os.rename(path, new_path)
|
||||||
|
|
||||||
path += "." + data_type
|
path += "." + data_type
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
def load_status(base_path, pair, collection=None, data_type=None):
|
def load_status(
|
||||||
|
base_path: str,
|
||||||
|
pair: str,
|
||||||
|
collection: str | None = None,
|
||||||
|
data_type: str | None = None,
|
||||||
|
) -> dict[str, Any]:
|
||||||
path = get_status_path(base_path, pair, collection, data_type)
|
path = get_status_path(base_path, pair, collection, data_type)
|
||||||
if not os.path.exists(path):
|
if not os.path.exists(path):
|
||||||
return None
|
return {}
|
||||||
assert_permissions(path, STATUS_PERMISSIONS)
|
assert_permissions(path, STATUS_PERMISSIONS)
|
||||||
|
|
||||||
with open(path) as f:
|
with open(path) as f:
|
||||||
|
|
@ -196,7 +198,7 @@ def load_status(base_path, pair, collection=None, data_type=None):
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
|
||||||
def prepare_status_path(path):
|
def prepare_status_path(path: str) -> None:
|
||||||
dirname = os.path.dirname(path)
|
dirname = os.path.dirname(path)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
@ -207,7 +209,7 @@ def prepare_status_path(path):
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def manage_sync_status(base_path, pair_name, collection_name):
|
def manage_sync_status(base_path: str, pair_name: str, collection_name: str):
|
||||||
path = get_status_path(base_path, pair_name, collection_name, "items")
|
path = get_status_path(base_path, pair_name, collection_name, "items")
|
||||||
status = None
|
status = None
|
||||||
legacy_status = None
|
legacy_status = None
|
||||||
|
|
@ -229,12 +231,17 @@ def manage_sync_status(base_path, pair_name, collection_name):
|
||||||
prepare_status_path(path)
|
prepare_status_path(path)
|
||||||
status = SqliteStatus(path)
|
status = SqliteStatus(path)
|
||||||
|
|
||||||
yield status
|
with contextlib.closing(status):
|
||||||
|
yield status
|
||||||
|
|
||||||
|
|
||||||
def save_status(base_path, pair, collection=None, data_type=None, data=None):
|
def save_status(
|
||||||
assert data_type is not None
|
base_path: str,
|
||||||
assert data is not None
|
pair: str,
|
||||||
|
data_type: str,
|
||||||
|
data: dict[str, Any],
|
||||||
|
collection: str | None = None,
|
||||||
|
) -> None:
|
||||||
status_name = get_status_name(pair, collection)
|
status_name = get_status_name(pair, collection)
|
||||||
path = expand_path(os.path.join(base_path, status_name)) + "." + data_type
|
path = expand_path(os.path.join(base_path, status_name)) + "." + data_type
|
||||||
prepare_status_path(path)
|
prepare_status_path(path)
|
||||||
|
|
@ -255,24 +262,38 @@ def storage_class_from_config(config):
|
||||||
return cls, config
|
return cls, config
|
||||||
|
|
||||||
|
|
||||||
def storage_instance_from_config(config, create=True):
|
async def storage_instance_from_config(
|
||||||
|
config,
|
||||||
|
create=True,
|
||||||
|
*,
|
||||||
|
connector: aiohttp.TCPConnector,
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
:param config: A configuration dictionary to pass as kwargs to the class
|
:param config: A configuration dictionary to pass as kwargs to the class
|
||||||
corresponding to config['type']
|
corresponding to config['type']
|
||||||
"""
|
"""
|
||||||
|
from vdirsyncer.storage.dav import DAVStorage
|
||||||
|
from vdirsyncer.storage.http import HttpStorage
|
||||||
|
|
||||||
cls, new_config = storage_class_from_config(config)
|
cls, new_config = storage_class_from_config(config)
|
||||||
|
|
||||||
|
if issubclass(cls, DAVStorage) or issubclass(cls, HttpStorage):
|
||||||
|
assert connector is not None # FIXME: hack?
|
||||||
|
new_config["connector"] = connector
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return cls(**new_config)
|
return cls(**new_config)
|
||||||
except exceptions.CollectionNotFound as e:
|
except exceptions.CollectionNotFound as e:
|
||||||
if create:
|
if create:
|
||||||
config = handle_collection_not_found(
|
config = await handle_collection_not_found(
|
||||||
config, config.get("collection", None), e=str(e)
|
config, config.get("collection", None), e=str(e), implicit_create=True
|
||||||
)
|
)
|
||||||
return storage_instance_from_config(config, create=False)
|
return await storage_instance_from_config(
|
||||||
else:
|
config,
|
||||||
raise
|
create=False,
|
||||||
|
connector=connector,
|
||||||
|
)
|
||||||
|
raise
|
||||||
except Exception:
|
except Exception:
|
||||||
return handle_storage_init_error(cls, new_config)
|
return handle_storage_init_error(cls, new_config)
|
||||||
|
|
||||||
|
|
@ -311,104 +332,18 @@ def handle_storage_init_error(cls, config):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class WorkerQueue:
|
def assert_permissions(path: str, wanted: int) -> None:
|
||||||
"""
|
|
||||||
A simple worker-queue setup.
|
|
||||||
|
|
||||||
Note that workers quit if queue is empty. That means you have to first put
|
|
||||||
things into the queue before spawning the worker!
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, max_workers):
|
|
||||||
self._queue = queue.Queue()
|
|
||||||
self._workers = []
|
|
||||||
self._max_workers = max_workers
|
|
||||||
self._shutdown_handlers = []
|
|
||||||
|
|
||||||
# According to http://stackoverflow.com/a/27062830, those are
|
|
||||||
# threadsafe compared to increasing a simple integer variable.
|
|
||||||
self.num_done_tasks = itertools.count()
|
|
||||||
self.num_failed_tasks = itertools.count()
|
|
||||||
|
|
||||||
def shutdown(self):
|
|
||||||
while self._shutdown_handlers:
|
|
||||||
try:
|
|
||||||
self._shutdown_handlers.pop()()
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _worker(self):
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
func = self._queue.get(False)
|
|
||||||
except queue.Empty:
|
|
||||||
break
|
|
||||||
|
|
||||||
try:
|
|
||||||
func(wq=self)
|
|
||||||
except Exception:
|
|
||||||
handle_cli_error()
|
|
||||||
next(self.num_failed_tasks)
|
|
||||||
finally:
|
|
||||||
self._queue.task_done()
|
|
||||||
next(self.num_done_tasks)
|
|
||||||
if not self._queue.unfinished_tasks:
|
|
||||||
self.shutdown()
|
|
||||||
|
|
||||||
def spawn_worker(self):
|
|
||||||
if self._max_workers and len(self._workers) >= self._max_workers:
|
|
||||||
return
|
|
||||||
|
|
||||||
t = click_threading.Thread(target=self._worker)
|
|
||||||
t.start()
|
|
||||||
self._workers.append(t)
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def join(self):
|
|
||||||
assert self._workers or not self._queue.unfinished_tasks
|
|
||||||
ui_worker = click_threading.UiWorker()
|
|
||||||
self._shutdown_handlers.append(ui_worker.shutdown)
|
|
||||||
_echo = click.echo
|
|
||||||
|
|
||||||
with ui_worker.patch_click():
|
|
||||||
yield
|
|
||||||
|
|
||||||
if not self._workers:
|
|
||||||
# Ugly hack, needed because ui_worker is not running.
|
|
||||||
click.echo = _echo
|
|
||||||
cli_logger.critical("Nothing to do.")
|
|
||||||
sys.exit(5)
|
|
||||||
|
|
||||||
ui_worker.run()
|
|
||||||
self._queue.join()
|
|
||||||
for worker in self._workers:
|
|
||||||
worker.join()
|
|
||||||
|
|
||||||
tasks_failed = next(self.num_failed_tasks)
|
|
||||||
tasks_done = next(self.num_done_tasks)
|
|
||||||
|
|
||||||
if tasks_failed > 0:
|
|
||||||
cli_logger.error(
|
|
||||||
"{} out of {} tasks failed.".format(tasks_failed, tasks_done)
|
|
||||||
)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def put(self, f):
|
|
||||||
return self._queue.put(f)
|
|
||||||
|
|
||||||
|
|
||||||
def assert_permissions(path, wanted):
|
|
||||||
permissions = os.stat(path).st_mode & 0o777
|
permissions = os.stat(path).st_mode & 0o777
|
||||||
if permissions > wanted:
|
if permissions > wanted:
|
||||||
cli_logger.warning(
|
cli_logger.warning(
|
||||||
"Correcting permissions of {} from {:o} to {:o}".format(
|
f"Correcting permissions of {path} from {permissions:o} to {wanted:o}"
|
||||||
path, permissions, wanted
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
os.chmod(path, wanted)
|
os.chmod(path, wanted)
|
||||||
|
|
||||||
|
|
||||||
def handle_collection_not_found(config, collection, e=None):
|
async def handle_collection_not_found(
|
||||||
|
config, collection, e=None, implicit_create=False
|
||||||
|
):
|
||||||
storage_name = config.get("instance_name", None)
|
storage_name = config.get("instance_name", None)
|
||||||
|
|
||||||
cli_logger.warning(
|
cli_logger.warning(
|
||||||
|
|
@ -417,19 +352,19 @@ def handle_collection_not_found(config, collection, e=None):
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
if click.confirm("Should vdirsyncer attempt to create it?"):
|
if implicit_create or click.confirm("Should vdirsyncer attempt to create it?"):
|
||||||
storage_type = config["type"]
|
storage_type = config["type"]
|
||||||
cls, config = storage_class_from_config(config)
|
cls, config = storage_class_from_config(config)
|
||||||
config["collection"] = collection
|
config["collection"] = collection
|
||||||
try:
|
try:
|
||||||
args = cls.create_collection(**config)
|
args = await cls.create_collection(**config)
|
||||||
args["type"] = storage_type
|
args["type"] = storage_type
|
||||||
return args
|
return args
|
||||||
except NotImplementedError as e:
|
except NotImplementedError as e:
|
||||||
cli_logger.error(e)
|
cli_logger.error(e)
|
||||||
|
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
'Unable to find or create collection "{collection}" for '
|
f'Unable to find or create collection "{collection}" for '
|
||||||
'storage "{storage}". Please create the collection '
|
f'storage "{storage_name}". Please create the collection '
|
||||||
"yourself.".format(collection=collection, storage=storage_name)
|
"yourself."
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,8 @@ Contains exception classes used by vdirsyncer. Not all exceptions are here,
|
||||||
only the most commonly used ones.
|
only the most commonly used ones.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class Error(Exception):
|
class Error(Exception):
|
||||||
"""Baseclass for all errors."""
|
"""Baseclass for all errors."""
|
||||||
|
|
|
||||||
|
|
@ -1,94 +1,150 @@
|
||||||
import logging
|
from __future__ import annotations
|
||||||
|
|
||||||
import requests
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import re
|
||||||
|
from abc import ABC
|
||||||
|
from abc import abstractmethod
|
||||||
|
from base64 import b64encode
|
||||||
|
from ssl import create_default_context
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import requests.auth
|
||||||
|
from aiohttp import ServerDisconnectedError
|
||||||
|
from aiohttp import ServerTimeoutError
|
||||||
|
from requests.utils import parse_dict_header
|
||||||
|
from tenacity import retry
|
||||||
|
from tenacity import retry_if_exception_type
|
||||||
|
from tenacity import stop_after_attempt
|
||||||
|
from tenacity import wait_exponential
|
||||||
|
|
||||||
from . import __version__
|
from . import __version__
|
||||||
from . import DOCS_HOME
|
|
||||||
from . import exceptions
|
from . import exceptions
|
||||||
from .utils import expand_path
|
from .utils import expand_path
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
USERAGENT = f"vdirsyncer/{__version__}"
|
USERAGENT = f"vdirsyncer/{__version__}"
|
||||||
|
|
||||||
|
# 'hack' to prevent aiohttp from loading the netrc config,
|
||||||
def _detect_faulty_requests(): # pragma: no cover
|
# but still allow it to read PROXY_* env vars.
|
||||||
text = (
|
# Otherwise, if our host is defined in the netrc config,
|
||||||
"Error during import: {e}\n\n"
|
# aiohttp will overwrite our Authorization header.
|
||||||
"If you have installed vdirsyncer from a distro package, please file "
|
# https://github.com/pimutils/vdirsyncer/issues/1138
|
||||||
"a bug against that package, not vdirsyncer.\n\n"
|
os.environ["NETRC"] = "NUL" if platform.system() == "Windows" else "/dev/null"
|
||||||
"Consult {d}/problems.html#requests-related-importerrors"
|
|
||||||
"-based-distributions on how to work around this."
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
from requests_toolbelt.auth.guess import GuessAuth # noqa
|
|
||||||
except ImportError as e:
|
|
||||||
import sys
|
|
||||||
|
|
||||||
print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
_detect_faulty_requests()
|
class AuthMethod(ABC):
|
||||||
del _detect_faulty_requests
|
def __init__(self, username, password):
|
||||||
|
self.username = username
|
||||||
|
self.password = password
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def handle_401(self, response):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_auth_header(self, method, url):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, AuthMethod):
|
||||||
|
return False
|
||||||
|
return (
|
||||||
|
self.__class__ == other.__class__
|
||||||
|
and self.username == other.username
|
||||||
|
and self.password == other.password
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BasicAuthMethod(AuthMethod):
|
||||||
|
def handle_401(self, _response):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_auth_header(self, _method, _url):
|
||||||
|
auth_str = f"{self.username}:{self.password}"
|
||||||
|
return "Basic " + b64encode(auth_str.encode("utf-8")).decode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
class DigestAuthMethod(AuthMethod):
|
||||||
|
# make class var to 'cache' the state, which is more efficient because otherwise
|
||||||
|
# each request would first require another 'initialization' request.
|
||||||
|
_auth_helpers: dict[tuple[str, str], requests.auth.HTTPDigestAuth] = {}
|
||||||
|
|
||||||
|
def __init__(self, username: str, password: str):
|
||||||
|
super().__init__(username, password)
|
||||||
|
|
||||||
|
self._auth_helper = self._auth_helpers.get(
|
||||||
|
(username, password), requests.auth.HTTPDigestAuth(username, password)
|
||||||
|
)
|
||||||
|
self._auth_helpers[(username, password)] = self._auth_helper
|
||||||
|
|
||||||
|
@property
|
||||||
|
def auth_helper_vars(self):
|
||||||
|
return self._auth_helper._thread_local
|
||||||
|
|
||||||
|
def handle_401(self, response):
|
||||||
|
s_auth = response.headers.get("www-authenticate", "")
|
||||||
|
|
||||||
|
if "digest" in s_auth.lower():
|
||||||
|
# Original source:
|
||||||
|
# https://github.com/psf/requests/blob/f12ccbef6d6b95564da8d22e280d28c39d53f0e9/src/requests/auth.py#L262-L263
|
||||||
|
pat = re.compile(r"digest ", flags=re.IGNORECASE)
|
||||||
|
self.auth_helper_vars.chal = parse_dict_header(pat.sub("", s_auth, count=1))
|
||||||
|
|
||||||
|
def get_auth_header(self, method, url):
|
||||||
|
self._auth_helper.init_per_thread_state()
|
||||||
|
|
||||||
|
if not self.auth_helper_vars.chal:
|
||||||
|
# Need to do init request first
|
||||||
|
return ""
|
||||||
|
|
||||||
|
return self._auth_helper.build_digest_header(method, url)
|
||||||
|
|
||||||
|
|
||||||
def prepare_auth(auth, username, password):
|
def prepare_auth(auth, username, password):
|
||||||
if username and password:
|
if username and password:
|
||||||
if auth == "basic" or auth is None:
|
if auth == "basic" or auth is None:
|
||||||
return (username, password)
|
return BasicAuthMethod(username, password)
|
||||||
elif auth == "digest":
|
if auth == "digest":
|
||||||
from requests.auth import HTTPDigestAuth
|
return DigestAuthMethod(username, password)
|
||||||
|
if auth == "guess":
|
||||||
return HTTPDigestAuth(username, password)
|
raise exceptions.UserError(
|
||||||
elif auth == "guess":
|
"'Guess' authentication is not supported in this version of "
|
||||||
try:
|
"vdirsyncer.\n"
|
||||||
from requests_toolbelt.auth.guess import GuessAuth
|
"Please explicitly specify either 'basic' or 'digest' auth instead. \n"
|
||||||
except ImportError:
|
"See the following issue for more information: "
|
||||||
raise exceptions.UserError(
|
"https://github.com/pimutils/vdirsyncer/issues/1015"
|
||||||
"Your version of requests_toolbelt is too "
|
)
|
||||||
"old for `guess` authentication. At least "
|
|
||||||
"version 0.4.0 is required."
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return GuessAuth(username, password)
|
|
||||||
else:
|
else:
|
||||||
raise exceptions.UserError("Unknown authentication method: {}".format(auth))
|
raise exceptions.UserError(f"Unknown authentication method: {auth}")
|
||||||
elif auth:
|
elif auth:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"You need to specify username and password "
|
f"You need to specify username and password for {auth} authentication."
|
||||||
"for {} authentication.".format(auth)
|
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def prepare_verify(verify, verify_fingerprint):
|
def prepare_verify(verify, verify_fingerprint):
|
||||||
if isinstance(verify, (str, bytes)):
|
if isinstance(verify, str):
|
||||||
verify = expand_path(verify)
|
return create_default_context(cafile=expand_path(verify))
|
||||||
elif not isinstance(verify, bool):
|
elif verify is not None:
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Invalid value for verify ({}), "
|
f"Invalid value for verify ({verify}), must be a path to a PEM-file."
|
||||||
"must be a path to a PEM-file or boolean.".format(verify)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if verify_fingerprint is not None:
|
if verify_fingerprint is not None:
|
||||||
if not isinstance(verify_fingerprint, (bytes, str)):
|
if not isinstance(verify_fingerprint, str):
|
||||||
raise exceptions.UserError(
|
raise exceptions.UserError(
|
||||||
"Invalid value for verify_fingerprint "
|
"Invalid value for verify_fingerprint "
|
||||||
"({}), must be a string or null.".format(verify_fingerprint)
|
f"({verify_fingerprint}), must be a string."
|
||||||
)
|
)
|
||||||
elif not verify:
|
|
||||||
raise exceptions.UserError(
|
|
||||||
"Disabling all SSL validation is forbidden. Consider setting "
|
|
||||||
"verify_fingerprint if you have a broken or self-signed cert."
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
return aiohttp.Fingerprint(bytes.fromhex(verify_fingerprint.replace(":", "")))
|
||||||
"verify": verify,
|
|
||||||
"verify_fingerprint": verify_fingerprint,
|
return None
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def prepare_client_cert(cert):
|
def prepare_client_cert(cert):
|
||||||
|
|
@ -99,31 +155,80 @@ def prepare_client_cert(cert):
|
||||||
return cert
|
return cert
|
||||||
|
|
||||||
|
|
||||||
def _install_fingerprint_adapter(session, fingerprint):
|
class TransientNetworkError(exceptions.Error):
|
||||||
prefix = "https://"
|
"""Transient network condition that should be retried."""
|
||||||
try:
|
|
||||||
from requests_toolbelt.adapters.fingerprint import FingerprintAdapter
|
|
||||||
except ImportError:
|
|
||||||
raise RuntimeError(
|
|
||||||
"`verify_fingerprint` can only be used with "
|
|
||||||
"requests-toolbelt versions >= 0.4.0"
|
|
||||||
)
|
|
||||||
|
|
||||||
if not isinstance(session.adapters[prefix], FingerprintAdapter):
|
|
||||||
fingerprint_adapter = FingerprintAdapter(fingerprint)
|
|
||||||
session.mount(prefix, fingerprint_adapter)
|
|
||||||
|
|
||||||
|
|
||||||
def request(
|
def _is_safe_to_retry_method(method: str) -> bool:
|
||||||
method, url, session=None, latin1_fallback=True, verify_fingerprint=None, **kwargs
|
"""Returns True if the HTTP method is safe/idempotent to retry.
|
||||||
):
|
|
||||||
|
We consider these safe for our WebDAV usage:
|
||||||
|
- GET, HEAD, OPTIONS: standard safe methods
|
||||||
|
- PROPFIND, REPORT: read-only DAV queries used for listing/fetching
|
||||||
"""
|
"""
|
||||||
Wrapper method for requests, to ease logging and mocking. Parameters should
|
return method.upper() in {"GET", "HEAD", "OPTIONS", "PROPFIND", "REPORT"}
|
||||||
be the same as for ``requests.request``, except:
|
|
||||||
|
|
||||||
|
class UsageLimitReached(exceptions.Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
async def _is_quota_exceeded_google(response: aiohttp.ClientResponse) -> bool:
|
||||||
|
"""Return True if the response JSON indicates Google-style `usageLimits` exceeded.
|
||||||
|
|
||||||
|
Expected shape:
|
||||||
|
{"error": {"errors": [{"domain": "usageLimits", ...}], ...}}
|
||||||
|
|
||||||
|
See https://developers.google.com/workspace/calendar/api/guides/errors#403_usage_limits_exceeded
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
data = await response.json(content_type=None)
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
return False
|
||||||
|
|
||||||
|
error = data.get("error")
|
||||||
|
if not isinstance(error, dict):
|
||||||
|
return False
|
||||||
|
|
||||||
|
errors = error.get("errors")
|
||||||
|
if not isinstance(errors, list):
|
||||||
|
return False
|
||||||
|
|
||||||
|
for entry in errors:
|
||||||
|
if isinstance(entry, dict) and entry.get("domain") == "usageLimits":
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
@retry(
|
||||||
|
stop=stop_after_attempt(5),
|
||||||
|
wait=wait_exponential(multiplier=1, min=4, max=10),
|
||||||
|
retry=(
|
||||||
|
retry_if_exception_type(UsageLimitReached)
|
||||||
|
| retry_if_exception_type(TransientNetworkError)
|
||||||
|
),
|
||||||
|
reraise=True,
|
||||||
|
)
|
||||||
|
async def request(
|
||||||
|
method,
|
||||||
|
url,
|
||||||
|
session,
|
||||||
|
auth=None,
|
||||||
|
latin1_fallback=True,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
|
"""Wrapper method for requests, to ease logging and mocking as well as to
|
||||||
|
support auth methods currently unsupported by aiohttp.
|
||||||
|
|
||||||
|
Parameters should be the same as for ``aiohttp.request``, except:
|
||||||
|
|
||||||
:param session: A requests session object to use.
|
:param session: A requests session object to use.
|
||||||
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
|
:param auth: The HTTP ``AuthMethod`` to use for authentication.
|
||||||
expected server certificate.
|
:param verify_fingerprint: Optional. SHA256 of the expected server certificate.
|
||||||
:param latin1_fallback: RFC-2616 specifies the default Content-Type of
|
:param latin1_fallback: RFC-2616 specifies the default Content-Type of
|
||||||
text/* to be latin1, which is not always correct, but exactly what
|
text/* to be latin1, which is not always correct, but exactly what
|
||||||
requests is doing. Setting this parameter to False will use charset
|
requests is doing. Setting this parameter to False will use charset
|
||||||
|
|
@ -132,47 +237,93 @@ def request(
|
||||||
https://github.com/kennethreitz/requests/issues/2042
|
https://github.com/kennethreitz/requests/issues/2042
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if session is None:
|
# TODO: Support for client-side certifications.
|
||||||
session = requests.Session()
|
|
||||||
|
|
||||||
if verify_fingerprint is not None:
|
|
||||||
_install_fingerprint_adapter(session, verify_fingerprint)
|
|
||||||
|
|
||||||
session.hooks = {"response": _fix_redirects}
|
session.hooks = {"response": _fix_redirects}
|
||||||
|
|
||||||
func = session.request
|
# TODO: rewrite using
|
||||||
|
# https://docs.aiohttp.org/en/stable/client_advanced.html#client-tracing
|
||||||
logger.debug("=" * 20)
|
logger.debug("=" * 20)
|
||||||
logger.debug(f"{method} {url}")
|
logger.debug(f"{method} {url}")
|
||||||
logger.debug(kwargs.get("headers", {}))
|
logger.debug(kwargs.get("headers", {}))
|
||||||
logger.debug(kwargs.get("data", None))
|
logger.debug(kwargs.get("data"))
|
||||||
logger.debug("Sending request...")
|
logger.debug("Sending request...")
|
||||||
|
|
||||||
assert isinstance(kwargs.get("data", b""), bytes)
|
assert isinstance(kwargs.get("data", b""), bytes)
|
||||||
|
|
||||||
r = func(method, url, **kwargs)
|
cert = kwargs.pop("cert", None)
|
||||||
|
if cert is not None:
|
||||||
|
ssl_context = kwargs.pop("ssl", create_default_context())
|
||||||
|
ssl_context.load_cert_chain(*cert)
|
||||||
|
kwargs["ssl"] = ssl_context
|
||||||
|
|
||||||
|
headers = kwargs.pop("headers", {})
|
||||||
|
response: aiohttp.ClientResponse | None = None
|
||||||
|
for _attempt in range(2):
|
||||||
|
if auth:
|
||||||
|
headers["Authorization"] = auth.get_auth_header(method, url)
|
||||||
|
try:
|
||||||
|
response = await session.request(method, url, headers=headers, **kwargs)
|
||||||
|
except (
|
||||||
|
ServerDisconnectedError,
|
||||||
|
ServerTimeoutError,
|
||||||
|
asyncio.TimeoutError,
|
||||||
|
) as e:
|
||||||
|
# Retry only if the method is safe/idempotent for our DAV use
|
||||||
|
if _is_safe_to_retry_method(method):
|
||||||
|
logger.debug(
|
||||||
|
f"Transient network error on {method} {url}: {e}. Will retry."
|
||||||
|
)
|
||||||
|
raise TransientNetworkError(str(e)) from e
|
||||||
|
raise e from None
|
||||||
|
|
||||||
|
if response is None:
|
||||||
|
raise RuntimeError("No HTTP response obtained")
|
||||||
|
|
||||||
|
if response.ok or not auth:
|
||||||
|
# we don't need to do the 401-loop if we don't do auth in the first place
|
||||||
|
break
|
||||||
|
|
||||||
|
if response.status == 401:
|
||||||
|
auth.handle_401(response)
|
||||||
|
# retry once more after handling the 401 challenge
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
# some other error, will be handled later on
|
||||||
|
break
|
||||||
|
|
||||||
|
if response is None:
|
||||||
|
raise RuntimeError("No HTTP response obtained")
|
||||||
|
|
||||||
# See https://github.com/kennethreitz/requests/issues/2042
|
# See https://github.com/kennethreitz/requests/issues/2042
|
||||||
content_type = r.headers.get("Content-Type", "")
|
content_type = response.headers.get("Content-Type", "")
|
||||||
if (
|
if (
|
||||||
not latin1_fallback
|
not latin1_fallback
|
||||||
and "charset" not in content_type
|
and "charset" not in content_type
|
||||||
and content_type.startswith("text/")
|
and content_type.startswith("text/")
|
||||||
):
|
):
|
||||||
logger.debug("Removing latin1 fallback")
|
logger.debug("Removing latin1 fallback")
|
||||||
r.encoding = None
|
response.encoding = None
|
||||||
|
|
||||||
logger.debug(r.status_code)
|
logger.debug(response.status)
|
||||||
logger.debug(r.headers)
|
logger.debug(response.headers)
|
||||||
logger.debug(r.content)
|
logger.debug(response.content)
|
||||||
|
|
||||||
if r.status_code == 412:
|
if logger.getEffectiveLevel() <= logging.DEBUG and response.status >= 400:
|
||||||
raise exceptions.PreconditionFailed(r.reason)
|
# https://github.com/pimutils/vdirsyncer/issues/1186
|
||||||
if r.status_code in (404, 410):
|
logger.debug(await response.text())
|
||||||
raise exceptions.NotFoundError(r.reason)
|
|
||||||
|
|
||||||
r.raise_for_status()
|
if response.status == 403 and await _is_quota_exceeded_google(response):
|
||||||
return r
|
raise UsageLimitReached(response.reason)
|
||||||
|
if response.status == 412:
|
||||||
|
raise exceptions.PreconditionFailed(response.reason)
|
||||||
|
if response.status in (404, 410):
|
||||||
|
raise exceptions.NotFoundError(response.reason)
|
||||||
|
if response.status == 429:
|
||||||
|
raise UsageLimitReached(response.reason)
|
||||||
|
|
||||||
|
response.raise_for_status()
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
def _fix_redirects(r, *args, **kwargs):
|
def _fix_redirects(r, *args, **kwargs):
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue